pax_global_header00006660000000000000000000000064141700115110014501gustar00rootroot0000000000000052 comment=a59c356a45180dd56b1ec86c8725a257bffe71fa coq-8.15.0/000077500000000000000000000000001417001151100123565ustar00rootroot00000000000000coq-8.15.0/.github/000077500000000000000000000000001417001151100137165ustar00rootroot00000000000000coq-8.15.0/.github/CODEOWNERS000066400000000000000000000223131417001151100153120ustar00rootroot00000000000000# This file associates maintainer teams to each component. # See CONTRIBUTING.md ########## Contributing process ########## /.github/ @coq/contributing-process-maintainers /CONTRIBUTING.md @coq/contributing-process-maintainers ########## Build system ########## /Makefile* @coq/legacy-build-maintainers /dev/tools/make_git_revision.sh @coq/legacy-build-maintainers /configure @coq/legacy-build-maintainers @coq/build-maintainers /tools/configure/* @coq/legacy-build-maintainers @coq/build-maintainers /boot/ @coq/build-maintainers ########## CI infrastructure ########## /dev/ci/ @coq/ci-maintainers /dev/lint-*.sh @coq/ci-maintainers /.travis.yml @coq/ci-maintainers /.gitlab-ci.yml @coq/ci-maintainers /.github/workflows @coq/ci-maintainers /dev/ci/platform/ @coq/windows-build-maintainers /Makefile.ci @coq/ci-maintainers /dev/ci/nix @coq/nix-maintainers *.nix @coq/nix-maintainers /dev/ci/user-overlays/*.sh @ghost # Trick to avoid getting review requests # each time someone adds an overlay /dev/bench/ @coq/bench-maintainers ########## Documentation ########## /README.md @coq/doc-maintainers /INSTALL.md @coq/doc-maintainers /CODE_OF_CONDUCT.md @coq/code-of-conduct-team /doc/ @coq/doc-maintainers /Makefile.doc @coq/doc-maintainers /dev/doc/ @coq/doc-maintainers /doc/changelog/*/*.rst @ghost /dev/doc/changes.md @ghost # Trick to avoid getting review requests # each time someone modifies the changelog /dev/doc/build-system*.txt @coq/legacy-build-maintainers /dev/doc/build-system.dune.md @coq/build-maintainers /dev/doc/critical-bugs @coq/kernel-maintainers /dev/doc/econstr.md @coq/engine-maintainers /dev/doc/proof-engine.md @coq/engine-maintainers /dev/doc/release-process.md @coq/contributing-process-maintainers /dev/doc/shield-icon.png @coq/contributing-process-maintainers /dev/doc/SProp.md @coq/universes-maintainers /dev/doc/style.txt @coq/contributing-process-maintainers /dev/doc/unification.txt @coq/pretyper-maintainers /dev/doc/universes.md @coq/universes-maintainers /dev/doc/xml-protocol @coq/stm-maintainers /man/ @coq/doc-maintainers /doc/plugin_tutorial/ @coq/plugin-tutorial-maintainers ########## Coqchk ########## /checker/ @coq/kernel-maintainers /test-suite/coqchk/ @coq/kernel-maintainers ########## Coq lib ########## /clib/ @coq/lib-maintainers /test-suite/unit-tests/clib/ @coq/lib-maintainers /lib/ @coq/lib-maintainers ########## Proof engine ########## /engine/ @coq/engine-maintainers /engine/univ* @coq/universes-maintainers /engine/uState.* @coq/universes-maintainers ########## CoqIDE ########## /ide/ @coq/coqide-maintainers /ide/protocol/ @coq/stm-maintainers /test-suite/ide/ @coq/stm-maintainers ########## Desugaring ########## /interp/ @coq/extensible-syntax-maintainers ########## Kernel ########## /kernel/ @coq/kernel-maintainers /kernel/byterun/ @coq/vm-native-maintainers /kernel/native* @coq/vm-native-maintainers /kernel/vm* @coq/vm-native-maintainers /kernel/vconv.* @coq/vm-native-maintainers /kernel/genOpcodefiles.* @coq/vm-native-maintainers /kernel/sorts.* @coq/universes-maintainers /kernel/uGraph.* @coq/universes-maintainers 
/kernel/univ.* @coq/universes-maintainers ########## Library ########## /library/ @coq/library-maintainers ########## Parser ########## /coqpp/ @coq/parsing-maintainers /gramlib/ @coq/parsing-maintainers /parsing/ @coq/parsing-maintainers ########## Standard library and plugins ########## /theories/ @coq/stdlib-maintainers /theories/Classes/ @coq/typeclasses-maintainers /theories/Reals/ @coq/reals-library-maintainers /theories/Compat/ @coq/compat-maintainers /plugins/btauto/ @coq/btauto-maintainers /theories/btauto/ @coq/btauto-maintainers /plugins/cc/ @coq/cc-maintainers /theories/cc/ @coq/cc-maintainers /plugins/derive/ @coq/derive-maintainers /theories/derive/ @coq/derive-maintainers /plugins/extraction/ @coq/extraction-maintainers /theories/extraction/ @coq/extraction-maintainers /plugins/firstorder/ @coq/firstorder-maintainers /theories/firstorder/ @coq/firstorder-maintainers /plugins/funind/ @coq/funind-maintainers /theories/funind/ @coq/funind-maintainers /plugins/ltac/ @coq/ltac-maintainers /theories/ltac/ @coq/ltac-maintainers /plugins/micromega/ @coq/micromega-maintainers /theories/micromega/ @coq/micromega-maintainers /test-suite/micromega/ @coq/micromega-maintainers /plugins/nsatz/ @coq/nsatz-maintainers /theories/nsatz/ @coq/nsatz-maintainers /plugins/ring/ @coq/ring-maintainers /theories/setoid_ring/ @coq/ring-maintainers /plugins/ssrmatching/ @coq/ssreflect-maintainers /theories/ssrmatching/ @coq/ssreflect-maintainers /plugins/ssr/ @coq/ssreflect-maintainers /theories/ssr/ @coq/ssreflect-maintainers /test-suite/ssr/ @coq/ssreflect-maintainers /plugins/syntax/ @coq/parsing-maintainers /plugins/rtauto/ @coq/rtauto-maintainers /theories/rtauto/ @coq/rtauto-maintainers /user-contrib/Ltac2 @coq/ltac2-maintainers ########## Pretyper ########## /pretyping/ @coq/pretyper-maintainers /pretyping/vnorm.* @coq/vm-native-maintainers /pretyping/nativenorm.* @coq/vm-native-maintainers ########## Pretty printer ########## /printing/ @coq/extensible-syntax-maintainers ########## Proof infrastructure ########## /proofs/ @coq/engine-maintainers ########## STM ########## /stm/ @coq/stm-maintainers /test-suite/interactive/ @coq/stm-maintainers /test-suite/stm/ @coq/stm-maintainers /test-suite/vio/ @coq/stm-maintainers ########## Tactics ########## /tactics/ @coq/tactics-maintainers /tactics/class_tactics.* @coq/typeclasses-maintainers ########## Number ########## /interp/numTok.* @coq/number-maintainers /kernel/float64* @coq/number-maintainers /kernel/uint63* @coq/number-maintainers /plugins/syntax/g_number_string.mlg @coq/number-maintainers /plugins/syntax/int63_syntax_plugin.mllib @coq/number-maintainers /plugins/syntax/number.ml @coq/number-maintainers /plugins/syntax/number_string_notation_plugin.mllib @coq/number-maintainers /user-contrib/Ltac2/Int.v @coq/number-maintainers /test-suite/output/FloatExtraction* @coq/number-maintainers /test-suite/output/*Number* @coq/number-maintainers /test-suite/primitive/float/ @coq/number-maintainers /test-suite/primitive/sint63/ @coq/number-maintainers /test-suite/primitive/uint63/ @coq/number-maintainers /theories/Init/Decimal.v @coq/number-maintainers /theories/Init/Hexadecimal.v @coq/number-maintainers /theories/Init/Nat.v @coq/number-maintainers /theories/Init/Number.v @coq/number-maintainers /theories/*Arith/ @coq/number-maintainers /theories/Numbers/ @coq/number-maintainers /theories/Floats/ @coq/number-maintainers /theories/extraction/Extr*Nat* @coq/number-maintainers /theories/extraction/Extr*Z* @coq/number-maintainers 
/theories/extraction/ExtrOCamlFloats.v @coq/number-maintainers /theories/extraction/ExtrOCamlInt* @coq/number-maintainers ########## Tools ########## /tools/coqdoc/ @coq/coqdoc-maintainers /test-suite/coqdoc/ @coq/coqdoc-maintainers /tools/coqwc* @coq/coqdoc-maintainers /test-suite/coqwc/ @coq/coqdoc-maintainers /tools/coq_makefile* @coq/coq-makefile-maintainers /tools/CoqMakefile* @coq/coq-makefile-maintainers /test-suite/coq-makefile/ @coq/coq-makefile-maintainers /tools/TimeFileMaker.py @coq/coq-makefile-maintainers /tools/make-*-tim*.py @coq/coq-makefile-maintainers /tools/coqdep* @coq/legacy-build-maintainers @coq/build-maintainers /tools/coq_tex* @silene # Secondary maintainer @gares ########## Toplevel ########## /toplevel/ @coq/toplevel-maintainers /topbin/ @coq/toplevel-maintainers /sysinit/ @coq/toplevel-maintainers ########## Vernacular ########## /vernac/ @coq/vernac-maintainers /vernac/metasyntax.* @coq/parsing-maintainers /vernac/classes.* @coq/typeclasses-maintainers ########## Test suite ########## /test-suite/Makefile @coq/test-suite-maintainers /test-suite/README.md @coq/test-suite-maintainers /test-suite/report.sh @coq/test-suite-maintainers /test-suite/unit-tests/src/ @coq/test-suite-maintainers /test-suite/complexity/ @herbelin /test-suite/success/Compat*.v @coq/compat-maintainers ########## Developer tools ########## /dev/tools/ @coq/dev-tools-maintainers /dev/tools/update-compat.py @coq/compat-maintainers /test-suite/tools/update-compat/ @coq/compat-maintainers ########## Dune ########## /.ocamlinit @coq/build-maintainers *dune* @coq/build-maintainers *.opam @coq/build-maintainers coq-8.15.0/.github/ISSUE_TEMPLATE.md000066400000000000000000000010061417001151100164200ustar00rootroot00000000000000 #### Description of the problem #### Coq Version coq-8.15.0/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000026401417001151100175210ustar00rootroot00000000000000 Fixes / closes #???? - [ ] Added / updated **test-suite**. - [ ] Added **changelog**. - [ ] Added / updated **documentation**. - [ ] Documented any new / changed **user messages**. - [ ] Updated **documented syntax** by running `make -f Makefile.make doc_gram_rsts`. - [ ] Opened **overlay** pull requests. We have a number of channels to reach the user community and the development team: - Our [Zulip chat][zulip-link], for casual and high traffic discussions. - Our [Discourse forum][discourse-link], for more structured and easily browsable discussions and Q&A. - Our historical mailing list, the [Coq-Club](https://sympa.inria.fr/sympa/info/coq-club). See also [coq.inria.fr/community](https://coq.inria.fr/community.html), which lists several other active platforms. 
coq-8.15.0/.github/workflows/000077500000000000000000000000001417001151100157535ustar00rootroot00000000000000coq-8.15.0/.github/workflows/check-conflicts.yml000066400000000000000000000006151417001151100215370ustar00rootroot00000000000000name: "Check conflicts" on: [push] # Only on push because @coqbot already takes care of checking for # conflicts when PRs are opened or synchronized jobs: main: runs-on: ubuntu-latest steps: - uses: eps1lon/actions-label-merge-conflict@b8bf8341285ec9a4567d4318ba474fee998a6919 with: dirtyLabel: "needs: rebase" repoToken: "${{ secrets.GITHUB_TOKEN }}" coq-8.15.0/.github/workflows/ci.yml000066400000000000000000000052411417001151100170730ustar00rootroot00000000000000name: GitHub CI on: [push, pull_request] jobs: Windows: name: Windows runs-on: windows-latest strategy: fail-fast: false matrix: architecture: # - '32' - '64' steps: - name: Set git to use LF run: | git config --global core.autocrlf false git config --global core.eol lf - name: Git checkout uses: actions/checkout@v2 - name: System Information run: | .\dev\ci\platform\coq-pf-01-sysinfo.bat - name: Download Platform env: PLATFORM: "https://github.com/coq/platform/archive/dev-ci.zip" run: | .\dev\ci\platform\coq-pf-02-download.bat - name: Build Platform env: ARCH: ${{matrix.architecture}} shell: cmd run: | .\dev\ci\platform\coq-pf-03-build.bat - name: Build Installer env: ARCH: ${{matrix.architecture}} shell: cmd run: | .\dev\ci\platform\coq-pf-04-installer.bat - name: Upload Installer uses: actions/upload-artifact@v2 with: name: windows-installer path: artifacts if-no-files-found: error macOS: runs-on: macOS-10.15 steps: - uses: actions/checkout@v2 - name: Install system dependencies run: | brew install gnu-time opam gtksourceview3 adwaita-icon-theme expat libxml2 pip3 install macpack - name: Install OCaml dependencies run: | export PKG_CONFIG_PATH=/usr/local/opt/libffi/lib/pkgconfig opam init -a -j "$NJOBS" --compiler=ocaml-base-compiler.$COMPILER opam switch set ocaml-base-compiler.$COMPILER eval $(opam env) opam update opam install -j "$NJOBS" ocamlfind${FINDLIB_VER} ounit lablgtk3-sourceview3 zarith.1.10 dune.2.8.5 opam list env: COMPILER: "4.12.0" FINDLIB_VER: ".1.8.1" OPAMYES: "true" MACOSX_DEPLOYMENT_TARGET: "10.11" NJOBS: "2" - name: Build Coq run: | eval $(opam env) ./configure -prefix "$(pwd)/_install_ci" -warn-error yes -native-compiler no -coqide opt make -j "$NJOBS" byte make -j "$NJOBS" env: MACOSX_DEPLOYMENT_TARGET: "10.11" NJOBS: "2" - name: Install Coq run: | eval $(opam env) make install install-byte - name: Run Coq Test Suite run: | eval $(opam env) export OCAMLPATH="$(pwd)/_install_ci/lib":"$OCAMLPATH" make -j "$NJOBS" test-suite PRINT_LOGS=1 env: NJOBS: "2" coq-8.15.0/.github/workflows/stale.yml000066400000000000000000000005721417001151100176120ustar00rootroot00000000000000name: Stale PRs on: schedule: # Every workday at 2am - cron: '0 2 * * 1-5' jobs: stale_prs: # Do not run on forks (we want this request to happen only once every night) if: github.repository_owner == 'coq' runs-on: ubuntu-latest steps: - run: curl -d "coq:coq:${{ secrets.DAILY_SCHEDULE_SECRET }}" https://coqbot.herokuapp.com/check-stale-pr coq-8.15.0/.gitlab-ci.yml000066400000000000000000000502571417001151100150230ustar00rootroot00000000000000image: "$IMAGE" stages: - docker - build - deploy # When a job has no dependencies, it goes to stage 1. Otherwise, we # set "needs" to contain all transitive dependencies (with "artifacts: # false" when we don't want the artifacts). 
We include the transitive # dependencies due to gitlab bugs sometimes starting the job even if a # transitive dep failed, see #10699 / 7b59d8c9d9b2104de7162ec0e40f6182a6830046. # some default values variables: # Format: $IMAGE-V$DATE-$hash # The $hash is the first 10 characters of the md5 of the Dockerfile. e.g. # echo $(md5sum dev/ci/docker/bionic_coq/Dockerfile | head -c 10) CACHEKEY: "bionic_coq-V2021-10-01-7c5b78d50f" IMAGE: "$CI_REGISTRY_IMAGE:$CACHEKEY" # By default, jobs run in the base switch; override to select another switch OPAM_SWITCH: "base" # Used to select special compiler switches such as flambda, 32bits, etc... OPAM_VARIANT: "" GIT_DEPTH: "10" include: - local: '/dev/bench/gitlab-bench.yml' docker-boot: stage: docker image: docker:stable services: - docker:dind before_script: [] script: - dev/tools/check-cachekey.sh - docker login -u gitlab-ci-token -p "$CI_JOB_TOKEN" "$CI_REGISTRY" - cd dev/ci/docker/bionic_coq/ - if docker pull "$IMAGE"; then echo "Image prebuilt!"; exit 0; fi - docker build -t "$IMAGE" . - docker push "$IMAGE" except: variables: - $SKIP_DOCKER == "true" - $ONLY_WINDOWS == "true" tags: - docker before_script: - cat /proc/{cpu,mem}info || true - ls -a # figure out if artifacts are around - printenv -0 | sort -z | tr '\0' '\n' - declare -A switch_table - switch_table=( ["base"]="$COMPILER" ["edge"]="$COMPILER_EDGE" ) - opam switch set -y "${switch_table[$OPAM_SWITCH]}$OPAM_VARIANT" - eval $(opam env) - opam list - opam config list - dev/tools/check-cachekey.sh ################ GITLAB CACHING ###################### # - use artifacts between jobs # ###################################################### # TODO figure out how to build doc for installed Coq .build-template: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true artifacts: name: "$CI_JOB_NAME" paths: - _install_ci - config/Makefile - config/coq_config.py - config/coq_config.ml - config/dune.c_flags - dmesg.txt expire_in: 1 month script: - set -e - echo 'start:coq.clean' - make clean # ensure that `make clean` works on a fresh clone - echo 'end:coq.clean' - echo 'start:coq.config' - ./configure -warn-error yes -prefix "$(pwd)/_install_ci" $COQ_EXTRA_CONF - echo 'end:coq.config' - echo 'start:coq.build' - make -j "$NJOBS" world $EXTRA_TARGET - echo 'end:coq:build' - echo 'start:coq.install' - make install $EXTRA_INSTALL - echo 'end:coq.install' - set +e # Template for building Coq + stdlib, typical use: overload the switch .dune-template: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true script: # flambda can be pretty stack hungry, specially with -O3 # See also https://github.com/ocaml/ocaml/issues/7842#issuecomment-596863244 # and https://github.com/coq/coq/pull/11916#issuecomment-609977375 - ulimit -s 16384 - set -e - make -f Makefile.dune world - set +e - tar cfj _build.tar.bz2 _build variables: OPAM_SWITCH: edge OPAM_VARIANT: "+flambda" artifacts: name: "$CI_JOB_NAME" when: always paths: - _build/log - _build.tar.bz2 expire_in: 1 month .dune-ci-template: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true needs: - build:edge+flambda:dune:dev script: - tar xfj _build.tar.bz2 - set -e - echo 'start:coq.test' - make -f Makefile.dune "$DUNE_TARGET" - echo 'end:coq.test' - set +e variables: OPAM_SWITCH: edge OPAM_VARIANT: "+flambda" artifacts: when: always name: "$CI_JOB_NAME" expire_in: 2 months # every non build job must set "needs" otherwise all build # artifacts are used together and we may get some random Coq. 
To that # purpose, we add a spurious dependency `not-a-real-job` that must be # overridden otherwise the CI will fail. .doc-template: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true needs: - not-a-real-job script: - SPHINXENV='COQBIN="'"$PWD"'/_install_ci/bin/"' - make -j "$NJOBS" SPHINXENV="$SPHINXENV" SPHINX_DEPS= refman - make install-doc-sphinx artifacts: name: "$CI_JOB_NAME" paths: - _install_ci/share/doc/coq/ expire_in: 2 months # set "needs" when using .test-suite-template: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true needs: - not-a-real-job script: - cd test-suite - make clean # careful with the ending / - BIN=$(readlink -f ../_install_ci/bin)/ - LIB=$(readlink -f ../_install_ci/lib/coq)/ - export OCAMLPATH=$(readlink -f ../_install_ci/lib/):"$OCAMLPATH" - COQEXTRAFLAGS="${COQEXTRAFLAGS}" make -j "$NJOBS" BIN="$BIN" COQLIB="$LIB" all artifacts: name: "$CI_JOB_NAME.logs" when: on_failure paths: - test-suite/logs # Gitlab doesn't support yet "expire_in: never" so we use the instance default # expire_in: never # set "needs" when using .validate-template: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true needs: - not-a-real-job script: # exit 0: workaround for https://gitlab.com/gitlab-org/gitlab/issues/202505 # the validate:quick job is sometimes started before build:quick, without artifacts # we ignore these spurious errors so if the job fails it's a real error - cd _install_ci || exit 0 - find lib/coq/ -name '*.vo' -fprint0 vofiles - xargs -0 --arg-file=vofiles bin/coqchk -o -m -coqlib lib/coq/ > ../coqchk.log 2>&1 || touch coqchk.failed - tail -n 1000 ../coqchk.log # the log is too big for gitlab so pipe to a file and display the tail - "[ ! -f coqchk.failed ]" # needs quoting for yml syntax reasons artifacts: name: "$CI_JOB_NAME.logs" paths: - coqchk.log expire_in: 2 months .ci-template: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true script: - set -e - echo 'start:coq.test' - make -f Makefile.ci -j "$NJOBS" "${CI_JOB_NAME#*:}" - echo 'end:coq.test' - set +e artifacts: name: "$CI_JOB_NAME" paths: - _build_ci when: always needs: - build:base .ci-template-flambda: extends: .ci-template needs: - build:edge+flambda variables: OPAM_SWITCH: "edge" OPAM_VARIANT: "+flambda" .deploy-template: stage: deploy except: variables: - $ONLY_WINDOWS == "true" before_script: - which ssh-agent || ( apt-get update -y && apt-get install openssh-client -y ) - eval $(ssh-agent -s) - mkdir -p ~/.ssh - chmod 700 ~/.ssh - ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts - git config --global user.name "coqbot" - git config --global user.email "coqbot@users.noreply.github.com" build:base: extends: .build-template variables: COQ_EXTRA_CONF: "-native-compiler yes -coqide opt" # coqdoc for stdlib, until we know how to build it from installed Coq EXTRA_TARGET: "doc-stdlib" EXTRA_INSTALL: "install-doc-stdlib-html install-doc-printable" # no coqide for 32bit: libgtk installation problems build:base+32bit: extends: .build-template variables: OPAM_VARIANT: "+32bit" COQ_EXTRA_CONF: "-native-compiler yes" only: &full-ci variables: - $FULL_CI == "true" build:edge+flambda: extends: .build-template variables: OPAM_SWITCH: edge OPAM_VARIANT: "+flambda" COQ_EXTRA_CONF: "-native-compiler yes -coqide opt" build:edge+flambda:dune:dev: extends: .dune-template build:base+async: extends: .build-template variables: COQ_EXTRA_CONF: "-native-compiler yes -coqide opt" COQUSERFLAGS: "-async-proofs on" 
after_script: - dmesg > dmesg.txt timeout: 100m allow_failure: true # See https://github.com/coq/coq/issues/9658 only: variables: - $UNRELIABLE =~ /enabled/ artifacts: when: always build:quick: extends: .build-template variables: COQ_EXTRA_CONF: "-native-compiler no" QUICK: "1" after_script: - dmesg > dmesg.txt timeout: 100m allow_failure: true # See https://github.com/coq/coq/issues/9637 only: variables: - $UNRELIABLE =~ /enabled/ artifacts: when: always lint: stage: build except: variables: - $ONLY_WINDOWS == "true" script: dev/lint-repository.sh variables: GIT_DEPTH: "" # we need an unknown amount of history for per-commit linting OPAM_SWITCH: "edge" OPAM_VARIANT: "+flambda" pkg:opam: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true # OPAM will build out-of-tree so no point in importing artifacts script: - set -e - opam pin add --kind=path coq-core.dev . - opam pin add --kind=path coq-stdlib.dev . - opam pin add --kind=path coqide-server.dev . - opam pin add --kind=path coqide.dev . - set +e variables: OPAM_SWITCH: "edge" OPAM_VARIANT: "+flambda" only: *full-ci .nix-template: stage: build needs: [] except: variables: - $ONLY_WINDOWS == "true" interruptible: true image: nixorg/nix:latest # Minimal NixOS image which doesn't even contain git variables: GIT_STRATEGY: none # Required because we don't have git USER: root # Variable required by Cachix before_script: - cat /proc/{cpu,mem}info || true # Use current worktree as tmpdir to allow exporting artifacts in case of failure - export TMPDIR=$PWD # Install Cachix as documented at https://github.com/cachix/cachix - nix-env -iA cachix -f https://cachix.org/api/v1/install - cachix use coq artifacts: name: "$CI_JOB_NAME.logs" when: on_failure paths: - nix-build-coq.drv-0/*/test-suite/logs # Gitlab doesn't support yet "expire_in: never" so we use the instance default # expire_in: never # broken, see eg https://gitlab.com/coq/coq/-/jobs/1754045983 # pkg:nix:deploy: # extends: .nix-template # environment: # name: cachix # url: https://coq.cachix.org # script: # - nix-build https://coq.inria.fr/nix/toolbox --argstr job coq --arg override "{coq = coq:$CI_COMMIT_SHA;}" -K | cachix push coq # only: # refs: # - master # - /^v.*\..*$/ # variables: # - $CACHIX_AUTH_TOKEN # pkg:nix:deploy:channel: # extends: .deploy-template # environment: # name: cachix # url: https://coq.cachix.org # only: # refs: # Repeat conditions from pkg:nix:deploy # - master # - /^v.*\..*$/ # variables: # - $CACHIX_AUTH_TOKEN && $CACHIX_DEPLOYMENT_KEY # # if the $CACHIX_AUTH_TOKEN variable isn't set, the job it depends on doesn't exist # needs: # - pkg:nix:deploy # script: # - echo "$CACHIX_DEPLOYMENT_KEY" | tr -d '\r' | ssh-add - > /dev/null # # Remove all pr branches because they could be missing when we run git fetch --unshallow # - git branch --list 'pr-*' | xargs -r git branch -D # - git fetch --unshallow # - git branch -v # - git push git@github.com:coq/coq-on-cachix "${CI_COMMIT_SHA}":"refs/heads/${CI_COMMIT_REF_NAME}" pkg:nix: extends: .nix-template script: - nix-build "$CI_PROJECT_URL/-/archive/$CI_COMMIT_SHA.tar.gz" -K doc:refman: extends: .doc-template needs: - build:base doc:refman:dune: extends: .dune-ci-template variables: DUNE_TARGET: refman-html artifacts: paths: - _build/log - _build/default/doc/refman-html doc:refman-pdf:dune: extends: .dune-ci-template variables: DUNE_TARGET: refman-pdf artifacts: paths: - _build/log - _build/default/doc/refman-pdf # currently bugged: dune cleans up the glob files so no links # see #12699 
doc:stdlib:dune: extends: .dune-ci-template variables: DUNE_TARGET: stdlib-html artifacts: paths: - _build/log - _build/default/doc/stdlib/html doc:refman:deploy: extends: .deploy-template environment: name: deployment url: https://coq.github.io/ only: variables: - $DOCUMENTATION_DEPLOY_KEY needs: - doc:ml-api:odoc - doc:refman:dune - build:base script: - echo "$DOCUMENTATION_DEPLOY_KEY" | tr -d '\r' | ssh-add - > /dev/null - git clone git@github.com:coq/doc.git _deploy - rm -rf _deploy/$CI_COMMIT_REF_NAME/api - rm -rf _deploy/$CI_COMMIT_REF_NAME/refman - rm -rf _deploy/$CI_COMMIT_REF_NAME/stdlib - mkdir -p _deploy/$CI_COMMIT_REF_NAME - cp -rv _build/default/_doc/_html _deploy/$CI_COMMIT_REF_NAME/api - cp -rv _build/default/doc/refman-html _deploy/$CI_COMMIT_REF_NAME/refman - cp -rv _install_ci/share/doc/coq/html/stdlib _deploy/$CI_COMMIT_REF_NAME/stdlib - cd _deploy/$CI_COMMIT_REF_NAME/ - git add api refman stdlib - git commit -m "Documentation of branch “$CI_COMMIT_REF_NAME” at $CI_COMMIT_SHORT_SHA" - git push # TODO: rebase and retry on failure doc:ml-api:odoc: extends: .dune-ci-template variables: DUNE_TARGET: apidoc artifacts: paths: - _build/log - _build/default/_doc/ test-suite:base: extends: .test-suite-template needs: - build:base test-suite:base+32bit: extends: .test-suite-template needs: - build:base+32bit variables: OPAM_VARIANT: "+32bit" only: *full-ci test-suite:edge+flambda: extends: .test-suite-template needs: - build:edge+flambda variables: OPAM_SWITCH: edge OPAM_VARIANT: "+flambda" only: *full-ci test-suite:edge:dune:dev: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true needs: - build:edge+flambda:dune:dev script: - tar xfj _build.tar.bz2 - make -f Makefile.dune test-suite variables: OPAM_SWITCH: edge OPAM_VARIANT: "+flambda" artifacts: name: "$CI_JOB_NAME.logs" when: on_failure paths: - _build/default/test-suite/logs # Gitlab doesn't support yet "expire_in: never" so we use the instance default # expire_in: never .test-suite:ocaml+beta+dune-template: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true script: - opam switch create $OCAMLVER --empty - eval $(opam env) - opam repo add ocaml-beta https://github.com/ocaml/ocaml-beta-repository.git - opam update - opam install ocaml-variants=$OCAMLVER - opam install dune zarith - eval $(opam env) - export COQ_UNIT_TEST=noop - make -f Makefile.dune test-suite variables: OPAM_SWITCH: base artifacts: name: "$CI_JOB_NAME.logs" when: always paths: - _build/log - _build/default/test-suite/logs expire_in: 2 week allow_failure: true # test-suite:4.12+trunk+dune: # extends: .test-suite:ocaml+beta+dune-template # variables: # OCAMLVER: 4.12.0+trunk test-suite:base+async: extends: .test-suite-template needs: - build:base variables: COQEXTRAFLAGS: "-async-proofs on -async-proofs-cache force" timeout: 100m allow_failure: true only: variables: - $UNRELIABLE =~ /enabled/ validate:base: extends: .validate-template needs: - build:base validate:base+32bit: extends: .validate-template needs: - build:base+32bit variables: OPAM_VARIANT: "+32bit" only: *full-ci validate:edge+flambda: extends: .validate-template needs: - build:edge+flambda variables: OPAM_SWITCH: edge OPAM_VARIANT: "+flambda" only: *full-ci validate:quick: extends: .validate-template needs: - build:quick only: variables: - $UNRELIABLE =~ /enabled/ # Libraries are by convention the projects that depend on Coq # but not on its ML API library:ci-argosy: extends: .ci-template library:ci-autosubst: extends: .ci-template-flambda 
library:ci-bbv: extends: .ci-template library:ci-bedrock2: extends: .ci-template-flambda variables: NJOBS: "1" library:ci-category_theory: extends: .ci-template needs: - build:base - plugin:ci-equations library:ci-color: extends: .ci-template-flambda needs: - build:edge+flambda - plugin:ci-bignums library:ci-compcert: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-flocq - library:ci-menhir library:ci-coq_performance_tests: extends: .ci-template library:ci-coq_tools: extends: .ci-template library:ci-coqprime: extends: .ci-template-flambda needs: - build:edge+flambda - plugin:ci-bignums library:ci-coqtail: extends: .ci-template library:ci-coquelicot: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-mathcomp library:ci-cross_crypto: extends: .ci-template library:ci-engine_bench: extends: .ci-template library:ci-fcsl_pcm: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-mathcomp library:ci-fiat_crypto: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-coqprime - plugin:ci-bignums - plugin:ci-rewriter library:ci-fiat_crypto_legacy: extends: .ci-template-flambda allow_failure: true # See https://github.com/coq/coq/wiki/Coq-Call-2020-06-24#adding-back-fiat-crypto-legacy # We cannot use flambda due to # https://github.com/ocaml/ocaml/issues/7842, see # https://github.com/coq/coq/pull/11916#issuecomment-609977375 library:ci-fiat_crypto_ocaml: extends: .ci-template needs: - build:edge+flambda - library:ci-coqprime - plugin:ci-bignums - plugin:ci-rewriter - library:ci-fiat_crypto library:ci-flocq: extends: .ci-template-flambda library:ci-menhir: extends: .ci-template-flambda library:ci-oddorder: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-mathcomp library:ci-fourcolor: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-mathcomp library:ci-corn: extends: .ci-template-flambda needs: - build:edge+flambda - plugin:ci-bignums - library:ci-math_classes plugin:ci-gappa: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-flocq library:ci-geocoq: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-mathcomp library:ci-hott: extends: .ci-template library:ci-iris: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-autosubst library:ci-math_classes: extends: .ci-template-flambda needs: - build:edge+flambda - plugin:ci-bignums library:ci-mathcomp: extends: .ci-template-flambda library:ci-mczify: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-mathcomp library:ci-sf: extends: .ci-template library:ci-stdlib2: extends: .ci-template-flambda library:ci-tlc: extends: .ci-template library:ci-unimath: extends: .ci-template-flambda library:ci-verdi_raft: extends: .ci-template-flambda library:ci-vst: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-flocq library:ci-deriving: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-mathcomp # Plugins are by definition the projects that depend on Coq's ML API plugin:ci-aac_tactics: extends: .ci-template plugin:ci-itauto: extends: .ci-template plugin:ci-bignums: extends: .ci-template-flambda plugin:ci-coq_dpdgraph: extends: .ci-template plugin:ci-coqhammer: extends: .ci-template-flambda plugin:ci-elpi: extends: .ci-template plugin:ci-equations: extends: .ci-template plugin:ci-fiat_parsers: extends: .ci-template plugin:ci-metacoq: extends: .ci-template needs: - build:base - plugin:ci-equations plugin:ci-mtac2: extends: 
.ci-template plugin:ci-paramcoq: extends: .ci-template plugin:ci-perennial: extends: .ci-template-flambda plugin:plugin-tutorial: stage: build except: variables: - $ONLY_WINDOWS == "true" interruptible: true script: - ./configure -profile devel -warn-error yes - make -j "$NJOBS" plugin-tutorial plugin:ci-quickchick: extends: .ci-template-flambda needs: - build:edge+flambda - library:ci-mathcomp plugin:ci-reduction_effects: extends: .ci-template plugin:ci-relation_algebra: extends: .ci-template plugin:ci-rewriter: extends: .ci-template-flambda plugin:ci-vscoq: extends: .ci-template coq-8.15.0/.ocamlformat000066400000000000000000000005111417001151100146600ustar00rootroot00000000000000version=0.15.0 profile=ocamlformat # to enable a whole directory, put "disable=false" in dir/.ocamlformat # to enable specific files put them in .ocamlformat-enable disable=true module-item-spacing=compact sequence-style=terminator cases-exp-indent=2 field-space=loose exp-grouping=preserve break-cases=fit doc-comments=before coq-8.15.0/.ocamlinit000066400000000000000000000000141417001151100143310ustar00rootroot00000000000000#rectypes;; coq-8.15.0/CODE_OF_CONDUCT.md000066400000000000000000000151101417001151100151530ustar00rootroot00000000000000# Coq Code of Conduct # The Coq development team and the user community are made up of a mixture of professionals and volunteers from all over the world. Diversity brings variety of perspectives that can be very valuable, but it can also lead to communication issues and unhappiness. Therefore, we have a few ground rules that we ask people to adhere to. These rules apply equally to core developers (who should lead by example), occasional contributors and those seeking help and guidance. Their goal is that everyone feels safe and welcome when contributing to Coq or interacting with others in Coq related forums. These rules apply to all spaces managed by the Coq development team. This includes the GitHub repository, the Discourse forum, the Zulip chat, the mailing lists, physical events like Coq working groups and workshops, and any other forums created or managed by the development team which the community uses for communication. In addition, violations of these rules outside these spaces may affect a person's ability to participate within them. - **Be friendly and patient.** - **Be welcoming.** We strive to be a community that welcomes and supports people of all backgrounds and identities. This includes, but is not limited to people of any origin, color, status, educational level, gender identity, sexual orientation, age, culture and beliefs, and mental and physical ability. - **Be considerate.** Your work will be used by other people, and you in turn will depend on the work of others. Any decision you take will affect users and colleagues, and you should take those consequences into account when making decisions. - **Be respectful.** Not all of us will agree all the time, but disagreement is no excuse for poor behavior and poor manners. We might all experience some frustration now and then, but we cannot allow that frustration to turn into a personal attack. It's important to remember that a community where people feel uncomfortable or threatened is not a productive one. Members of the Coq development team and user community should be respectful when dealing with other members as well as with people outside the community. - **Be careful in the words that you choose.** Be kind to others. Do not insult or put down other participants. 
Harassment and other exclusionary behavior aren't acceptable.
  * Violent language, threats, or personal insults have no chance of resolving a dispute or of letting a discussion flourish. Worse, they can hurt durably, or generate durable fears. They are thus unwelcome.
  * Not everyone is comfortable with sexually explicit or violent material, even as a joke. In an online open multicultural world, you don't know who might be listening. So be cautious and responsible with your words.
  * Discussions are online and recorded for posterity; we all have a right to privacy, and online gossiping, as well as posting or threatening to post other people's personally identifying information, is prohibited.
- **Remember that what you write in a public online forum might be read by many people you don't know.** Consider what image your words will give to outsiders of the development team / the user community as a whole. Try to avoid references to private knowledge so as to be understandable by anyone.
- **Coq online forums are only for discussing Coq-related subjects.** Unrelated political discussions or long digressions are unwelcome, even for illustration or comparison purposes.
- **When we disagree, try to understand why.** Disagreements, both social and technical, happen all the time and Coq is no exception. It is important that we resolve disagreements and differing views constructively. Remember that we are different. Different people have different perspectives on issues. Being unable to understand why someone holds a viewpoint doesn't mean that they're wrong.
- **It is human to make errors, so please try not to take things personally.** Please do not answer aggressively to problematic behavior; simply signal the issue. If actions have been taken with regard to you (e.g. bans or simple demands of apology, of rephrasing, or of keeping personal beliefs or troubles private), please understand that they are not intended as aggression or punishment ― even if they feel harsh to you ― but as ways to enforce calm communication for the other participants and to give you the opportunity to change your behavior. We understand you may feel hurt, or maybe you had a bad day, so please take this opportunity to question yourself, cool down if necessary, and do not persist in the exact same behavior you have been reported for.

## Enforcement ##

If you believe someone is violating the code of conduct, we ask that you report it by emailing the Coq Code of Conduct enforcement team at or, at your discretion, any member of the team. Confidentiality with regard to the reporter of an incident will be maintained while dealing with it. In particular, you should seek support from the team instead of dealing by yourself with a behavior that you consider hurtful. This applies to members of the enforcement team as well, who shouldn't deal by themselves with violations in discussions in which they are a participant.

Depending on the violation, the team can choose to issue a private or public warning to the offender, request an apology, or ban them for a short or a long period from interacting on one or all of our forums. Except in cases of serious violations, the team will always try a pedagogical approach first (the offender does not necessarily realize immediately why their behavior is wrong). We consider short bans to form part of the pedagogical approach, especially when they come with explanatory comments, as this can give some time to the offender to calm down and think about their actions.
The members of the team are currently: - Matthieu Sozeau - Théo Zimmermann ## Questions? ## If you have questions, feel free to write to . ## Attribution ## This text is adapted from the [Django Code of Conduct][django-code-of-conduct] which itself was adapted from the Speak Up! Community Code of Conduct. ## License ## Creative Commons License
This work is licensed under a Creative Commons Attribution 4.0 International License . [django-code-of-conduct]: https://web.archive.org/web/20180714161115/https://www.djangoproject.com/conduct/ coq-8.15.0/CONTRIBUTING.md000066400000000000000000001773341417001151100146260ustar00rootroot00000000000000# Guide to contributing to Coq # ## Foreword ## As with any documentation, this guide is most useful if it's promptly updated to reflect changes in processes, development tools, or the Coq ecosystem. If you notice anything inaccurate or outdated, please signal it in a new issue, or fix it in a new pull request. If you find some parts are not sufficiently clear, you may open an issue as well. ## Table of contents ## - [Introduction](#introduction) - [Contributing to the ecosystem](#contributing-to-the-ecosystem) - [Asking and answering questions](#asking-and-answering-questions) - [Writing tutorials and blog posts](#writing-tutorials-and-blog-posts) - [Contributing to the wiki](#contributing-to-the-wiki) - [Creating and maintaining Coq packages](#creating-and-maintaining-coq-packages) - [Distribution of Coq packages](#distribution-of-coq-packages) - [Support for plugin and library authors](#support-for-plugin-and-library-authors) - [Standard libraries](#standard-libraries) - [Maintaining existing packages in coq-community](#maintaining-existing-packages-in-coq-community) - [Contributing to the editor support packages](#contributing-to-the-editor-support-packages) - [Contributing to the website or the package archive](#contributing-to-the-website-or-the-package-archive) - [Other ways of creating content](#other-ways-of-creating-content) - [Issues](#issues) - [Reporting a bug, requesting an enhancement](#reporting-a-bug-requesting-an-enhancement) - [Beta testing](#beta-testing) - [Helping triage existing issues](#helping-triage-existing-issues) - [Code changes](#code-changes) - [Using GitHub pull requests](#using-github-pull-requests) - [Fixing bugs and performing small changes](#fixing-bugs-and-performing-small-changes) - [Proposing large changes: Coq Enhancement Proposals](#proposing-large-changes-coq-enhancement-proposals) - [Seeking early feedback on work-in-progress](#seeking-early-feedback-on-work-in-progress) - [Taking feedback into account](#taking-feedback-into-account) - [Understanding automatic feedback](#understanding-automatic-feedback) - [Understanding reviewers' feedback](#understanding-reviewers-feedback) - [Fixing your branch](#fixing-your-branch) - [Improving the official documentation](#improving-the-official-documentation) - [Contributing to the standard library](#contributing-to-the-standard-library) - [Becoming a maintainer](#becoming-a-maintainer) - [Reviewing pull requests](#reviewing-pull-requests) - [Collaborating on a pull request](#collaborating-on-a-pull-request) - [Merging pull requests](#merging-pull-requests) - [Additional notes for pull request reviewers and assignees](#additional-notes-for-pull-request-reviewers-and-assignees) - [Joining / leaving maintainer teams](#joining--leaving-maintainer-teams) - [Core development team](#core-development-team) - [Release management](#release-management) - [Packaging Coq](#packaging-coq) - [Additional resources](#additional-resources) - [Developer documentation](#developer-documentation) - [Where to find the resources](#where-to-find-the-resources) - [Building Coq](#building-coq) - [Continuous integration](#continuous-integration) - [Code owners, issue and pull request templates](#code-owners-issue-and-pull-request-templates) 
- [Style guide](#style-guide)
- [OCaml resources](#ocaml-resources)
- [Git documentation, tips and tricks](#git-documentation-tips-and-tricks)
- [GitHub documentation, tips and tricks](#github-documentation-tips-and-tricks)
- [GitLab documentation, tips and tricks](#gitlab-documentation-tips-and-tricks)
- [Merge script dependencies](#merge-script-dependencies)
- [Coqbot](#coqbot)
- [Online forum and chat to talk to developers](#online-forum-and-chat-to-talk-to-developers)
- [Coq calls](#coq-calls)
- [Coq remote working groups](#coq-remote-working-groups)
- [Coq Users and Developers Workshops](#coq-users-and-developers-workshops)

## Introduction ##

Thank you for your interest in contributing to Coq! There are many ways to contribute, and we appreciate all of them.

People often begin by making small contributions, and contributions to the ecosystem, before working their way up incrementally to the core parts of the system, starting to propose larger changes, or taking an active role in maintaining the system. This is the way this contributing guide is organized. However, it is by no means necessary that you go through these steps in this order. Feel free to use this guide as a reference and quickly jump to the part that is most relevant to you at the current time.

We want to make sure that contributing to Coq is a fun and positive experience for everyone, so please make sure you read and abide by our [Code of Conduct][Code-of-conduct].

## Contributing to the ecosystem ##

In this section, we present all the ways to contribute to Coq outside of the Coq repository itself.

### Asking and answering questions ###

One very important way of contributing is by asking and answering questions, in order to create a body of easily browsable, problem-oriented, additional documentation. There are two main platforms for this purpose:

- [Stack Overflow][Stack-Overflow] (or more generally the [Stack Exchange][Stack-Exchange] platforms, as some Coq questions may be asked on other sites, such as TCS Stack Exchange);
- Our [Discourse forum][Discourse].

In particular, our Discourse forum has several non-English categories that have yet to find their audience, so do not hesitate to advertise them to people you know who might not be at ease with English.

Other active places to answer questions include the [Coq-Club][] mailing list, and the Coq IRC channel (`irc://irc.freenode.net/#coq`).

### Writing tutorials and blog posts ###

Writing about Coq, in the form of tutorials or blog posts, is also a very important contribution. In particular, it can help new users get interested in Coq and learn about it, and existing users learn about advanced features. Our official resources, such as the [reference manual][refman], are not suited for learning Coq, but serve as reference documentation to which you can link from your tutorials.

The Coq website has a page listing known [tutorials][Coq-documentation] and the [wiki][] home page contains a list too. You can expand the former through a pull request on the [Coq website repository][Coq-website-repository], while the latter can be edited directly by anyone with a GitHub account.

At the current time, we do not have a way of aggregating blog posts on a single page (like [OCaml planet][OCaml-planet]), but this would probably be something useful to get, so do not hesitate if you want to create it. Some people use [Reddit][] for this purpose.
### Contributing to the wiki ### Coq's [wiki][] is an informal source of additional documentation which anyone with a GitHub account can edit directly. In particular, it contains the Coq [FAQ][] which has not seen so many updates in the recent years. You should feel free to fix it, expand it, and even refactor it (if you are not sure if some changes would be welcome, you can open an issue to discuss them before performing them). People who watch the Coq repository will see recent wiki edits in their GitHub feed. It is recommended to review them *a posteriori* to check no mistake was introduced. The wiki is also a standard git repository, so people can follow the changes using any standard git tool. Coq's wiki is formatted using GitHub's flavored Markdown, with some wiki-specific extensions. See: - [GitHub's Markdown guide][GitHub-markdown] - [GitHub's wiki extensions][GitHub-wiki-extensions] ### Creating and maintaining Coq packages ### *Note: this sub-section is about packages extending Coq, such as plugins or libraries. A different, but also very valuable, contribution is to package Coq for your preferred package manager (see [Packaging Coq](#packaging-coq)).* Sharing reusable assets in the form of new libraries, plugins, and tools is great so that others can start building new things on top. Having an extensive and healthy package ecosystem will be key to the success of Coq. #### Distribution of Coq packages #### You can distribute your library or plugin through the [Coq package index][Coq-package-index]. Tools can be advertised on the [tools page][tools-website] of the Coq website, or the [tools page][tools-wiki] of the wiki. #### Support for plugin and library authors #### You can find advice and best practices about maintaining a Coq project on the [coq-community wiki][coq-community-wiki]. Learn how to write a Coq plugin, and about best practices, in the Coq [plugin tutorial][plugin-tutorial]. This tutorial is still a work in progress, so do not hesitate to expand it, or ask questions. If you want quick feedback on best practices, or how to talk to the Coq API, a good place to hang out is the [Coq devs & plugin devs stream][Zulip-dev] of our Zulip chat. Finally, we strongly encourage authors of plugins to submit their plugins to join Coq's continuous integration (CI) early on. Indeed, the Coq API gets continuously reworked, so this is the best way of ensuring your plugin stays compatible with new Coq versions, as this means Coq developers will fix your plugin for you. Learn more about this in the [CI README (user part)][CI-README-users]. Pure Coq libraries are also welcome to join Coq's CI, especially if they test underused / undertested features. #### Standard libraries #### There are many general purpose Coq libraries, so before you publish yours, consider whether you could contribute to an existing one instead (either the official [standard library](#contributing-to-the-standard-library), or one of the many [alternative standard libraries][other-standard-libraries]). #### Maintaining existing packages in coq-community #### Some Coq packages are not maintained by their initial authors anymore (for instance if they've moved on to new jobs or new projects) even if they were useful, or interesting. The coq-community organization is a place for volunteers to take over the maintenance of such packages. If you want to contribute by becoming a maintainer, there is [a list of packages waiting for a maintainer][coq-community-maintainer-wanted]. 
You can also propose a package that is not listed. Find out more about coq-community in [the manifesto's README][coq-community-manifesto]. ### Contributing to the editor support packages ### Besides CoqIDE, whose sources are available in this repository, and to which you are welcome to contribute, there are a number of alternative user interfaces for Coq, more often as an editor support package. Here are the URLs of the repositories of the various editor support packages: - Proof-General (Emacs major mode) - Company-coq (Emacs minor mode) - Coqtail (Vim) - VsCoq Reloaded (VsCode) And here are alternative user interfaces to be run in the web browser: - JsCoq (Coq executed in your browser) - Jupyter kernel for Coq Each of them has their own contribution process. ### Contributing to the website or the package archive ### The website and the package archive have their own repositories: - - You can contribute to them by using issues and pull requests on these repositories. These repositories should get their own contributing guides, but they don't have any at the time of writing this. ### Other ways of creating content ### There are many other ways of creating content and making the Coq community thrive, including many which we might not have thought about. Feel free to add more references / ideas to this sub-section. You can tweet about Coq, you can give talks about Coq both in academic, and in non-academic venues (such as developer conferences). [Codewars][] is a platform where people can try to solve some programming challenges that were proposed by other community members. Coq is supported and the community is eager to get more challenges. ## Issues ## ### Reporting a bug, requesting an enhancement ### Bug reports are enormously useful to identify issues with Coq; we can't fix what we don't know about. To report a bug, please open an issue in the [Coq issue tracker][Coq-issue-tracker] (you'll need a GitHub account). You can file a bug for any of the following: - An anomaly. These are always considered bugs, so Coq will even ask you to file a bug report! - An error you didn't expect. If you're not sure whether it's a bug or intentional, feel free to file a bug anyway. We may want to improve the documentation or error message. - Missing or incorrect documentation. It's helpful to track where the documentation should be improved, so please file a bug if you can't find or don't understand some bit of documentation. - An error message that wasn't as helpful as you'd like. Bonus points for suggesting what information would have helped you. - Bugs in CoqIDE should also be filed in the [Coq issue tracker][Coq-issue-tracker]. Bugs in the Emacs plugin should be filed against [ProofGeneral][ProofGeneral-issues], or against [company-coq][company-coq-issues] if they are specific to company-coq features. It would help if you search the existing issues before reporting a bug. This can be difficult, so consider it extra credit. We don't mind duplicate bug reports. If unsure, you are always very welcome to ask on our [Discourse forum][Discourse] or [Zulip chat][Zulip] before, after, or while writing a bug report. It is better if you can test that your bug is still present in the current testing or development version of Coq (see the [next sub-section](#beta-testing)) before reporting it, but if you can't, it should not discourage you from reporting it. When it applies, it's extremely helpful for bug reports to include sample code, and much better if the code is self-contained and complete. 
It's not necessary to minimize your bug or identify precisely where the issue is, since someone else can often do this if you include a complete example. We tend to include the code in the bug description itself, but if you have a very large input file then you can add it as an attachment. If you want to minimize your bug (or help minimize someone else's) for more extra credit, then you can use the [Coq bug minimizer][JasonGross-coq-tools] (specifically, the bug minimizer is the `find-bug.py` script in that repo). ### Beta testing ### Coq gets a new major release about every six months. Before a new major version is released, there is a beta-testing period, which usually lasts one month (see the [release plan][release-plan]). You can help make the upcoming release better, by testing the beta version, and trying to port your projects to it. You should report any bug you notice, but also any change of behavior that is not documented in the changelog. Then Coq developers will be able to check if what you reported is a regression that needs to be fixed, or an expected change that needs to be mentioned in the changelog. You can go even further by using the development version (`master` branch) of Coq on a day by day basis, and report problems as soon as you notice them. If you wish to do so, the easiest way to install Coq is through opam (using the `dev` version of the Coq package, available in the repository) or through [Nix][]. The documentation of the development version is [available online][master-doc], including the [unreleased changelog][unreleased-changelog]. ### Helping triage existing issues ### Coq has too many bug reports for its core developers alone to manage. You can help a lot by: - confirming that reported bugs are still active with the current version of Coq; - determining if the bug is a regression (new, and unexpected, behavior from a recent Coq version); - more generally, by reproducing a bug, on another system, configuration, another version of Coq, and by documenting what you did; - giving a judgement about whether the reported behavior is really a bug, or is expected but just improperly documented, or expected and already documented; - producing a trace if it is relevant and you know how to do it; - producing another example exhibiting the same bug, or minimizing the initial example using the bug minimizer mentioned above; - using `git bisect` to find the commit that introduced a regression; - fixing the bug if you have an idea of how to do so (see the [following section](#code-changes)). Once you have some experience with the Coq issue tracker, you can request to join the **@coq/contributors** team (any member of the **@coq/core** team can do so using [this link][add-contributor]). Being in this team will grant you the following access: - **Updating labels:** every open issue and pull request should ideally get one or several `kind:` and `part:` labels. In particular, valid issues should generally get either a `kind: bug` (the reported behavior can indeed be considered a bug, this can be completed with the `kind: anomaly`, and `kind: regression` labels), `kind: documentation` (e.g. if a reported behavior is expected but improperly documented), `kind: enhancement` (a request for enhancement of an existing feature), or `kind: feature` label (an idea for a new feature). - **Creating new labels:** if you feel a `part:` label is missing, do not hesitate to create it. If you are not sure, you may discuss it with other contributors and developers on [Zulip][Zulip-dev] first. 
- **Closing issues:** if a bug cannot be reproduced anymore, is a duplicate, or should not be considered a bug report in the first place, you should close it. When doing so, try putting an appropriate `resolved:` label to indicate the reason. If the bug has been fixed already, and you know in which version, you can add a milestone to it, even a milestone that's already closed, instead of a `resolved:` label. When closing a duplicate issue, try to add all the additional info that could be gathered to the original issue. - **Editing issue titles:** you may want to do so to better reflect the current understanding of the underlying issue. - **Editing comments:** feel free to do so to fix typos and formatting only (in particular, some old comments from the Bugzilla era or before are not properly formatted). You may also want to edit the OP's initial comment (a.k.a. body of the issue) to better reflect the current understanding of the issue, especially if the discussion is long. If you do so, only add to the original comment, and mark it clearly with an `EDITED by @YourNickname:`. - **Hiding comments:** when the discussion has become too long, this can be done to hide irrelevant comments (off-topic, outdated or resolved sub-issues). - **Deleting things:** please don't delete any comment or issue, our policy doesn't allow for comments to be deleted, unless done by the community moderators. You should hide them instead. An audit log is available to track deleted items if needed (but does not allow recovering them). - **Pushing a branch or a tag to the main repository:** please push changes to your own fork rather than the main repository. (Branches pushed to the main repository will be removed promptly and without notice.) Yet to be fully specified: use of priority, difficulty, `help wanted`, and `good first issue` labels, milestones, assignments, and GitHub projects. ## Code changes ## ### Using GitHub pull requests ### If you want to contribute a documentation update, bug fix or feature yourself, pull requests (PRs) on the [GitHub repository][coq-repository] are the way to contribute directly to the Coq implementation (all changes, even the smallest changes from core developers, go through PRs). You will need to create a fork of the repository on GitHub and push your changes to a new "topic branch" in that fork (instead of using an existing branch name like `master`). PRs should always target the `master` branch. Make sure that your copy of this branch is up-to-date before starting to do your changes, and that there are no conflicts before submitting your PR. If you need to fix conflicts, we generally prefer that you rebase your branch on top of `master`, instead of creating a merge commit. If you are not familiar with `git` or GitHub, Sections [Git documentation, tips and tricks](#git-documentation-tips-and-tricks), and [GitHub documentation, tips and tricks](#github-documentation-tips-and-tricks), should be helpful (and even if you are, you might learn a few tricks). Once you have submitted your PR, it may take some time to get feedback, in the form of reviews from maintainers, and test results from our continuous integration system. Our code owner system will automatically request reviews from relevant maintainers. Then, one maintainer should self-assign the PR (if that does not happen after a few days, feel free to ping the maintainers that were requested a review). 
The PR assignee will then become your main point of contact for handling the PR: they should ensure that everything is in order and merge when it is the case (you can ping them if the PR is ready from your side but nothing happens for a few days).

After your PR is accepted and merged, it may get backported to a release branch if appropriate, and will eventually make it to a release. You do not have to worry about this: it is the role of the assignee and the release manager to do so (see Section [Release management](#release-management)). The milestone should give you an indication of when to expect your change to be released (this could be several months after your PR is merged). That said, you can start using the latest Coq `master` branch to take advantage of all the new features, improvements, and fixes.

#### Fixing bugs and performing small changes ####

Before fixing a bug, it is best to check that it was reported before:

- If it was already reported and you intend to fix it, self-assign the issue (if you have the permission), or leave a comment marking your intention to work on it (and a contributor with write access may then assign the issue to you).
- If the issue already has an assignee, you should check with them whether they still intend to work on it. If the assignment is several weeks, months, or even years (!) old, there is a good chance that it does not reflect their current priorities.
- If the bug has not been reported before, it can be a good idea to open an issue about it, while stating that you are preparing a fix. The issue can be the place to discuss the bug itself, while the PR will be the place to discuss your proposed fix.

It is generally a good idea to add a regression test to the test-suite. See the test-suite [README][test-suite-README] for how to do so.

Small fixes do not need any documentation or changelog update. New or updated user-facing features, and major bug fixes, do. See above on how to contribute to the documentation, and the README in [`doc/changelog`][user-changelog] for how to add a changelog entry.

#### Proposing large changes: Coq Enhancement Proposals ####

You are always welcome to open a PR for a change of any size. However, you should be aware that the larger the change, the higher the chances it will take a very long time to review, and possibly never get merged. So it is recommended that, before spending a lot of time coding, you seek feedback from maintainers to see if your change would be supported, and if they have recommendations about its implementation. You can do this informally by opening an issue, or more formally by producing a design document as a [Coq Enhancement Proposal][CEP].

Another recommendation is that you do not put several unrelated changes in the same PR (even if you produced them together). In particular, make sure you split bug fixes into separate PRs when this is possible. More generally, smaller PRs, or PRs changing fewer components, are more likely to be reviewed and merged promptly.

#### Seeking early feedback on work-in-progress ####

You should always feel free to open your PR before the documentation, changelog entry and tests are ready. That's the purpose of the checkboxes in the PR template, which you can leave unticked. This can be a way of getting reviewers' approval before spending time on writing the documentation (but you should still do it before your PR can be merged).
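Speaking of tests: as mentioned in the "Fixing bugs and performing small changes" section above, most bug fixes should come with a regression test. The following is only a hedged sketch; the directory layout, file name, and issue number below are illustrative assumptions, and the test-suite [README][test-suite-README] is the authoritative reference.

``` shell
# Hypothetical regression test: a small Coq file that used to fail and should now succeed.
# Check test-suite/README.md for the actual directory and naming conventions.
cat > test-suite/bugs/bug_12345.v <<'EOF'
(* Regression test for issue #12345 (illustrative number): this used to fail. *)
Check (fun n : nat => n).
EOF

# Run the test-suite locally before opening the PR, as recommended in the CI section below:
make test-suite
```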
If even the implementation is not ready but you are still looking for early feedback on your code changes, please use the [draft PR](#draft-pull-requests) mechanism. If you are looking for feedback on the design of your change, rather than on its implementation, then please refrain from opening a PR. You may open an issue to start a discussion, or create a [Coq Enhancement Proposal][CEP] if you have a clear enough view of the design to write a document about it. ### Taking feedback into account ### #### Understanding automatic feedback #### When you open or update a PR, you get automatically some feedback: we have a bot whose job will be to push a branch to our GitLab mirror to run some continuous integration (CI) tests. The tests will run on a commit merging your branch with the base branch, so if there is a conflict and this merge cannot be performed automatically, the bot will put a `needs: rebase` label, and the tests won't run. Otherwise, a large suite of tests will be run on GitLab, plus some additional tests on GitHub Actions for Windows and macOS compatibility. If a test fails on GitLab, you will see in the GitHub PR interface, both the failure of the whole pipeline, and of the specific failed job. Most of these failures indicate problems that should be addressed, but some can still be due to synchronization issues out of your control. In particular, if you get a failure in one of the tested plugins but you didn't change the Coq API, it is probably a transient issue and you shouldn't have to worry about it. In case of doubt, ask the reviewers. ##### Test-suite failures ##### If you broke the test-suite, you should get many failed jobs, because the test-suite is run multiple times in various settings. You should get the same failure locally by running `make test-suite` or `make -f Makefile.dune test-suite`. It's helpful to run this locally and ensure the test-suite is not broken before submitting a PR as this will spare a lot of runtime on distant machines. To learn more about the test-suite, you should refer to its [README][test-suite-README]. ##### Linter failures ##### We have a linter that checks a few different things: - **Every commit can build.** This is an important requirement to allow the use of `git bisect` in the future. It should be possible to build every commit, and in principle even the test-suite should pass on every commit (but this isn't tested in CI because it would take too long). A good way to test this is to use `git rebase master --exec "make -f Makefile.dune check"`. - **No tabs or end-of-line spaces on updated lines**. We are trying to get rid of all tabs and all end-of-line spaces from the code base (except in some very special files that need them). This checks not only that you didn't introduce new ones, but also that updated lines are clean (even if they were there before). You can avoid worrying about tabs and end-of-line spaces by installing our [pre-commit git hook][git-hook], which will fix these issues at commit time. Running `./configure` once will install this hook automatically unless you already have a pre-commit hook installed. If you are encountering these issues nonetheless, you can fix them by rebasing your branch with `git rebase --whitespace=fix`. - **All files should end with a single newline**. See the section [Style guide](#style-guide) for additional style recommendations. - **Documented syntax is up-to-date**. If you update the grammar, you should run `make -f Makefile.make doc_gram_rsts` to update the documented syntax. 
You should then update the text describing the syntax in the documentation and commit the changes. In some cases, the documented syntax is edited to make the documentation more readable. In this case, you may have to edit `doc/tools/docgram/common.edit_mlg` to make `doc_gram_rsts` pass. See [doc_grammar's README][doc_gram] for details. Note that in the case where you added new commands or tactics, you will have to manually insert them in the documentation, the tool won't do that for you, only check that what you documented is consistent with the parser. You may run the linter yourself with `dev/lint-repository.sh`. ##### Plugin failures ##### If you did change the Coq API, then you may have broken a plugin. After ensuring that the failure comes from your change, you will have to provide a fix to the plugin, and the PR assignee will have to ensure that this fix is merged in the plugin simultaneously with your PR on the Coq repository. If your changes to the API are not straightforward, you should also document them in `dev/doc/changes.md`. The [CI README (developer part)][CI-README-developers] contains more information on how to fix plugins, test and submit your changes, and how you can anticipate the results of the CI before opening a PR. ##### Library failures ##### Such a failure can indicate either a bug in your branch, or a breaking change that you introduced voluntarily. All such breaking changes should be properly documented in the [user changelog][user-changelog]. Furthermore, a backward-compatible fix should be found, properly documented in the changelog when non-obvious, and this fix should be merged in the broken projects *before* your PR to the Coq repository can be. Note that once the breaking change is well understood, it should not feel like it is your role to fix every project that is affected: as long as reviewers have approved and are ready to integrate your breaking change, you are entitled to (politely) request project authors / maintainers to fix the breakage on their own, or help you fix it. Obviously, you should leave enough time for this to happen (you cannot expect a project maintainer to allocate time for this as soon as you request it) and you should be ready to listen to more feedback and reconsider the impact of your change. #### Understanding reviewers' feedback #### The reviews you get are highly dependent on the kind of changes you did. In any case, you should always remember that reviewers are friendly volunteers that do their best to help you get your changes in (and should abide by our [Code of Conduct][Code-of-Conduct]). But at the same time, they try to ensure that code that is introduced or updated is of the highest quality and will be easy to maintain in the future, and that's why they may ask you to perform small or even large changes. If you need a clarification, do not hesitate to ask. Here are a few labels that reviewers may add to your PR to track its status. In general, this will come in addition to comments from the reviewers, with specific requests. - [needs: rebase][needs-rebase] indicates the PR should be rebased on top of the latest version of the base branch (usually `master`). We generally ask you to rebase only when there are merge conflicts or if the PR has been opened for a long time and we want a fresh CI run. - [needs: fixing][needs-fixing] indicates the PR needs a fix, as discussed in the comments. - [needs: documentation][needs-documentation] indicates the PR introduces changes that should be documented before it can be merged. 
- [needs: changelog entry][needs-changelog] indicates the PR introduces changes that should be documented in the [user changelog][user-changelog].
- [needs: benchmarking][needs-benchmarking] and [needs: testing][needs-testing] indicate the PR needs testing beyond what the test suite can handle. For example, performance benchmarking is currently performed with a different infrastructure ([documented in the wiki][Benchmarking]). Unless some followup is specifically requested, you aren't expected to do this additional testing.

More generally, such labels should come with a description that should allow you to understand what they mean.

#### Fixing your branch ####

If you have changes to perform before your PR can be merged, you might want to do them in separate commits at first to ease the reviewers' task, but we generally appreciate that they are squashed with the commits that they fix before merging. This is especially true of commits fixing previously introduced bugs or failures.

### Improving the official documentation ###

The documentation is usually a good place to start contributing, because you can get used to the pull request submission and review process without needing to learn about the source code of Coq at the same time.

The official documentation consists of two components:

- the [reference manual][refman],
- the [documentation of the standard library][stdlib-doc].

The sources of the reference manual are located in the [`doc/sphinx`][refman-sources] directory. They are written in rst (Sphinx) format with some Coq-specific extensions, which are documented in the [README][refman-README] in the above directory. This README was written to be read from beginning to end. As soon as your edits to the documentation go beyond changing the textual content, we strongly encourage you to read this document.

The documentation of the standard library is generated with [coqdoc][coqdoc-documentation] from the comments in the sources of the standard library.

The [README in the `doc` directory][doc-README] contains more information about the documentation's build dependencies and the `make` targets.

You can browse through the list of open documentation issues using the [kind: documentation][kind-documentation] label, or the [user documentation GitHub project][documentation-github-project] (you can look in particular at the "Writing" and "Fixing" columns).

### Contributing to the standard library ###

Contributing to the standard library is also made easier by not having to learn about Coq's internals and its implementation language. Due to the compatibility constraints created by the many projects that depend on it, proposals of breaking changes, such as changing a definition, may frequently be rejected, or at the very least might take a long time before getting approved and merged. This does not mean that you cannot try. On the other hand, contributing new lemmas about existing definitions and cleaning up existing proofs are likely to be accepted. Contributing new operations on existing types is also likely to be accepted in many cases. In case of doubt, ask in an issue before spending too much time preparing your PR.

If you create a new file, it needs to be listed in `doc/stdlib/index-list.html`. Add coqdoc comments to extend the [standard library documentation][stdlib-doc]. See the [coqdoc documentation][coqdoc-documentation] to learn more.
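To make these last points concrete, here is a hedged sketch of the steps for adding a new file to the standard library; the file name and location below are purely illustrative, and the conventions of the surrounding `theories/` subdirectory should be followed.

``` shell
# Hypothetical new stdlib file; (** ... *) comments are picked up by coqdoc
# and end up in the generated standard library documentation.
cat > theories/Lists/ListFacts.v <<'EOF'
(** * ListFacts : a few extra lemmas about lists (illustrative placeholder). *)
Require Import List.
EOF

# As stated above, a new file must also be listed in the stdlib index:
$EDITOR doc/stdlib/index-list.html
```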
## Becoming a maintainer ## ### Reviewing pull requests ### You can start reviewing PRs as soon as you feel comfortable doing so (anyone can review anything, although some designated reviewers will have to give a final approval before a PR can be merged, as is explained in the next sub-section). Reviewers should ensure that the code that is changed or introduced is in good shape and will not be a burden to maintain, is unlikely to break anything, or the compatibility-breakage has been identified and validated, includes documentation, changelog entries, and test files when necessary. Reviewers can use labels, or change requests to further emphasize what remains to be changed before they can approve the PR. Once reviewers are satisfied (regarding the part they reviewed), they should formally approve the PR, possibly stating what they reviewed. That being said, reviewers should also make sure that they do not make the contributing process harder than necessary: they should make it clear which comments are really required to perform before approving, and which are just suggestions. They should strive to reduce the number of rounds of feedback that are needed by posting most of their comments at the same time. If they are opposed to the change, they should clearly say so from the beginning to avoid the contributor spending time in vain. Furthermore, when reviewing a first contribution (GitHub highlights first-time contributors), be extra careful to be welcoming, whatever the decision on the PR is. When approving a PR, consider thanking the newcomer for their contribution, even if it is a very small one (in cases where, if the PR had come from a regular contributor, it would have felt OK to just merge it without comment). When rejecting a PR, take some extra steps to explain the reasons, so that it doesn't feel hurtful. Don't hesitate to still thank the contributor and possibly redirect them to smaller tasks that might be more appropriate for a newcomer. #### Collaborating on a pull request #### Beyond making suggestions to a PR author during the review process, you may want to collaborate further by checking out the code, making changes, and pushing them. There are two main ways of doing this: - **Pull requests on pull requests:** You can checkout the PR branch (GitHub provides the link to the remote to pull from and the branch name on the top and the bottom of the PR discussion thread), checkout a new personal branch from there, do some changes, commit them, push to your fork, and open a new PR on the PR author's fork. - **Pushing to the PR branch:** If the PR author has not unchecked the "Allow edit from maintainers" checkbox, and you have write-access to the repository (i.e. you are in the **@coq/contributors** team), then you can also push (and even force-push) directly to the PR branch, on the main author's fork. Obviously, don't do it without coordinating with the PR author first (in particular, in case you need to force-push). When several people have co-authored a single commit (e.g. because someone fixed something in a commit initially authored by someone else), this should be reflected by adding ["Co-authored-by:" tags][GitHub-co-authored-by] at the end of the commit message. The line should contain the co-author name and committer e-mail address. ### Merging pull requests ### Our [CODEOWNERS][] file associates a team of maintainers to each component. 
When a PR is opened (or a [draft PR](#draft-pull-requests) is marked as ready for review), GitHub will automatically request reviews from the maintainer teams of the affected components. As soon as this happens, one available member of a team whose review was requested should self-assign the PR, and will act as its shepherd from then on.

The PR assignee is responsible for making sure that all the proposed changes have been reviewed by relevant maintainers (at least one reviewer for each component that is significantly affected), that change requests have been implemented, that CI is passing, and eventually will be the one who merges the PR.

*If you have already frequently contributed to a component, we would be happy to have you join one of the maintainer teams.* See the [section below](#joining--leaving-maintainer-teams) on joining / leaving maintainer teams. The complete list of maintainer teams is available [here][coq-pushers] (link only accessible to people who are already members of the Coq organization, because of a limitation of GitHub).

#### Additional notes for pull request reviewers and assignees ####

- NEVER USE GITHUB'S MERGE BUTTON. Instead, you should either:
  - post a comment containing "@coqbot: merge now". This is the recommended method, and it is more convenient than the older script-based method (see the next bullet), e.g. for developers who do not have a GPG key or who do not have access to a console. "coqbot" will **not** check the CI status - it is expected that the merger checks this manually upfront - but coqbot will deny the merge with an error response in the following cases:
    - no assignee;
    - no milestone;
    - no `kind` label;
    - left-over `needs` labels;
    - you try to merge a PR which you authored (authorship here is determined by who created the PR - reviewers can still push minor changes and then merge).
  - alternatively, run the [`dev/tools/merge-pr.sh`][merge-pr] script. Since the introduction of "coqbot", this method is deprecated, with a few exceptions such as merges to release branches, which only release managers do. It requires having GPG configured with git.
- PR authors or co-authors cannot review, self-assign, or merge the PR they contributed to. However, reviewers may push small fixes to the PR branch to facilitate the PR integration.
- PRs are merged when there is consensus. Consensus is defined by an explicit approval from at least one maintainer for each component that is significantly affected and an absence of dissent. As soon as a developer opposes a PR, it should not be merged without being discussed first (usually in a call or working group).
- Sometimes (especially for large or potentially controversial PRs), it is good practice to announce the intent to merge one or several days in advance, when unsure that everyone has had a chance to voice their opinion, or to finish reviewing the PR.
- Only PRs targeting the `master` branch can be merged by a maintainer. For PRs targeting a release branch, the assignee should always be the release manager.
- Before merging, the assignee must also select a milestone for the PR (see also Section [Release management](#release-management)).
- To know which files you are a maintainer of, you can look for the black shield icons in the "Files changed" tab. Alternatively, you may use the [`dev/tools/check-owners-pr.sh`][check-owners] script for the same purpose.
![shield icon](dev/doc/shield-icon.png)

- When a PR has [overlays][user-overlays], then:
  - the overlays that are backward-compatible (normally the case for overlays fixing Coq code) should have been merged *before* the PR can be merged; it might be a good idea to ask the PR author to remove the overlay information from the PR to get a fresh CI run and ensure that all the overlays have been merged; the PR assignee may also push a commit removing the overlay information (in that case the assignee is not considered a co-author, hence there is no need to change the assignee);
  - the overlays that are not backward-compatible (normally only the case for overlays fixing OCaml code) should be merged *just after* the PR has been merged (and thus the assignee should ping the maintainers of the affected projects to ask them to merge the overlays).

#### Joining / leaving maintainer teams ####

We are always happy to have more people involved in the PR reviewing and merging process, so do not hesitate to propose yourself if you already have experience with a component. Maintainers can leave teams at any time (and core members can also join any team where they feel able to help), but you should always announce it to the other maintainers when you join or leave a team.

### Core development team ###

The core developers are the active developers with a lengthy and significant contribution track record. They are the ones with admin powers over the Coq organization, and the ones who take part in votes when a decision has to be made in case of conflict (which is rare). One of them is designated as the development coordinator, and has to approve changes in the core team membership (until we get a more formal joining and leaving process).

The core developers are the members of the **@coq/core** team ([member list][coq-core] only visible to Coq organization members because of a limitation of GitHub).

## Release management ##

Coq's major release cycles generally span about six months, with about 4-5 months of development and 1-2 months of stabilization / beta-releases. The release manager (RM) role is a rolling position among core developers. The [release plan][release-plan] is published on the wiki.

Development of new features, refactorings, deprecations and clean-ups always happens on `master`. Stabilization starts by branching (creating a new `v...` release branch from the current `master`), which marks the beginning of a feature freeze (new features will continue to be merged into `master`, but they won't make it into the upcoming major release, only into the next one).

After branching, most changes are introduced in the release branch by a backporting process. PR authors and assignees can signal a desire to have a PR backported by selecting an appropriate milestone. Most of the time, the choice of milestone is between two options: the next major version that has yet to branch from `master`, or the next version (beta, final, or patch-level release) of the active release branch. In the end, it is the RM who decides whether or not to follow the recommendation of the PR assignee, and who backports PRs to the release branch.

Very specific changes that are only relevant for the release branch and not for the `master` branch can result in a PR targeting the release branch instead of `master`. In this case, the RM is the only one who can merge the PR, and they may even do so if they are the author of the PR.
Examples of such PRs include bug fixes to a feature that has been removed in `master`, and PRs from the RM changing the version number in preparation for the next release.

Some automation is in place to help the RM in their task: a GitHub project is created at branching time to manage PRs to backport; when a PR is merged in a milestone corresponding to the release branch, our bot will add this PR to a "Request inclusion" column in this project; the RM can browse through the list of PRs waiting to be backported in this column, possibly reject some of them by simply removing the PR from the column (in which case the bot will update the PR milestone), and proceed to backport the others; when a backported PR is pushed to the release branch, the bot moves the PR from the "Request inclusion" column to a "Shipped" column.

More information about the RM tasks can be found in the [release process checklist][RM-checklist].

### Packaging Coq ###

The RM role does not include the task of making Coq available through the various package managers out there: several contributors (most often external to the development team) take care of this, and we thank them for it. If your preferred package manager does not include Coq, it is a very worthy contribution to make it available there. But be careful not to let a package get outdated, as this could lead some users to install an outdated version of Coq without even being aware of it. This [Repology page][repology-coq] lists the versions of Coq which are packaged in many repositories, although it is missing information on some repositories, like opam.

The Windows and macOS installers are auto-generated in our CI, and this infrastructure has dedicated maintainers within the development team.

## Additional resources ##

### Developer documentation ###

#### Where to find the resources ####

- You can find developer resources in the `dev` directory, and more specifically developer documentation in `dev/doc`. The [README][dev-README] in the `dev` directory lists what's available. For example, [`dev/doc/README.md`][dev-doc-README] is a beginner's guide to hacking Coq, and documentation on debugging Coq can be found in [`dev/doc/debugging.md`][debugging-doc].
- When it makes sense, the documentation is kept even closer to the sources, in README files in various directories (e.g. the test-suite [README][test-suite-README] or the refman [README][refman-README]).
- Documentation of the Coq API is written directly in comments in `.mli` files. You can browse it on [the Coq website][api-doc], or rebuild it locally (`make -f Makefile.dune apidoc`, requires `odoc` and `dune`).
- A plugin tutorial is located in [`doc/plugin_tutorial`][plugin-tutorial].
- The Coq [wiki][] contains additional developer resources.

#### Building Coq ####

The list of dependencies can be found in the first section of the [`INSTALL.md`](INSTALL.md) file.

Today, the recommended method for building Coq is to use `dune`. Run `make -f Makefile.dune` to get help on the various available targets. Additional documentation can be found in [`dev/doc/build-system.dune.md`][dev-doc-dune], and in [the official Dune documentation][dune-doc].

The legacy make-based system is still available. If you wish to use it, you need to start by running `./configure -profile devel`. Most of the available targets are not documented, so you will need to ask.
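As a quick recap of the above, a typical first build session might look as follows. Only the dune targets quoted elsewhere in this guide are listed here; the bare legacy `make` invocation at the end is an assumption (the legacy targets are largely undocumented, so ask if unsure).

``` shell
# Dune-based build (recommended); with no target, this prints help on the available targets:
make -f Makefile.dune
# Fast OCaml-only check, the same target used in the linter example earlier in this guide:
make -f Makefile.dune check
# Build the API documentation locally (requires odoc and dune, as noted above):
make -f Makefile.dune apidoc

# Legacy make-based build:
./configure -profile devel
make -f Makefile.make
```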
#### Continuous integration ####

Continuous integration (CI) testing is key in ensuring that the `master` branch is kept in a well-functioning state at all times, and that no accidental compatibility breakages are introduced. Our CI is quite extensive since it includes testing many external projects, some of them taking more than an hour to compile. However, you can get partial results much more quickly (when our CI is not overloaded).

The main documentation resources on our CI are:

- the [README for users, i.e. plugin and library authors][CI-README-users];
- the [README for developers and contributors][CI-README-developers];
- the README of the [user-overlays][] directory.

Preparing an overlay (i.e. a patch to an external project that we test in our CI, to make it compile with the modified version of Coq in your branch) is a step that everyone goes through at some point. All you need to know to prepare an overlay manually is in the README in the [user-overlays][] directory. You might want to use some additional tooling such as the `make ci-*` targets of `Makefile.ci`, the Nix support for getting the dependencies of the external projects (see the README in [`dev/ci/nix`][dev-ci-nix]), and the (so far undocumented) [`dev/tools/create_overlays.sh`][dev-tools-create_overlays.sh] script. More work is to be done on understanding how each developer proceeds to prepare overlays, and on proposing a simplified and documented procedure.

We also have a benchmarking infrastructure, which is documented [on the wiki][Benchmarking].

##### Restarting failed jobs #####

When CI has a few failures which look spurious, restarting the corresponding jobs is a good way to ensure this was indeed the case. You can restart jobs on Azure from the "Checks" tab on GitHub. To restart a job on GitLab CI, you should sign into GitLab (this can be done using a GitHub account); if you are part of the [Coq organization on GitLab](https://gitlab.com/coq), you should see a "Retry" button; otherwise, send a request to join the organization.

#### Code owners, issue and pull request templates ####

These files can be found in the [`.github`](.github) directory. The templates are particularly useful to remind contributors what information we need from them, and, in the case of PRs, to update the documentation, changelog, and test-suite when relevant.

GitHub now supports setting up multiple issue templates, and we could use this to define distinct requirements for various kinds of bug, enhancement, and feature requests.

#### Style guide ####

There exists an [old style guide][old-style-guide] whose content is still mostly relevant. Yet to be done: extract the parts that are most relevant, and put them in this section instead.

We don't use a code formatter at the current time, and we are reluctant to merge changes to parts of the code that are unchanged aside from formatting. However, if you don't know how to format a block of code, it is still a good idea to use the formatting that [ocamlformat][] would give.

#### OCaml resources ####

You can find lots of OCaml resources on <https://ocaml.org/>, including documentation, a Discourse forum, the package archive, etc. You may also want to refer to the [Dune documentation][dune-doc]. Another resource is <https://ocamlverse.github.io/>, especially its [community page][ocamlverse-community], which lists the various OCaml discussion platforms.

#### Git documentation, tips and tricks ####

Lots of resources about git, the version control system, are available on the web, starting with the [official website][git].
We recommend a setup with two configured remotes, one for the official Coq repository, called `upstream`, and one for your fork, called `origin`. Here is a way to do this for a clean clone:

``` shell
git clone https://github.com/coq/coq -o upstream
cd coq
git remote add origin git@github.com:$YOURNAME/coq  # Make sure you click the fork button on GitHub so that this repository exists
cp dev/tools/pre-commit .git/hooks/  # Setup the pre-commit hook
```

Then, if you want to prepare a fix:

``` shell
# Make sure we start from an up-to-date master
git checkout master
git pull --ff-only  # If this fails, then your master branch is messy
git checkout -b my-topic-branch
# Modify some files
git add .  # Every untracked or modified file will be included in the next commit
           # You can also replace the dot with an explicit list of files
git commit -m "My commit summary. You can add more information on multiple lines, but you need to skip a line first."
git push -u origin my-topic-branch  # Next time you push to this branch, you can just do git push
```

When you push a new branch for the first time, GitHub gives you a link to open a PR.

If you need to fix the last commit in your branch (typically, if your branch has a single commit on top of `master`), you can do so with

```
git add .
git commit --amend --no-edit
```

If you need to fix another commit in your branch, or if you need to fix a conflict with `master`, you will need to learn about `git rebase`. GitHub provides [a short introduction][GitHub-rebase] to `git rebase`.

#### GitHub documentation, tips and tricks ####

GitHub has [extensive documentation][GitHub-doc] about everything you can do on the platform, and tips about using `git` as well. See in particular [how to configure your commit e-mail address][GitHub-commit-email] and [how to open a PR from a fork][GitHub-PR-from-fork].

##### Watching the repository #####

["Watching" this repository][GitHub-watching] can result in a very large number of notifications. We recommend that you either [configure your mailbox][notification-email] to handle incoming notifications efficiently, or read your notifications within a web browser. You can configure how you receive notifications in [your GitHub settings][GitHub-notification-settings], and you can use the GitHub interface to mark threads as read, save them for later, or mute them. You can also manage your GitHub web notifications using a tool such as [Octobox][].

##### Draft pull requests #####

[Draft PRs][GitHub-draft-PR] are a mechanism proposed by GitHub to open a pull request before it is ready for review.

Opening a draft PR is a way of announcing a change and seeking early feedback without formally requesting maintainers' reviews. Indeed, you should avoid cluttering our maintainers' review request lists before a change is ready on your side.

When opening a draft PR, make sure to give it a descriptive enough title so that interested developers still notice it in their notification feed. You may also advertise it by talking about it in our [developer chat][Zulip-dev]. If you know which developer would be able to provide useful feedback to you, you may also ping them.

###### Turning a PR into draft mode ######

If a PR was opened as ready for review, but it turns out that it still needs work, it can be transformed into a draft PR. In this case, previous review requests won't be removed automatically. Someone with write access to the repository should remove them manually.
Afterwards, upon marking the PR as ready for review, someone with write access will have to manually add the review requests that were previously removed.

#### GitLab documentation, tips and tricks ####

We use GitLab mostly for its CI service. The [Coq organization on GitLab][GitLab-coq] hosts a number of CI/CD-only mirrors. If you are a regular contributor, you can request access to it from [the organization page][GitLab-coq]: this will grant you permission to restart failing CI jobs.

GitLab too has [extensive documentation][GitLab-doc], in particular on configuring CI.

#### Merge script dependencies ####

The merge script passes option `-S` to `git merge` to ensure merge commits are signed. Consequently, it depends on the GnuPG command utility being installed and a GPG key being available. Here is a short guide on how to use GPG, git & GitHub: https://help.github.com/articles/signing-commits-with-gpg/.

The script depends on a few other utilities. If you are a Nix user, the simplest way of getting them is to run `nix-shell` first.

**Note for homebrew (MacOS) users:** it has been reported that GnuPG does not work out of the box. Explicitly installing `pinentry-mac` seems to be important for passphrase entry to work correctly (see also this [Stack Overflow Q-and-A][pinentry-mac]).

#### Coqbot ####

Our bot sources can be found at <https://github.com/coq/bot>. Its documentation is still a work-in-progress.

### Online forum and chat to talk to developers ###

We have a [Discourse forum][Discourse] (see in particular the [Coq development][Discourse-development-category] category) and a [Zulip chat][Zulip] (see in particular the [Coq devs & plugin devs][Zulip-dev] stream). Feel free to join any of them and ask questions. People are generally happy to help and very responsive.

Obviously, the issue tracker is also a good place to ask questions, especially if the development processes are unclear, or the developer documentation should be improved.

### Coq calls ###

We try to gather every week for one hour through video-conference to discuss current and urgent matters. When longer discussions are needed, topics are left for the next working group. See the [wiki][wiki-calls] for more information about Coq calls, as well as notes of past ones.

### Coq remote working groups ###

We semi-regularly (up to every month) organize remote working groups, which can be accessed through video-conference, and are most often live streamed on [YouTube][]. Summary notes and announcements of the next working group can be found [on the wiki][wiki-WG].

These working groups are where important decisions are taken, most often by consensus, but also, if needed, by a vote of core developers.

### Coq Users and Developers Workshops ###

We have an annual gathering in late spring in France where most core developers are present, and whose objective is to help new contributors get started with the Coq codebase, provide help to plugin and library authors, and more generally have fun together.

The list of past (and upcoming, when it's already planned) workshops can be found [on the wiki][wiki-CUDW].
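As a complement to the "Merge script dependencies" section above, here is a hedged sketch of making a GPG key available to git for signed merges; the key id is a placeholder, and the GitHub guide linked in that section remains the reference.

``` shell
# List the available secret keys and note the long key id of the one to use:
gpg --list-secret-keys --keyid-format=long

# Tell git which key to use when signing (placeholder key id):
git config --global user.signingkey 3AA5C34371567BD2

# Optional: sign all commits by default; the merge script already passes -S to git merge.
git config --global commit.gpgsign true
```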
[add-contributor]: https://github.com/orgs/coq/teams/contributors/members?add=true [api-doc]: https://coq.github.io/doc/master/api/ [Benchmarking]: https://github.com/coq/coq/wiki/Benchmarking [CEP]: https://github.com/coq/ceps [check-owners]: dev/tools/check-owners-pr.sh [CI-README-developers]: dev/ci/README-developers.md [CI-README-users]: dev/ci/README-users.md [Code-of-Conduct]: CODE_OF_CONDUCT.md [CODEOWNERS]: .github/CODEOWNERS [Codewars]: https://www.codewars.com/?language=coq [company-coq-issues]: https://github.com/cpitclaudel/company-coq/issues [Coq-Club]: https://sympa.inria.fr/sympa/arc/coq-club [coq-community-maintainer-wanted]: https://github.com/coq-community/manifesto/issues?q=is%3Aissue+is%3Aopen+label%3Amaintainer-wanted [coq-community-manifesto]: https://github.com/coq-community/manifesto [coq-community-wiki]: https://github.com/coq-community/manifesto/wiki [coq-core]: https://github.com/orgs/coq/teams/core/members [coqdoc-documentation]: https://coq.inria.fr/refman/practical-tools/utilities.html#documenting-coq-files-with-coqdoc [Coq-documentation]: https://coq.inria.fr/documentation [Coq-issue-tracker]: https://github.com/coq/coq/issues [Coq-package-index]: https://coq.inria.fr/packages [coq-pushers]: https://github.com/orgs/coq/teams/pushers/teams [coq-repository]: https://github.com/coq/coq [Coq-website-repository]: https://github.com/coq/www [debugging-doc]: dev/doc/debugging.md [dev-ci-nix]: dev/ci/nix/README.md [dev-doc-README]: dev/doc/README.md [dev-doc-dune]: dev/doc/build-system.dune.md [dev-README]: dev/README.md [dev-tools-create_overlays.sh]: dev/tools/create_overlays.sh [Discourse]: https://coq.discourse.group/ [Discourse-development-category]: https://coq.discourse.group/c/coq-development [doc_gram]: doc/tools/docgram/README.md [doc-README]: doc/README.md [documentation-github-project]: https://github.com/coq/coq/projects/3 [dune-doc]: https://dune.readthedocs.io/en/latest/ [FAQ]: https://github.com/coq/coq/wiki/The-Coq-FAQ [git]: https://git-scm.com/ [git-hook]: dev/tools/pre-commit [GitHub-co-authored-by]: https://github.blog/2018-01-29-commit-together-with-co-authors/ [GitHub-commit-email]: https://help.github.com/en/articles/setting-your-commit-email-address-in-git [GitHub-doc]: https://help.github.com/ [GitHub-draft-PR]: https://github.blog/2019-02-14-introducing-draft-pull-requests/ [GitHub-markdown]: https://guides.github.com/features/mastering-markdown/ [GitHub-notification-settings]: https://github.com/settings/notifications [GitHub-PR-from-fork]: https://help.github.com/en/articles/creating-a-pull-request-from-a-fork [GitHub-rebase]: https://help.github.com/articles/about-git-rebase/ [GitHub-watching]: https://github.com/coq/coq/subscription [GitHub-wiki-extensions]: https://help.github.com/en/articles/editing-wiki-content [GitLab-coq]: https://gitlab.com/coq [GitLab-doc]: https://docs.gitlab.com/ [JasonGross-coq-tools]: https://github.com/JasonGross/coq-tools [kind-documentation]: https://github.com/coq/coq/issues?q=is%3Aopen+is%3Aissue+label%3A%22kind%3A+documentation%22 [master-doc]: https://coq.github.io/doc/master/refman/ [merge-pr]: dev/tools/merge-pr.sh [needs-benchmarking]: https://github.com/coq/coq/labels/needs%3A%20benchmarking [needs-changelog]: https://github.com/coq/coq/labels/needs%3A%20changelog%20entry [needs-documentation]: https://github.com/coq/coq/labels/needs%3A%20documentation [needs-fixing]: https://github.com/coq/coq/labels/needs%3A%20fixing [needs-rebase]: https://github.com/coq/coq/labels/needs%3A%20rebase 
[needs-testing]: https://github.com/coq/coq/labels/needs%3A%20testing [Nix]: https://github.com/coq/coq/wiki/Nix [notification-email]: https://blog.github.com/2017-07-18-managing-large-numbers-of-github-notifications/#prioritize-the-notifications-you-receive [OCaml-planet]: http://ocaml.org/community/planet/ [ocamlformat]: https://github.com/ocaml-ppx/ocamlformat [ocamlverse-community]: https://ocamlverse.github.io/content/community.html [Octobox]: http://octobox.io/ [old-style-guide]: dev/doc/style.txt [other-standard-libraries]: https://github.com/coq/stdlib2/wiki/Other-%22standard%22-libraries [pinentry-mac]: https://stackoverflow.com/questions/39494631/gpg-failed-to-sign-the-data-fatal-failed-to-write-commit-object-git-2-10-0 [plugin-tutorial]: doc/plugin_tutorial [ProofGeneral-issues]: https://github.com/ProofGeneral/PG/issues [Reddit]: https://www.reddit.com/r/Coq/ [refman]: https://coq.inria.fr/distrib/current/refman/ [refman-sources]: doc/sphinx [refman-README]: doc/sphinx/README.rst [release-plan]: https://github.com/coq/coq/wiki/Release-Plan [repology-coq]: https://repology.org/project/coq/versions [RM-checklist]: dev/doc/release-process.md [Stack-Exchange]: https://stackexchange.com/filters/299857/questions-tagged-coq-on-stackexchange-sites [Stack-Overflow]: https://stackoverflow.com/questions/tagged/coq [stdlib-doc]: https://coq.inria.fr/stdlib/ [test-suite-README]: test-suite/README.md [tools-website]: https://coq.inria.fr/related-tools.html [tools-wiki]: https://github.com/coq/coq/wiki/Tools [unreleased-changelog]: https://coq.github.io/doc/master/refman/changes.html#unreleased-changes [user-changelog]: doc/changelog [user-overlays]: dev/ci/user-overlays [wiki]: https://github.com/coq/coq/wiki [wiki-calls]: https://github.com/coq/coq/wiki/Coq-Calls [wiki-CUDW]: https://github.com/coq/coq/wiki/CoqImplementorsWorkshop [wiki-WG]: https://github.com/coq/coq/wiki/Coq-Working-Groups [YouTube]: https://www.youtube.com/channel/UCbJo6gYYr0OF18x01M4THdQ [Zulip]: https://coq.zulipchat.com [Zulip-dev]: https://coq.zulipchat.com/#narrow/stream/237656-Coq-devs.20.26.20plugin.20devs coq-8.15.0/CREDITS000066400000000000000000000175651417001151100134140ustar00rootroot00000000000000The "Coq proof assistant" was jointly developed by - INRIA Formel, Coq, LogiCal, ProVal, TypiCal, Marelle, pi.r2, Ascola, Galinette projects (starting 1985), - Laboratoire de l'Informatique du Parallelisme (LIP) associated to CNRS and ENS Lyon (Sep. 1989 to Aug. 1997), - Laboratoire de Recherche en Informatique (LRI) associated to CNRS and university Paris Sud (since Sep. 1997), - Laboratoire d'Informatique de l'Ecole Polytechnique (LIX) associated to CNRS and Ecole Polytechnique (since Jan. 2003). - Laboratoire PPS associated to CNRS and University Paris Diderot (Jan. 2009 - Dec. 2015 when it was merged into IRIF). - Institut de Recherche en Informatique Fondamentale (IRIF), associated to CNRS and University Paris Diderot (since Jan. 2016). - And many contributors from various institutions. All files but the material of the reference manual are distributed under the term of the GNU Lesser General Public License Version 2.1. The material of the reference manual is distributed under the terms of the Open Publication License v1.0 or above, as indicated in file doc/LICENCE. The following directories contain independent contributions supported by the Coq development team. All of them are released under the terms of the GNU Lesser General Public License Version 2.1. 
plugins/cc developed by Pierre Corbineau (ENS Cachan, 2001, LRI, 2001-2005, Radboud University at Nijmegen, 2005-2008, Grenoble 1, 2010-2014) plugins/extraction developed by Pierre Letouzey (LRI, 2000-2004, PPS, 2005-now) plugins/firstorder developed by Pierre Corbineau (LRI, 2003-2008) plugins/funind developed by Pierre Courtieu (INRIA-Lemme, 2003-2004, CNAM, 2006-now), Julien Forest (INRIA-Everest, 2006, CNAM, 2007-2008, ENSIIE, 2008-now) and Yves Bertot (INRIA-Marelle, 2005-2006) plugins/micromega developed by Frédéric Besson (IRISA/INRIA, 2006-now), with some extensions by Evgeny Makarov (INRIA, 2007); sum-of-squares solver and interface to the csdp solver uses code from John Harrison (University of Cambridge, 1998) plugins/nsatz developed by Loïc Pottier (INRIA-Marelle, 2009-2011) plugins/omega developed by Pierre Crégut (France Telecom R&D, 1996) plugins/rtauto developed by Pierre Corbineau (LRI, 2005) plugins/ring developed by Benjamin Grégoire (INRIA-Everest, 2005-2006), Assia Mahboubi, Laurent Théry (INRIA-Marelle, 2006) and Bruno Barras (INRIA LogiCal, 2005-2006), plugins/ssr developed by Georges Gonthier (Microsoft Research - Inria Joint Centre, 2007-2013, Inria, 2013-now), Assia Mahboubi and Enrico Tassi (Inria, 2011-now). plugins/ssrmatching developed by Georges Gonthier (Microsoft Research - Inria Joint Centre, 2007-2011, Inria, 2013-now), and Enrico Tassi (Inria-Marelle, 2011-now) theories/ZArith started by Pierre Crégut (France Telecom R&D, 1996) theories/Strings developed by Laurent Théry (INRIA-Lemme, 2003) theories/Numbers/Cyclic developed by Benjamin Grégoire (INRIA-Everest, 2007), Laurent Théry (INRIA-Marelle, 2007-2008), Arnaud Spiwack (INRIA-LogiCal, 2007) and Pierre Letouzey (PPS, 2008) ide/utils some files come from Maxence Guesdon's Cameleon tool The development of Coq significantly benefited from feedback, suggestions or short contributions from the following non exhaustive list of persons and groups: C. Alvarado, C. Auger, F. Blanqui, P. Castéran, C. Cohen, J. Courant, J. Duprat, F. Garillot, G. Gonthier, J. Goubault, J.-P. Jouannaud, S. Lescuyer, A. Miquel, J.-F. Monin, P.-Y. Strub the Foundations Group (Radboud University, Nijmegen, The Netherlands), Laboratoire J.-A. Dieudonné (University of Nice-Sophia Antipolis), L. Lee (https://orcid.org/0000-0002-7128-9257, 2018), INRIA-Gallium project, the CS dept at Yale, the CIS dept at U. 
Penn, the CSE dept at Harvard, the CS dept at Princeton, the CS dept at MIT as well as a lot of users on coq-club, coqdev, coq-bugs The following people have contributed to the development of different versions of the Coq Proof assistant during the indicated time: Bruno Barras (INRIA, 1995-now) Yves Bertot (INRIA, 2000-now) Pierre Boutillier (INRIA-PPS, 2010-2015) Xavier Clerc (INRIA, 2012-2014) Tej Chajed (MIT, 2016-now) Jacek Chrzaszcz (LRI, 1998-2003) Thierry Coquand (INRIA, 1985-1989) Pierre Corbineau (LRI, 2003-2005, Nijmegen, 2005-2008, Grenoble 1, 2008-2011) Cristina Cornes (INRIA, 1993-1996) Yann Coscoy (INRIA Sophia-Antipolis, 1995-1996) Pierre Courtieu (CNAM, 2006-now) David Delahaye (INRIA, 1997-2002) Maxime Dénès (INRIA, 2013-now) Daniel de Rauglaudre (INRIA, 1996-1998, 2012, 2016) Olivier Desmettre (INRIA, 2001-2003) Gilles Dowek (INRIA, 1991-1994) Jim Fehrle (2018-now) Amy Felty (INRIA, 1993) Jean-Christophe Filliâtre (ENS Lyon, 1994-1997, LRI, 1997-2008) Emilio Jesús Gallego Arias (MINES ParisTech 2015-now) Gaetan Gilbert (INRIA-Galinette, 2016-now) Eduardo Giménez (ENS Lyon, 1993-1996, INRIA, 1997-1998) Stéphane Glondu (INRIA-PPS, 2007-2013) Benjamin Grégoire (INRIA, 2003-2011) Jason Gross (MIT 2013-now) Hugo Herbelin (INRIA, 1996-now) Sébastien Hinderer (INRIA, 2014) Gérard Huet (INRIA, 1985-1997) Konstantinos Kallas (U. Penn, 2019) Matej Košík (INRIA, 2015-2017) Leonidas Lampropoulos (University of Pennsylvania, 2018) Pierre Letouzey (LRI, 2000-2004, PPS, 2005-2008, INRIA-PPS then IRIF, 2009-2018) Yao Li (ORCID: https://orcid.org/0000-0001-8720-883X, University of Pennsylvania, 2018) Yishuai Li (ORCID: https://orcid.org/0000-0002-5728-5903 U. Penn, 2018-2019) Patrick Loiseleur (Paris Sud, 1997-1999) Andreas Lynge (Aarhus University, 2019) Evgeny Makarov (INRIA, 2007) Gregory Malecha (Harvard University 2013-2015, University of California, San Diego 2016) Cyprien Mangin (INRIA-PPS then IRIF, 2015-now) Pascal Manoury (INRIA, 1993) Claude Marché (INRIA, 2003-2004 & LRI, 2004) Micaela Mayero (INRIA, 1997-2002) Guillaume Melquiond (INRIA, 2009-now) Benjamin Monate (LRI, 2003) César Muñoz (INRIA, 1994-1995) Chetan Murthy (INRIA, 1992-1994) Julien Narboux (INRIA, 2005-2006, Strasbourg, 2007-2011) Jean-Marc Notin (CNRS, 2006-now) Catherine Parent-Vigouroux (ENS Lyon, 1992-1995) Christine Paulin-Mohring (INRIA, 1985-1989, ENS Lyon, 1989-1997, LRI, 1997-2006) Pierre-Marie Pédrot (INRIA-PPS, 2011-2015, INRIA-Ascola, 2015-2016, University of Ljubljana, 2016-2017, MPI-SWS, 2017-2018, INRIA 2018-now) Clément Pit-Claudel (MIT, 2015-now) Matthias Puech (INRIA-Bologna, 2008-2011) Yann Régis-Gianas (INRIA-PPS then IRIF, 2009-2016) Clément Renard (INRIA, 2001-2004) Talia Ringer (University of Washington, 2019) Claudio Sacerdoti Coen (INRIA, 2004-2005) Amokrane Saïbi (INRIA, 1993-1998) Vincent Semeria (2018-now) Vincent Siles (INRIA, 2007) Élie Soubiran (INRIA, 2007-2010) Matthieu Sozeau (INRIA, 2005-now) Arnaud Spiwack (INRIA-LIX-Chalmers University, 2006-2010, INRIA, 2011-2014, MINES ParisTech 2014-2015, Tweag/IO 2015-now) Paul Steckler (MIT 2016-2018) Enrico Tassi (INRIA, 2011-now) Amin Timany (Katholieke Universiteit Leuven, 2017) Benjamin Werner (INRIA, 1989-1994) Nickolai Zeldovich (MIT 2014-2016) Théo Zimmermann (ORCID: https://orcid.org/0000-0002-3580-8806, INRIA-PPS then IRIF, 2015-now) *************************************************************************** INRIA refers to: Institut National de la Recherche en Informatique et Automatique CNRS refers to: Centre National de la 
Recherche Scientifique LRI refers to: Laboratoire de Recherche en Informatique, UMR 8623 CNRS and Université Paris-Sud ENS Lyon refers to: Ecole Normale Supérieure de Lyon PPS refers to: Laboratoire Preuve, Programmation, Système, UMR 7126, CNRS and Université Paris 7 **************************************************************************** coq-8.15.0/INSTALL.md000066400000000000000000000122151417001151100140070ustar00rootroot00000000000000Installing From Sources ======================= To install and use Coq, we recommend relying on [the Coq platform](https://github.com/coq/platform/) or on a package manager (e.g. opam or Nix). See https://coq.inria.fr/download and https://github.com/coq/coq/wiki#coq-installation to learn more. If you need to build Coq from sources manually (e.g. to contribute to Coq or to write a Coq package), the remainder of this file explains how to do so. Build Requirements ------------------ To compile Coq yourself, you need: - [OCaml](https://ocaml.org/) (version >= 4.05.0) (This version of Coq has been tested up to OCaml 4.13.0) - The [Dune OCaml build system](https://github.com/ocaml/dune/) >= 2.5.1 - The [ZArith library](https://github.com/ocaml/Zarith) >= 1.10 - The [findlib](http://projects.camlcity.org/projects/findlib.html) library (version >= 1.8.0) - a C compiler - an IEEE-754 compliant architecture with rounding to nearest ties to even as default rounding mode (most architectures should work nowadays) - for CoqIDE, the [lablgtk3-sourceview3](https://github.com/garrigue/lablgtk) library (version >= 3.1.0), and the corresponding GTK 3.x libraries, as of today (gtk+3 >= 3.18 and gtksourceview3 >= 3.18) - [optional] GNU Make (version >= 3.81) Primitive floating-point numbers require IEEE-754 compliance (`Require Import Floats`). Common sources of incompatibility are checked at configure time, preventing compilation. In the unlikely event an incompatibility remains undetected, using `Floats` would enable proving `False` on this architecture. Note that OCaml dependencies (`zarith` and `lablgtk3-sourceview3` at this moment) must be properly registered with `findlib/ocamlfind` since Coq's build system uses `findlib` to locate them. Debian / Ubuntu users can get the necessary system packages for CoqIDE with: $ sudo apt-get install libgtksourceview-3.0-dev Opam (https://opam.ocaml.org/) is recommended to install OCaml and the corresponding packages. $ opam switch create coq --packages="ocaml-variants.4.12.0+options,ocaml-option-flambda" $ eval $(opam env) $ opam install dune ocamlfind zarith lablgtk3-sourceview3 should get you a reasonable OCaml environment to compile Coq. See the OPAM documentation for more help. Nix users can also get all the required dependencies by running: $ nix-shell Advanced users may want to experiment with the OCaml Flambda compiler as way to improve the performance of Coq. In order to profit from Flambda, a special build of the OCaml compiler that has the Flambda optimizer enabled must be installed. For OPAM users, this amounts to installing a compiler switch ending in `+flambda`, such as `4.07.1+flambda`. For other users, YMMV. Once `ocamlopt -config` reports that Flambda is available, some further optimization options can be used; see the entry about `flambda options` in the build guide for more details. Choice of Build and Installation Procedure ------------------------------------------ There are two partially overlapping infrastructures available to build Coq. 
They are available through `Makefile.make` (legacy / hybrid build) and `Makefile.dune` (full Dune build). You can use the `COQ_USE_DUNE` environment variable to change the one to use by default. This is useful for Coq development, where we recommend to rely mainly on `Makefile.dune`. Note that mixing the two systems is not perfectly supported and may lead to confusing behavior. In both cases, the OCaml parts are built using [Dune](https://github.com/ocaml/dune). The main difference between the two systems is how the `.vo` files are built. In the case of `Makefile.make`, `.vo` files are built with a legacy Makefile, similar to what `coq_makefile` would do. In the case of `Makefile.dune`, `.vo` files are built with Dune, thanks to its recently-added, and still experimental, Coq mode. See the documentation for the two infrastructures: - [Legacy build](dev/doc/INSTALL.make.md) - [Full Dune build](dev/doc/build-system.dune.md) See also [`dev/doc/README.md`](dev/doc/README.md). Run-time dependencies of native compilation ------------------------------------------- The OCaml compiler and findlib are build-time dependencies, but also run-time dependencies if you wish to use the native compiler. OCaml toolchain advisory ------------------------ When loading plugins or `vo` files, you should make sure that these were compiled with the same OCaml setup (version, flags, dependencies...) as Coq. Distribution of pre-compiled plugins and `.vo` files is only possible if users are guaranteed to have the same Coq version compiled with the same OCaml toolchain. An OCaml setup mismatch is the most probable cause for an `Error while loading ...: implementation mismatch on ...`. coq_environment.txt ------------------- Coq binaries which honor environment variables, such as `COQLIB`, can be seeded values for these variables by placing a text file named `coq_environment.txt` next to them. The file can contain assignments like `COQLIB="some path"`, that is a variable name followed by `=` and a string that follows OCaml's escaping conventions. This feature can be used by installers of binary package to make Coq aware of its installation path. coq-8.15.0/LICENSE000066400000000000000000000574751417001151100134050ustar00rootroot00000000000000 GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. 
Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. 
A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. 
You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. 
You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. 
Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. 
However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. 
Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS coq-8.15.0/Makefile000066400000000000000000000016051417001151100140200ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## timings.csv TIMED ?= # When $(TIMED) is set, the time command used by default is $(STDTIME) # (see below), unless the following variable is non-empty. For instance, # it could be set to "'/usr/bin/env time -p'". TIMECMD ?= # When non-empty, -time is passed to coqc and the output is recorded # in a timing file for each .v file. 
If set to "before" or "after", # the file name for foo.v is foo.v.$(TIMING)-timing; otherwise, it is # foo.v.timing TIMING ?= # Non-empty runs the checker on all produced .vo files: VALIDATE ?= # When non-empty, passed as extra arguments to coqtop/coqc: COQUSERFLAGS ?= # Option for changing sorting of timing output file TIMING_SORT_BY ?= auto # Option for changing the fuzz parameter on the output file TIMING_FUZZ ?= 0 # Option for changing whether to use real or user time for timing tables TIMING_REAL?= # Option for including the memory column(s) TIMING_INCLUDE_MEM?= # Option for sorting by the memory column TIMING_SORT_BY_MEM?= # Output file names for timed builds TIME_OF_BUILD_FILE ?= time-of-build.log TIME_OF_BUILD_BEFORE_FILE ?= time-of-build-before.log TIME_OF_BUILD_AFTER_FILE ?= time-of-build-after.log TIME_OF_PRETTY_BUILD_FILE ?= time-of-build-pretty.log TIME_OF_PRETTY_BOTH_BUILD_FILE ?= time-of-build-both.log TIME_OF_PRETTY_BUILD_EXTRA_FILES ?= - # also output to the command line BEFORE ?= AFTER ?= # Number of parallel jobs for -schedule-vio2vo NJOBS ?= 2 ########################################################################### # Includes ########################################################################### .PHONY: NOARG NOARG: world byte include Makefile.common include Makefile.vofiles include Makefile.doc ## provides the 'documentation' rule include Makefile.ide ## provides the 'coqide' rule include Makefile.install include Makefile.dev ########################################################################### # Default starting rule ########################################################################### .PHONY: world byte coq world.timing.diff coq.timing.diff states world: coq coqide documentation revision byte: world coq: coqlib coqbinaries tools $(BCONTEXT)/coq-core.install $(BCONTEXT)/coqide-server.install world.timing.diff: coq.timing.diff coq.timing.diff: coqlib.timing.diff states: $(VO_OUT_DIR)theories/Init/Prelude.$(VO) .PHONY: coqbinaries tools coqbinaries: $(TOPBINOPT) $(COQC) $(COQTOPEXE) $(CHICKEN) $(CSDPCERT) $(COQNATIVE) tools: $(TOOLS) $(OCAMLLIBDEP) $(COQDEP) $(DOC_GRAM) ########################################################################### # Timing targets ########################################################################### ifeq (0,$(TIMING_REAL)) TIMING_REAL_ARG := TIMING_USER_ARG := --user else ifeq (1,$(TIMING_REAL)) TIMING_REAL_ARG := --real TIMING_USER_ARG := else TIMING_REAL_ARG := TIMING_USER_ARG := endif endif ifeq (0,$(TIMING_INCLUDE_MEM)) TIMING_INCLUDE_MEM_ARG := --no-include-mem else TIMING_INCLUDE_MEM_ARG := endif ifeq (1,$(TIMING_SORT_BY_MEM)) TIMING_SORT_BY_MEM_ARG := --sort-by-mem else TIMING_SORT_BY_MEM_ARG := endif make-pretty-timed-before:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_BEFORE_FILE) make-pretty-timed-after:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_AFTER_FILE) make-pretty-timed make-pretty-timed-before make-pretty-timed-after:: $(HIDE)rm -f pretty-timed-success.ok $(HIDE)($(MAKE) --no-print-directory $(TGTS) TIMED=1 2>&1 && touch pretty-timed-success.ok) | tee -a $(TIME_OF_BUILD_FILE) $(HIDE)rm pretty-timed-success.ok # must not be -f; must fail if the touch failed print-pretty-timed:: $(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) print-pretty-timed-diff:: $(HIDE)$(COQMAKE_BOTH_TIME_FILES) --sort-by=$(TIMING_SORT_BY) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) 
$(TIMING_REAL_ARG) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) ifeq (,$(BEFORE)) print-pretty-single-time-diff:: @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' $(HIDE)false else ifeq (,$(AFTER)) print-pretty-single-time-diff:: @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' $(HIDE)false else print-pretty-single-time-diff:: $(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) --fuzz=$(TIMING_FUZZ) --sort-by=$(TIMING_SORT_BY) $(TIMING_USER_ARG) $(AFTER) $(BEFORE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) endif endif pretty-timed: $(HIDE)$(MAKE) --no-print-directory make-pretty-timed $(HIDE)$(MAKE) --no-print-directory print-pretty-timed .PHONY: pretty-timed make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff ifneq (,$(TIMING)) TIMING_ARG=-time ifeq (after,$(TIMING)) TIMING_EXT=after-timing else ifeq (before,$(TIMING)) TIMING_EXT=before-timing else TIMING_EXT=timing endif endif else TIMING_ARG= endif ########################################################################### # Build the .v deps VFILED=.vfiles.d -include $(VFILED) # All dependency includes must be declared secondary, otherwise make will # delete them if it decided to build them by dependency instead of because # of include, and they will then be automatically deleted, leading to an # infinite loop. .SECONDARY: $(VFILED) ########################################################################### # Compilation options ########################################################################### # Default timing command # Use /usr/bin/env time on linux, gtime on Mac OS TIMEFMT?="$@ (real: %e, user: %U, sys: %S, mem: %M ko)" ifneq (,$(TIMED)) ifeq (0,$(shell /usr/bin/env time -f "" true >/dev/null 2>/dev/null; echo $$?)) STDTIME?=/usr/bin/env time -f $(TIMEFMT) else ifeq (0,$(shell gtime -f "" true >/dev/null 2>/dev/null; echo $$?)) STDTIME?=gtime -f $(TIMEFMT) else STDTIME?=time endif endif else STDTIME?=/usr/bin/env time -f $(TIMEFMT) endif TIMER=$(if $(TIMED), $(STDTIME), $(TIMECMD)) # NB: do not use a variable named TIME, since this variable controls # the output format of the unix command time. For instance: # TIME="%C (%U user, %S sys, %e total, %M maxres)" PLUGININCLUDES=$(addprefix -I _build/default/, $(wildcard plugins/*)) DUNEPLUGININCLUDES=$(addprefix -I _build/default/, $(wildcard plugins/*)) NATIVEINCLUDES=$(addprefix -nI _build/default/, kernel/.kernel.objs/byte) ifdef NATIVECOMPUTE COQOPTS += -w -deprecated-native-compiler-option -native-compiler ondemand endif COQOPTS += $(COQWARNERROR) $(COQUSERFLAGS) # Beware this depends on the makefile being in a particular dir, we # should pass an absolute path here but windows is tricky # c.f. 
https://github.com/coq/coq/pull/9560 BOOTCOQC=$(TIMER) $(COQC) -coqlib $(VO_OUT_DIR) -q $(COQOPTS) $(DUNEPLUGININCLUDES) ########################################################################### # Infrastructure for the rest of the Makefile ########################################################################### VO_TOOLS_DEP = $(COQC) ifeq ($(BEST),byte) VO_TOOLS_DEP += $(CONTEXT)/lib/stublibs/dllcoqrun_stubs.so endif ifdef NATIVECOMPUTE VO_TOOLS_DEP += $(COQNATIVE) endif ifdef VALIDATE VO_TOOLS_DEP += $(CHICKEN) endif ## When a rule redirects stdout of a command to the target file : cmd > $@ ## then the target file will be created even if cmd has failed. ## Hence relaunching make will go further, as make thinks the target has been ## done ok. To avoid this, we use the following special variable: .DELETE_ON_ERROR: ########################################################################### # tests ########################################################################### .PHONY: validate check test-suite VALIDOPTS=$(if $(VERBOSE),,-silent) -o -m -coqlib $(VO_OUT_DIR) validate: $(CHICKEN) | $(ALLVO:.$(VO)=.vo) $(SHOW)'COQCHK ' $(HIDE)$(CHICKEN) $(VALIDOPTS) $(ALLVO) MAKE_TSOPTS=-C test-suite -s VERBOSE=$(VERBOSE) check: validate test-suite test-suite: world $(MAKE) $(MAKE_TSOPTS) clean $(MAKE) $(MAKE_TSOPTS) all ########################################################################### # Compilation of .v files ########################################################################### # NB: for make world, no need to mention explicitly the .cmxs of the plugins, # since they are all mentioned in at least one Declare ML Module in some .v coqlib: stdlib-vo contrib-vo ifdef QUICK $(SHOW)'COQC -schedule-vio2vo $(NJOBS) theories/**.vio' $(HIDE)$(BOOTCOQC) -schedule-vio2vo $(NJOBS) $(THEORIESVO) $(CONTRIBVO) endif coqlib.timing.diff: stdlib.timing.diff stdlib-vo: $(THEORIESVO) contrib-vo: $(CONTRIBVO) stdlib.timing.diff: $(ALLVO:.$(VO)=.v.timing.diff) .PHONY: coqlib stdlib-vo contrib-vo coqlib.timing.diff stdlib.timing.diff # The .vo files in Init are built with the -noinit option ifneq (,$(TIMING)) TIMING_EXTRA = > $<.$(TIMING_EXT) else TIMING_EXTRA = endif # Rules for sources; it is standard to copy them to build dir so we # work in a "sandbox". 
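As an illustrative note on the timing machinery defined above (a sketch only: the target and variable names are the ones used by these rules, and the `path/to/file.v.*-timing` paths are the placeholders from the usage message above):

    $ make TIMED=1 coqlib
    $ make pretty-timed TGTS="coqlib"
    $ make print-pretty-single-time-diff \
        BEFORE=path/to/file.v.before-timing AFTER=path/to/file.v.after-timing

The first command prints per-file time and memory as each `.v` file is compiled; the second aggregates a whole build into a sorted table; the third compares two single-file profiles produced with `TIMING=before` and `TIMING=after`. The rules below implement the source-copy "sandbox" described in the comment above.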
$(VO_OUT_DIR)theories/%.v: theories/%.v | $(VO_OUT_DIR) $(HIDE)mkdir -p $(VO_OUT_DIR)$(shell dirname $<) $(HIDE)cp -a $< $@ $(VO_OUT_DIR)user-contrib/%.v: user-contrib/%.v | $(VO_OUT_DIR) $(HIDE)mkdir -p $(VO_OUT_DIR)$(shell dirname $<) $(HIDE)cp -a $< $@ $(VO_OUT_DIR)theories/Init/%.vo $(VO_OUT_DIR)theories/Init/%.glob: $(VO_OUT_DIR)theories/Init/%.v $(VO_TOOLS_DEP) | $(VO_OUT_DIR) $(SHOW)'COQCBOOT theories/Init/$*.v' $(HIDE)rm -f $(VO_OUT_DIR)theories/Init/$*.glob $(HIDE)mkdir -p $(shell dirname $<) $(HIDE)$(BOOTCOQC) $< -o $@ -noinit -R $(VO_OUT_DIR)theories Coq $(TIMING_ARG) $(TIMING_EXTRA) ifdef NATIVECOMPUTE $(SHOW)'COQNATIVE $*.vo' $(HIDE)$(COQNATIVE) $(NATIVEINCLUDES) -coqlib $(VO_OUT_DIR) -R $(VO_OUT_DIR)theories Coq $(VO_OUT_DIR)theories/Init/$*.vo endif $(VO_OUT_DIR)theories/Init/%.vio: theories/Init/%.v $(VO_TOOLS_DEP) | $(VO_OUT_DIR) $(SHOW)'COQC -quick -noinit $<' $(HIDE)mkdir -p $(VO_OUT_DIR)$(shell dirname $<) $(HIDE)$(BOOTCOQC) $< -o $@ -noinit -R $(VO_OUT_DIR)theories Coq -vio -noglob # The general rule for building .vo files : $(VO_OUT_DIR)%.vo $(VO_OUT_DIR)%.glob: $(VO_OUT_DIR)%.v $(VO_OUT_DIR)theories/Init/Prelude.vo $(VO_TOOLS_DEP) | $(VO_OUT_DIR) $(SHOW)'COQC $*.v' $(HIDE)rm -f $*.glob $(HIDE)mkdir -p $(shell dirname $<) $(HIDE)$(BOOTCOQC) $< -o $@ $(TIMING_ARG) $(TIMING_EXTRA) ifdef NATIVECOMPUTE $(SHOW)'COQNATIVE $*.vo' $(HIDE)$(COQNATIVE) $(NATIVEINCLUDES) -coqlib $(VO_OUT_DIR) -R $(VO_OUT_DIR)theories Coq $(VO_OUT_DIR)$*.vo endif ifdef VALIDATE $(SHOW)'COQCHK $(call vo_to_mod,$@)' $(HIDE)$(CHICKEN) $(VALIDOPTS) -norec $(call vo_to_mod,$@) \ || ( RV=$$?; rm -f "$@"; exit $${RV} ) endif $(VO_OUT_DIR)%.vio: %.v $(VO_OUT_DIR)theories/Init/Prelude.vio $(VO_TOOLS_DEP) $(SHOW)'COQC -vio $<' $(HIDE)mkdir -p $(shell dirname $<) $(HIDE)$(BOOTCOQC) $< -vio -noglob -o $@ $(VO_OUT_DIR)%.v.timing.diff: %.v.before-timing %.v.after-timing $(SHOW)'PYTHON TIMING-DIFF $*.v.{before,after}-timing' $(HIDE)$(MAKE) --no-print-directory print-pretty-single-time-diff BEFORE=$*.v.before-timing AFTER=$*.v.after-timing TIME_OF_PRETTY_BUILD_FILE="$@" # Dependencies of .v files ifeq ($(BEST),byte) DYNDEP=-dyndep byte else DYNDEP=-dyndep opt endif # can easily produce too long command line with many .v files prefixed by build_vo # so we use find + xargs to split coqdep invocation if necessary # OSX sed doesn't support -null-data so can't use -print0 $(VFILED): $(BUILD_VFILES) $(COQDEP) | all-src $(SHOW)'COQDEP VFILES' $(HIDE)find theories $(addprefix user-contrib/, $(USERCONTRIBDIRS)) -type f -name '*.v' | \ sed 's|^|$(VO_OUT_DIR)|' | \ xargs $(COQDEP) -boot $(DYNDEP) -R $(VO_OUT_DIR)theories Coq -Q $(VO_OUT_DIR)user-contrib "" $(PLUGININCLUDES) > "$@" # To speed-up things a bit, let's dissuade make to attempt rebuilding makefiles Makefile $(wildcard Makefile.*) config/Makefile : ; # Final catch-all rule. # Usually, 'make' would display such an error itself. # But if the target has some declared dependencies (e.g. in a .d) # but no building rule, 'make' succeeds silently (see bug #4812). 
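A usage sketch for the per-file checking hook wired into the generic `.vo` rule above (any non-empty value of `VALIDATE` enables it; `world` is just an example target):

    $ make world VALIDATE=1     # run coqchk -norec on every .vo right after it is built
    $ make validate             # or check all built .vo files in a single coqchk run

The catch-all `%:` rule that follows turns the silent-success case described in the comment above into an explicit error.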
%: @echo "Error: no rule to make target $@ (or missing .PHONY)" && false # For emacs: # Local Variables: # mode: makefile-gmake # End: coq-8.15.0/Makefile.ci000066400000000000000000000056571417001151100144250ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## /dev/null)) FLOCK:=_build/default/tools/flock/coq_flock.exe _DBUILD_DEPS+=$(FLOCK) $(FLOCK): tools/flock/coq_flock.ml tools/flock/flock.c $(SHOW)'DUNE $@' $(HIDE)dune build --root . $@ else FLOCK:=flock endif # Standard DESTDIR variable as used in Debian and coq_makefile, will # have make install prefixing all install targets with DESTDIR DESTDIR ?= ########################################################################### # VO build directory ########################################################################### # slash at the end allows us to define this variable as empty, be # careful, the slash is mandatory. # # Note, we can use this as _build/default , but unfortunately dune # will remove the .vos files as they are not recognized as targets BUILD_OUT_DIR=_build_vo/default/ VO_OUT_DIR=$(BUILD_OUT_DIR)/lib/coq/ LEGACY_BIN_DIR=bin $(BUILD_OUT_DIR) $(VO_OUT_DIR): $(SHOW)'MKDIR BUILD_OUT' $(HIDE)mkdir -p $(BUILD_OUT_DIR) $(HIDE)mkdir -p $(BUILD_OUT_DIR)/lib/coq $(HIDE)ln -s $(shell pwd)/_build/install/default/bin/ $(LEGACY_BIN_DIR) $(HIDE)ln -s $(shell pwd)/_build/install/default/bin/ $(BUILD_OUT_DIR)/bin $(HIDE)ln -s $(shell pwd)/_build/install/default/lib/coq-core/ $(BUILD_OUT_DIR)/lib/coq-core $(HIDE)ln -s $(shell pwd)/_build/install/default/lib/coqide-server/ $(BUILD_OUT_DIR)/lib/coqide-server $(HIDE)ln -s $(shell pwd)/_build/install/default/lib/stublibs/ $(BUILD_OUT_DIR)/lib/stublibs ########################################################################### # Executables ########################################################################### ifeq ($(BEST),byte) COQC:=$(CBIN)/coqc.byte$(EXE) export CAML_LD_LIBRARY_PATH:=$(shell echo $(CONTEXT)/lib/stublibs:$$CAML_LD_LIBRARY_PATH) else COQC:=$(CBIN)/coqc$(EXE) endif COQTOPEXE:=$(CBIN)/coqtop$(EXE) TOPBINOPT:=$(addsuffix .opt$(EXE), $(addprefix $(CBIN)/, coqproofworker coqtacticworker coqqueryworker)) $(COQTOPEXE) COQDEP:=$(CBIN)/coqdep$(EXE) DOC_GRAM:=_build/default/doc/tools/docgram/doc_grammar.exe COQMAKEFILE:=$(CBIN)/coq_makefile$(EXE) COQMAKEFILEIN:=$(BCONTEXT)/tools/CoqMakefile.in COQTEX:=$(CBIN)/coq-tex$(EXE) COQWC:=$(CBIN)/coqwc$(EXE) COQDOC:=$(CBIN)/coqdoc$(EXE) COQNATIVE:=$(CBIN)/coqnative$(EXE) COQDOCSTY:=$(CONTEXT)/lib/coq-core/tools/coqdoc/coqdoc.sty COQDOCCSS:=$(CONTEXT)/lib/coq-core/tools/coqdoc/coqdoc.css COQWORKMGR:=$(CBIN)/coqworkmgr$(EXE) COQMAKE_ONE_TIME_FILE:=tools/make-one-time-file.py COQTIME_FILE_MAKER:=tools/TimeFileMaker.py COQMAKE_BOTH_TIME_FILES:=tools/make-both-time-files.py COQMAKE_BOTH_SINGLE_TIMING_FILES:=tools/make-both-single-timing-files.py VOTOUR:=$(CBIN)/votour$(EXE) OCAMLLIBDEP:=$(CBIN)/ocamllibdep$(EXE) USERCONTRIBDIRS:=Ltac2 CHICKEN:=$(CBIN)/coqchk$(EXE) TOOLS:=$(VOTOUR) $(COQDOC) $(COQDOCSTY) $(COQDOCCSS) $(COQWC) $(COQMAKEFILE) $(COQMAKEFILEIN) $(COQNATIVE) CSDPCERT:=$(CBIN)/csdpcert$(EXE) ifeq ($(origin COQ_SRC_DIR),undefined) COQ_SRC_DIR=. 
endif COQ_CM_LIBS=coqpp lib clib kernel library engine pretyping gramlib interp printing parsing proofs tactics vernac stm toplevel topbin tools ML_SOURCE_DIRS=$(addprefix $(COQ_SRC_DIR)/,$(COQ_CM_LIBS)) ALL_ML_SOURCE_FILES=$(shell find $(ML_SOURCE_DIRS) -name '*.ml' -or -name '*.mli' -or -name '*.c' -or -name '*.h') DOCGRAM_SOURCE_FILES=$(shell find $(addprefix $(COQ_SRC_DIR)/, doc/tools/docgram) -name '*.ml' -or -name '*.mlg') # Override for developer build [to get warn-as-error for example] _DDISPLAY?=quiet _DPROFILE?=$(CONFIGURE_DPROFILE) _DOPT:=--display=$(_DDISPLAY) $(_DPROFILE) _DBUILD:=$(FLOCK) .dune.lock dune build $(_DOPT) # We rerun dune when any of the source files have changed # touch is needed for all targets in `_build` as make won't track # symlink's date correctly [and dune won't update the mtime as it # doesn't use it as the main criteria], so if we didn't touch an # updated mtime in a source file may not trigger a dune rebuild it it # didn't change hash, thus the mtime of the target would confuse make. $(CBIN)/%: $(ALL_ML_SOURCE_FILES) $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ $(CSHARE)/%: $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ ALL_PLUGIN_SOURCE_FILES=$(shell find $(COQ_SRC_DIR)/plugins -name '*.ml' -or -name '*.mli' -or -name '*.mlg') $(ALL_ML_SOURCE_FILES) _build/default/plugins/%.cmxs: $(ALL_PLUGIN_SOURCE_FILES) $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ _build/default/plugins/%.cma: $(ALL_PLUGIN_SOURCE_FILES) $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ # Only used for the test-suite in local mode _build/default/tools/%: $(ALL_ML_SOURCE_FILES) $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ _build/install/default/lib/coq-core/plugins/%.cmxs: $(ALL_PLUGIN_SOURCE_FILES) $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ _build/install/default/lib/stublibs/%.so: $(ALL_ML_SOURCE_FILES) $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ _build/install/default/lib/coq/%.cmi: $(ALL_ML_SOURCE_FILES) $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ _build/install/default/lib/coq-core/tools/coqdoc/coqdoc.sty: $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ _build/install/default/lib/coq-core/tools/coqdoc/coqdoc.css: $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ _build/default/%.install: $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ .PHONY: all-src ALL_SOURCE_FILES=$(ALL_PLUGIN_SOURCE_FILES) # NB includes $(ALL_ML_SOURCE_FILES) all-src: $(ALL_SOURCE_FILES) $(_DBUILD_DEPS) $(SHOW)'DUNE sources' $(HIDE)$(_DBUILD) @all-src # For docgram _build/default/doc/tools/%: $(ALL_ML_SOURCE_FILES) $(DOCGRAM_SOURCE_FILES) $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ PLUGINTUTO := doc/plugin_tutorial revision: $(_DBUILD_DEPS) $(SHOW)'DUNE $@' $(HIDE)$(_DBUILD) $@ $(HIDE)touch $@ # For emacs: # Local Variables: # mode: makefile # End: coq-8.15.0/Makefile.dev000066400000000000000000000060141417001151100145740ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## doc/common/version.tex ### Changelog $(DOC_BUILD_DIR)/unreleased.rst: $(wildcard doc/changelog/00-title.rst doc/changelog/*/*.rst) $(SHOW)'AGGREGATE $@' $(HIDE)mkdir -p $(DOC_BUILD_DIR) $(HIDE)cat 
doc/changelog/00-title.rst doc/changelog/*/*.rst > $@ ###################################################################### # Standard library ###################################################################### DOCLIBS=-R $(VO_OUT_DIR)theories Coq -Q $(VO_OUT_DIR)user-contrib/Ltac2 Ltac2 ### Standard library (browsable html format) ifdef QUICK $(DOC_STDLIB_HTML_DIR)/genindex.html: else $(DOC_STDLIB_HTML_DIR)/genindex.html: | $(COQDOC) $(COQDOCCSS) $(ALLVO) endif - rm -rf $(DOC_STDLIB_HTML_DIR) $(MKDIR) -p $(DOC_STDLIB_HTML_DIR) $(SHOW)'COQDOC VFILES' $(HIDE)export COQLIB=$(CONTEXT)/lib/coq-core && $(COQDOC) -q -d $(DOC_STDLIB_HTML_DIR) --with-header doc/common/styles/html/$(HTMLSTYLE)/header.html --with-footer doc/common/styles/html/$(HTMLSTYLE)/footer.html --multi-index --html -g \ $(DOCLIBS) $(BUILD_VFILES) mv $(DOC_STDLIB_HTML_DIR)/index.html $(DOC_STDLIB_HTML_DIR)/genindex.html $(DOC_STDLIB_DIR)/index-list.html.template: doc/stdlib/index-list.html.template $(HIDE)mkdir -p $(DOC_STDLIB_DIR) $(HIDE)cp -a $< $@ $(DOC_STDLIB_DIR)/index-list.html: $(DOC_STDLIB_DIR)/index-list.html.template doc/stdlib/make-library-index ./doc/stdlib/make-library-index $(DOC_STDLIB_DIR)/index-list.html doc/stdlib/hidden-files $(DOC_STDLIB_HTML_DIR)/index.html: $(DOC_STDLIB_HTML_DIR)/genindex.html $(DOC_STDLIB_DIR)/index-list.html $(MKDIR) -p $(DOC_STDLIB_HTML_DIR) cat doc/common/styles/html/$(HTMLSTYLE)/header.html $(DOC_STDLIB_DIR)/index-list.html > $@ cat doc/common/styles/html/$(HTMLSTYLE)/footer.html >> $@ ### Standard library (light version, full version is definitely too big) ifdef QUICK $(DOC_STDLIB_DIR)/Library.coqdoc.tex: else $(DOC_STDLIB_DIR)/Library.coqdoc.tex: | $(COQDOC) $(COQDOCSTY) $(THEORIESLIGHTVO) endif export COQLIB=$(CONTEXT)/lib/coq-core && $(COQDOC) -q -boot --gallina --body-only --latex --stdout \ -R $(VO_OUT_DIR)theories Coq $(THEORIESLIGHTVO:.$(VO)=.v) >> $@ SHOW_LATEX_MESSAGES:=$(shell pwd)/doc/tools/show_latex_messages $(DOC_STDLIB_DIR)/Library.dvi: $(DOCCOMMON) $(DOC_STDLIB_DIR)/Library.coqdoc.tex $(DOC_STDLIB_DIR)/Library.tex (cd $(DOC_STDLIB_DIR);\ $(LATEX) -interaction=batchmode Library;\ $(LATEX) -interaction=batchmode Library > /dev/null;\ $(SHOW_LATEX_MESSAGES) -no-overfull Library.log) $(DOC_STDLIB_DIR)/Library.pdf: $(DOCCOMMON) $(DOC_STDLIB_DIR)/Library.coqdoc.tex $(DOC_STDLIB_DIR)/Library.dvi (cd $(DOC_STDLIB_DIR);\ $(PDFLATEX) -interaction=batchmode Library;\ $(SHOW_LATEX_MESSAGES) -no-overfull Library.log) $(DOC_STDLIB_DIR)/Library.tex: doc/stdlib/Library.tex $(HIDE)cp -a $< $@ ### Standard library (full version if you're crazy enough to try) $(DOC_STDLIB_DIR)/FullLibrary.tex: $(DOC_STDLIB_DIR)/Library.tex sed -e 's/Library.coqdoc/FullLibrary.coqdoc/g;s/\\begin{document}/\\newcommand{\\textlambda}{\\ensuremath{\\lambda}}\\newcommand{\\textPi}{\\ensuremath{\\Pi}}\\begin{document}/' $< > $@ ifdef QUICK $(DOC_STDLIB_DIR)/FullLibrary.coqdoc.tex: export COQLIB=$(CONTEXT)/lib/coq-core && $(COQDOC) -q -boot --gallina --body-only --latex --stdout --utf8 \ $(DOCLIBS) $(BUILD_VFILES) > $@ sed -i.tmp -e 's///g' $@ && rm $@.tmp else $(DOC_STDLIB_DIR)/FullLibrary.coqdoc.tex: $(COQDOC) $(COQDOCSTY) $(ALLVO) export COQLIB=$(CONTEXT)/lib/coq-core && $(COQDOC) -q -boot --gallina --body-only --latex --stdout --utf8 \ $(DOCLIBS) $(BUILD_VFILES) > $@ sed -i.tmp -e 's///g' $@ && rm $@.tmp endif $(DOC_STDLIB_DIR)/FullLibrary.dvi: $(DOCCOMMON) $(DOC_STDLIB_DIR)/FullLibrary.coqdoc.tex $(DOC_STDLIB_DIR)/FullLibrary.tex (cd $(DOC_STDLIB_DIR);\ $(LATEX) -interaction=batchmode FullLibrary;\ 
$(LATEX) -interaction=batchmode FullLibrary > /dev/null;\ $(SHOW_LATEX_MESSAGES) -no-overfull FullLibrary.log) $(DOC_STDLIB_DIR)/FullLibrary.pdf: $(DOCCOMMON) $(DOC_STDLIB_DIR)/FullLibrary.coqdoc.tex $(DOC_STDLIB_DIR)/FullLibrary.dvi (cd $(DOC_STDLIB_DIR);\ $(PDFLATEX) -interaction=batchmode FullLibrary;\ $(SHOW_LATEX_MESSAGES) -no-overfull FullLibrary.log) ###################################################################### # Install all documentation files ###################################################################### .PHONY: install-doc install-doc-meta install-doc-html install-doc-printable \ install-doc-sphinx install-doc-stdlib-html # Due to Windows paths not starting with / we can't just set the # default to / and always use --destdir ifeq ($(DESTDIR),) DOCDIRDEST=$(DOCDIR)/coq else DOCDIRDEST=$(DESTDIR)/$(DOCDIR)/coq endif install-doc: install-doc-meta install-doc-html install-doc-printable install-doc-meta: $(MKDIR) $(DOCDIRDEST) $(INSTALLLIB) doc/LICENSE $(DOCDIRDEST)/LICENSE.doc install-doc-html: install-doc-stdlib-html install-doc-sphinx install-doc-stdlib-html: $(MKDIR) $(DOCDIRDEST)/html/stdlib $(INSTALLLIB) $(DOC_STDLIB_HTML_DIR)/* $(DOCDIRDEST)/html/stdlib install-doc-printable: $(MKDIR) $(DOCDIRDEST)/ps $(DOCDIRDEST)/pdf $(INSTALLLIB) $(DOC_STDLIB_DIR)/Library.pdf $(DOCDIRDEST)/pdf $(INSTALLLIB) $(DOC_STDLIB_DIR)/Library.ps $(DOCDIRDEST)/ps install-doc-sphinx: $(MKDIR) $(DOCDIRDEST)/sphinx (for d in html latex; do \ for f in `cd doc/sphinx/_build/$$d && find . -type f`; do \ $(MKDIR) $$(dirname $(DOCDIRDEST)/sphinx/$$d/$$f);\ $(INSTALLLIB) doc/sphinx/_build/$$d/$$f $(DOCDIRDEST)/sphinx/$$d/$$f;\ done; done) ###################################################################### # doc_grammar tool ###################################################################### # List mlg files explicitly to avoid ordering problems (across # different installations / make versions). 
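As a usage sketch for the documentation install targets above (the staging path is a placeholder; `DESTDIR` and the `install-doc*` targets come from the rules just shown):

    $ make install-doc DESTDIR=/tmp/coq-stage
    $ make install-doc-stdlib-html      # only the coqdoc-generated stdlib HTML

Everything lands under `$(DESTDIR)/$(DOCDIR)/coq`, mirroring the `DOCDIRDEST` computation above. The explicit `DOC_MLGS` file list announced by the preceding comment follows.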
DOC_MLGS := \ parsing/g_constr.mlg parsing/g_prim.mlg \ toplevel/g_toplevel.mlg \ vernac/g_proofs.mlg vernac/g_vernac.mlg \ plugins/btauto/g_btauto.mlg \ plugins/cc/g_congruence.mlg \ plugins/derive/g_derive.mlg \ plugins/extraction/g_extraction.mlg \ plugins/firstorder/g_ground.mlg \ plugins/funind/g_indfun.mlg \ plugins/ltac/coretactics.mlg plugins/ltac/extraargs.mlg plugins/ltac/extratactics.mlg \ plugins/ltac/g_auto.mlg plugins/ltac/g_class.mlg plugins/ltac/g_eqdecide.mlg \ plugins/ltac/g_ltac.mlg plugins/ltac/g_obligations.mlg plugins/ltac/g_rewrite.mlg \ plugins/ltac/g_tactic.mlg plugins/ltac/profile_ltac_tactics.mlg \ plugins/micromega/g_micromega.mlg plugins/micromega/g_zify.mlg \ plugins/nsatz/g_nsatz.mlg \ plugins/ring/g_ring.mlg \ plugins/rtauto/g_rtauto.mlg \ plugins/ssr/ssrparser.mlg plugins/ssr/ssrvernac.mlg \ plugins/ssrmatching/g_ssrmatching.mlg \ plugins/syntax/g_number_string.mlg \ plugins/ltac2/g_ltac2.mlg DOC_EDIT_MLGS := $(wildcard doc/tools/docgram/*.edit_mlg) DOC_RSTS := $(wildcard doc/sphinx/*/*.rst) $(wildcard doc/sphinx/*/*/*.rst) doc/tools/docgram/fullGrammar: $(DOC_GRAM) $(DOC_MLGS) $(SHOW)'DOC_GRAM' $(HIDE)$(DOC_GRAM) -short -no-warn $(DOC_MLGS) #todo: add a dependency of sphinx on updated_rsts when we're ready doc/tools/docgram/orderedGrammar doc/tools/docgram/updated_rsts: doc/tools/docgram/fullGrammar $(DOC_GRAM) $(DOC_EDIT_MLGS) $(SHOW)'DOC_GRAM_RSTS' $(HIDE)$(DOC_GRAM) $(DOCGRAMWARNFLAG) -check-cmds -check-tacs $(DOC_MLGS) $(DOC_RSTS) .PRECIOUS: doc/tools/docgram/orderedGrammar doc/tools/docgram/updated_rsts: doc/tools/docgram/orderedGrammar .PHONY: doc_gram doc_gram_verify doc_gram_rsts doc_gram: doc/tools/docgram/fullGrammar doc_gram_verify: $(DOC_GRAM) $(DOC_MLGS) $(SHOW)'DOC_GRAM_VERIFY' $(HIDE)$(DOC_GRAM) -no-warn -verify $(DOC_MLGS) $(DOC_RSTS) doc_gram_rsts: doc/tools/docgram/updated_rsts # For emacs: # Local Variables: # mode: makefile # End: coq-8.15.0/Makefile.dune000066400000000000000000000145451417001151100147610ustar00rootroot00000000000000# -*- mode: makefile -*- # Dune Makefile for Coq .PHONY: help help-install states world watch check # Main developer targets .PHONY: refman-html refman-pdf stdlib-html apidoc # Documentation targets .PHONY: test-suite dev-targets .PHONY: fmt ocheck ireport clean # Maintenance targets .PHONY: voboot release install # Added just not to break old scripts # use DUNEOPT=--display=short for a more verbose build # DUNEOPT=--display=short help: @echo "" @echo "Welcome to Coq's Dune-based build system. 
Common developer targets are:" @echo "" @echo " - states: build a minimal functional coqtop" @echo " - world: build all public binaries and libraries" @echo " - watch: build all public binaries and libraries [continuous build]" @echo " - check: build all ML files as fast as possible" @echo " - test-suite: run Coq's test suite [env NJOBS=N to set job parallelism]" @echo "" @echo " Note: running ./configure is not recommended," @echo " see dev/doc/build-system.dune.md for more info" @echo " Note: these targets produce a developer build, not suitable" @echo " for distribution to end-users or install" @echo "" @echo " To run an \$$app \\in {coqc,coqtop,coqbyte,coqide}:" @echo "" @echo " - use 'dune exec -- dev/shim/\$$app-prelude args'" @echo " Example: 'dune exec -- dev/shim/coqc-prelude file.v'" @echo "" @echo " Documentation targets:" @echo "" @echo " - refman-html: build Coq's reference manual [HTML version]" @echo " - refman-pdf: build Coq's reference manual [PDF version]" @echo " - stdlib-html: build Coq's Stdlib documentation [HTML version]" @echo " - apidoc: build ML API documentation" @echo "" @echo " Miscellaneous targets:" @echo "" @echo " - fmt: run ocamlformat on the codebase" @echo " - ocheck: build for all supported OCaml versions [requires OPAM]" @echo " - ireport: build with optimized flambda settings and emit an inline report" @echo " - clean: remove build directory and autogenerated files" @echo " - help: show this message" @echo "" @echo " Type 'make help-install' for installation instructions" @echo " Type 'make dev-targets' for more developer targets" dev-targets: @echo "" @echo "In order to get a functional Coq install layout, the world target is required." @echo "However, This is often inconvenient for developers, due to the large amount of" @echo "files that world will build. We provide some useful subtargets here:" @echo "" @echo " - theories-foo: for each directory theories/Foo, build the vo files for it and set them up in _build/install/default." @echo " For instance the init target builds the prelude, combined with coq-core.install it produces a fully functional layout in _build/install/default." help-install: @echo "" @echo "The Dune-based Coq build is split in packages; see Dune and dev/doc" @echo "documentation for more details. A quick install of Coq alone can done with" @echo "" @echo " $ ./configure -prefix " @echo " $ dune build -p coq-core,coq-stdlib && dune install coq-core coq-stdlib" @echo "" @echo " Provided opam/dune packages are:" @echo "" @echo " - coq-core: base Coq package, toplevel compilers, plugins, tools, no stdlib, no GTK" @echo " - coq-stdlib: Coq's standard library" @echo " - coqide-server: XML protocol language server" @echo " - coqide: CoqIDE gtk application" @echo " - coq: meta package depending on coq-core coq-stdlib" @echo "" @echo " To build a package, you can use:" @echo "" @echo " - 'dune build package.install' : build package in developer mode" @echo " - 'dune build -p package' : build package in release mode" @echo "" @echo " Packages _must_ be installed only if built using release mode, to install a package use: " @echo "" @echo " - 'dune install $install_opts package'" @echo "" @echo " Note that building a package in release mode ignores other packages present in" @echo " the worktree. See Dune documentation for more information." 
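A minimal developer loop using the targets documented in the help text above (a sketch: `test.v` is a placeholder file, and `NJOBS` is the parallelism knob mentioned for the test suite):

    $ make -f Makefile.dune states                  # minimal working coqtop
    $ dune exec -- dev/shim/coqc-prelude test.v     # compile a file with the freshly built coqc
    $ make -f Makefile.dune check                   # fastest way to typecheck all ML code
    $ NJOBS=4 make -f Makefile.dune test-suite      # run the test suite with 4 jobs

The corresponding target definitions follow.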
voboot: @echo "This target is empty and not needed anymore" states: dune build $(DUNEOPT) dev/shim/coqtop-prelude NONDOC_INSTALL_TARGETS:=coq-core.install coq-stdlib.install coqide-server.install coqide.install world: dune build $(DUNEOPT) $(NONDOC_INSTALL_TARGETS) watch: dune build $(DUNEOPT) $(NONDOC_INSTALL_TARGETS) -w check: dune build $(DUNEOPT) @check test-suite: dune runtest --no-buffer $(DUNEOPT) refman-html: dune build --no-buffer @refman-html refman-pdf: dune build --no-buffer @refman-pdf stdlib-html: dune build @stdlib-html apidoc: dune build $(DUNEOPT) @doc release: @echo "release target is deprecated, use dune directly" dune build $(DUNEOPT) -p coq # We define this target as to override Make's built-in one install: @echo "To install Coq using dune, use 'dune build -p P && dune install P'" @echo "where P is any of the packages defined by opam files in the root dir" @false fmt: dune build @fmt --auto-promote ocheck: dune build $(DUNEOPT) @install --workspace=dev/dune-workspace.all ireport: dune clean dune build $(DUNEOPT) @install --profile=ireport clean: dune clean # ci-* targets CI_PURE_DUNE:=1 export CI_PURE_DUNE include Makefile.ci # Custom targets to create subsets of the world target but with less # compiled files. This is desired when we want to have our Coq Dune # build with Coq developments that are not dunerized and thus still # expect an install layout with a working Coq setup, but smaller than # world. # # Unfortunately, Dune still lacks the capability to refer to install # targets in rules, see https://github.com/ocaml/dune/issues/3192 ; # thus we can't simply yet use `%{pkg:coq:theories/Arith/Arith.vo` to # have the rule install the target, we thus imitate such behavior # using make as a helper. # $(1) is the directory (theories/Foo/) # $(2) is the name (foo) define subtarget = .PHONY: theories-$(2) $(2)_FILES=$$(wildcard $(1)*.v) $(2)_FILES_PATH=$$(addprefix _build/install/default/lib/coq/, $$($(2)_FILES:.v=.vo)) theories-$(2): @echo "DUNE $(1)*.vo" @dune build $$($(2)_FILES_PATH) endef $(foreach subdir,$(wildcard theories/*/),$(eval $(call subtarget,$(subdir),$(shell echo $(subst /,,$(subst theories/,,$(subdir))) | tr A-Z a-z)))) # Other common dev targets: # # dune build coq-core.install # dune build coq.install # dune build coqide.install # # Packaging / OPAM targets: # # dune -p coq @install # dune -p coqide @install coq-8.15.0/Makefile.ide000066400000000000000000000044301417001151100145570ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## = 2.9, Coq's configure will set # the DUNE_29_PLUS=yes` variable to use the call with the right options. # # Additionally, you can also add --libdir=$(COQLIBINSTALL) if required # by your distribution, but we recommend using the default configured # with Dune. install-coq: install-dune install-library # This used to be overriden to fix very old bugs with install INSTALLSH=./install.sh ifeq ($(DESTDIR),) COQLIBINSTALLDEST=$(COQLIBINSTALL) else COQLIBINSTALLDEST=$(DESTDIR)/$(COQLIBINSTALL) endif # NB: some files don't produce native files (eg Ltac2 files) as they # don't have any Coq definitions. 
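As an illustration of the generated per-directory targets described under dev-targets above (a sketch: the name `theories-init` is what the lower-casing in the `define` below should produce for `theories/Init`, and the resulting layout sits in `_build/install/default`):

    $ make -f Makefile.dune theories-init      # just the prelude .vo files
    $ dune build coq-core.install              # plus binaries and plugins for a usable layout

The parameter comments and the `define` that generate these targets follow.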
Makefile can't predict that so we # use || true vos build is bugged in -quick mode, see #11195 install-library: $(SHOW)'INSTALL VOFILES' $(HIDE)$(MKDIR) $(COQLIBINSTALLDEST) $(HIDE)$(INSTALLSH) $(VO_OUT_DIR) $(COQLIBINSTALLDEST) $(ALLVO:.$(VO)=.vo) $(HIDE)$(INSTALLSH) $(VO_OUT_DIR) $(COQLIBINSTALLDEST) $(ALLVO:.$(VO)=.vos) || true ifneq ($(NATIVECOMPUTE),) $(HIDE)$(INSTALLSH) $(VO_OUT_DIR) $(COQLIBINSTALLDEST) $(NATIVEFILES) || true endif $(HIDE)$(INSTALLSH) $(VO_OUT_DIR) $(COQLIBINSTALLDEST) $(VFILES) $(HIDE)$(INSTALLSH) $(VO_OUT_DIR) $(COQLIBINSTALLDEST) $(GLOBFILES) $(HIDE)$(MKDIR) $(COQLIBINSTALLDEST)/user-contrib # TODO ifeq ($(HASCOQIDE),no) install-coqide: else ifdef DUNE_29_PLUS install-coqide: $(BCONTEXT)/coqide.install dune install $(_DOPT) $(DESTDIRARG) --mandir="$(MANDIR)" --prefix="$(COQPREFIX)" --etcdir="$(CONFIGDIR)" --docdir="$(DOCDIR)" coqide else dune install $(_DOPT) $(DESTDIRARG) --mandir="$(MANDIR)" --prefix="$(COQPREFIX)" coqide endif endif # For emacs: # Local Variables: # mode: makefile # End: coq-8.15.0/Makefile.make000066400000000000000000000205451417001151100147400ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## = 3.81. # # This Makefile is now separated into Makefile.{common,build,doc}. # You won't find Makefiles in sub-directories and this is done on purpose. # If you are not yet convinced of the advantages of a single Makefile, please # read # http://aegis.sourceforge.net/auug97.pdf # before complaining. # # When you are working in a subdir, you can compile without moving to the # upper directory using "make -C ..", and the output is still understood # by Emacs' next-error. # # Specific command-line options to this Makefile: # # make VERBOSE=1 # restore the raw echoing of commands # make NO_RECALC_DEPS=1 # avoid recomputing dependencies # # Nota: the 1 above can be replaced by any non-empty value # # ---------------------------------------------------------------------- # See dev/doc/build-system*.txt for more details/FAQ about this Makefile # ---------------------------------------------------------------------- ########################################################################### # File lists ########################################################################### # NB: due to limitations in Win32, please refrain using 'export' too much # to communicate between make sub-calls (in Win32, 8kb max per env variable, # 32kb total) # !! Before using FIND_SKIP_DIRS, please read how you should in the !! # !! FIND_SKIP_DIRS section of dev/doc/build-system.dev.txt !! # "-not -name ." to avoid skipping everything since we "find ." # "-type d" to be able to find .merlin.in files FIND_SKIP_DIRS:=-not -name . '(' \ -name '.*' -type d -o \ -name 'debian' -o \ -name "$${GIT_DIR}" -o \ -name '_build' -o \ -name '_build_ci' -o \ -name '_build_boot' -o \ -name '_install_ci' -o \ -name 'gramlib' -o \ -name 'user-contrib' -o \ -name 'test-suite' -o \ -name 'plugin_tutorial' \ ')' -prune -o define find $(shell find . 
user-contrib/Ltac2 $(FIND_SKIP_DIRS) '(' -name $(1) ')' -print | sed 's|^\./||') endef define findindir $(shell find $(1) $(FIND_SKIP_DIRS) '(' -name $(2) ')' -print | sed 's|^\./||') endef ########################################################################### # Starting rules ########################################################################### .PHONY: NOARG help noconfig submake .DEFAULT_GOAL:=NOARG NOARG: world byte include Makefile.common help: @echo "Please use either:" @echo " ./configure" @echo " make world" @echo " make install" @echo " make clean" @echo "or make archclean" @echo "For make to be verbose, add VERBOSE=1" UNSAVED_FILES:=$(shell find . -name '.\#*v' -o -name '.\#*.ml' -o -name '.\#*.ml?') ifdef UNSAVED_FILES $(error You have unsaved changes in your editor (emacs?) [$(UNSAVED_FILES)]; \ cancel them or save before proceeding. Or your editor crashed. \ Then, you may want to consider whether you want to restore the autosaves) #If you try to simply remove this explicit test, the compilation may #fail later. In particular, if a .#*.v file exists, coqdep fails to #run. endif # Apart from clean and a few misc files, everything will be done in a # sub-call to make on Makefile.build. This way, we avoid doing here # the -include of .d : since they trigger some compilations, we do not # want them for a mere clean. Moreover, we regroup this sub-call in a # common target named 'submake'. This way, multiple user-given goals # (cf the MAKECMDGOALS variable) won't trigger multiple (possibly # parallel) make sub-calls ifdef COQ_CONFIGURED %:: submake ; else %:: noconfig ; endif MAKE_OPTS := --warn-undefined-variable --no-builtin-rules submake: $(MAKE) $(MAKE_OPTS) -f Makefile.build $(MAKECMDGOALS) noconfig: @echo 'Please run ./configure first to continue building with make' >&2 @echo 'or pass option "-f Makefile.dune" to let dune manage the build' >&2; exit 1 # To speed-up things a bit, let's dissuade make to attempt rebuilding makefiles Makefile $(wildcard Makefile.*) config/Makefile : ; ########################################################################### # Cleaning ########################################################################### .PHONY: clean cleankeepvo archclean depclean cleanconfig distclean voclean timingclean clean: depclean voclean legacyclean dune clean cleankeepvo: depclean dune clean archclean: voclean plugin-tutorialclean rm -rf _build _build_boot depclean: find . 
$(FIND_SKIP_DIRS) '(' -name '*.d' ')' -exec rm -f {} + OUT_DIRS=$(wildcard $(VO_OUT_DIR)) cacheclean: find $(OUT_DIRS) test-suite -name '.*.aux' -exec rm -f {} + cleanconfig: rm -f config/Makefile config/coq_config.ml dev/ocamldebug-coq distclean: clean cleanconfig cacheclean timingclean voclean: find $(OUT_DIRS) test-suite \( -name '*.vo' -o -name '*.vio' -o -name '*.vos' -o -name '*.vok' -o -name '*.glob' \) -exec rm -f {} + find $(OUT_DIRS) test-suite -name .coq-native -empty -exec rm -rf {} + timingclean: find $(OUT_DIRS) test-suite \( -name '*.v.timing' -o -name '*.v.before-timing' \ -o -name "*.v.after-timing" -o -name "*.v.timing.diff" -o -name "time-of-build.log" \ -o -name "time-of-build-before.log" -o -name "time-of-build-after.log" \ -o -name "time-of-build-pretty.log" -o -name "time-of-build-both.log" \) -exec rm -f {} + plugin-tutorialclean: +$(MAKE) -C $(PLUGINTUTO) clean # remove files from previous make system that Dune will complain about as unhygenic legacyclean: rm -f META.coq META.coq-core revision ide/coqide/default.bindings ide/coqide/default_bindings_src.exe \ kernel/genOpcodeFiles.exe config/coq_config.ml config/coq_config.py config/Makefile config/dune.c_flags rm -f coqpp/coqpp_parse.ml rm -f coqpp/coqpp_parse.mli coqpp/coqpp_lex.ml ide/coqide/coq_lex.ml \ ide/coqide/coqide_os_specific.ml ide/coqide/protocol/xml_lexer.ml ide/coqide/utf8_convert.ml \ ide/coqide/config_lexer.ml kernel/byterun/coq_arity.h \ kernel/byterun/coq_instruct.h kernel/byterun/coq_jumptbl.h kernel/float64.ml kernel/uint63.ml \ kernel/vmopcodes.ml parsing/g_constr.ml parsing/g_prim.ml toplevel/g_toplevel.ml \ user-contrib/Ltac2/g_ltac2.ml \ vernac/g_proofs.ml plugins/btauto/g_btauto.ml plugins/cc/g_congruence.ml \ plugins/derive/g_derive.ml plugins/extraction/g_extraction.ml plugins/firstorder/g_ground.ml \ plugins/funind/g_indfun.ml plugins/ltac/coretactics.ml plugins/ltac/extraargs.ml plugins/ltac/extratactics.ml \ plugins/ltac/g_auto.ml plugins/ltac/g_class.ml plugins/ltac/g_eqdecide.ml plugins/ltac/g_obligations.ml \ plugins/ltac/profile_ltac_tactics.ml \ plugins/micromega/g_micromega.ml plugins/micromega/g_zify.ml plugins/ltac/g_ltac.ml plugins/nsatz/g_nsatz.ml \ plugins/ring/g_ring.ml plugins/ltac/g_tactic.ml \ plugins/ltac/g_rewrite.ml plugins/rtauto/g_rtauto.ml \ plugins/ssr/ssrvernac.ml plugins/ssrmatching/g_ssrmatching.ml plugins/ssr/ssrparser.ml \ plugins/syntax/g_number_string.ml tools/coqdep_lexer.ml \ tools/coqwc.ml tools/coqdoc/cpretty.ml tools/ocamllibdep.ml vernac/g_vernac.ml find theories \( -name '*.vo' -o -name '*.vio' -o -name '*.vos' -o -name '*.vok' \ -o -name '*.glob' -o -name '*.aux' \) -exec rm -f {} + find . \( -name '*.cmi' -o -name '*.cmx' -o -name '*.o' -o -name '*.a' \ -o -name '*.cmxa' -o -name '*.cmxs' -o -name '*.aux' \) -exec rm -f {} + # Ensure that every compiled file around has a known source file. # This should help preventing weird compilation failures caused by leftover # compiled files after deleting or moving some source files. 
EXISTINGVO:=$(call find, '*.vo') KNOWNVO:=$(patsubst %.v,%.vo,$(call find, '*.v')) ALIENVO:=$(filter-out $(KNOWNVO),$(EXISTINGVO)) ########################################################################### # Continuous Integration Tests ########################################################################### include Makefile.ci coq-8.15.0/Makefile.vofiles000066400000000000000000000050451417001151100154700ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## let prelude = "theories/Init/Prelude.vo" in Util.check_file_else ~dir:Coq_config.coqlibsuffix ~file:prelude (fun () -> if Sys.file_exists (Filename.concat Coq_config.coqlib prelude) then Coq_config.coqlib else fail ())) (* Build layout uses coqlib = coqcorelib *) let guess_coqcorelib lib = if Sys.file_exists (Path.relative lib "plugins") then lib else Path.relative lib "../coq-core" (* Should we fail on double initialization? That seems a way to avoid mis-use for example when we pass command line arguments *) let init () = let lib = guess_coqlib () in let core = Util.getenv_else "COQCORELIB" (fun () -> guess_coqcorelib lib) in { core ; lib } let init () = let { core; lib } = init () in (* debug *) if false then Format.eprintf "core = %s@\n lib = %s@\n%!" core lib; { core; lib } let env_ref = ref None let init () = match !env_ref with | None -> let env = init () in env_ref := Some env; env | Some env -> env let set_coqlib lib = let env = { lib; core = guess_coqcorelib lib } in env_ref := Some env let coqlib { lib; _ } = lib let corelib { core; _ } = core let plugins { core; _ } = Path.relative core "plugins" let stdlib { lib; _ } = Path.relative lib "theories" let user_contrib { lib; _ } = Path.relative lib "user-contrib" let tool { core; _ } tool = Path.(relative (relative core "tools") tool) let revision { core; _ } = Path.relative core "revision" let native_cmi { core; _ } lib = let install_path = Path.relative core lib in if Sys.file_exists install_path then install_path else (* Dune build layout, we need to improve this *) let obj_dir = Format.asprintf ".%s.objs" lib in Filename.(concat (concat (concat core lib) obj_dir) "byte") coq-8.15.0/boot/env.mli000066400000000000000000000076661417001151100146330ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string -> t (** We should gradually add some more functions to handle common dirs here such the theories directories or share files. Abstracting it hereere does allow to use system-specific functionalities *) (** [exists file] checks if [file] exists *) val exists : t -> bool (** String representation *) val to_string : t -> string end (** Coq runtime enviroment, including location of Coq's stdlib *) type t (** [init ()] will initialize the Coq environment. 
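(* Editorial note -- illustrative sketch only, not part of boot/env.ml or its
   interface.  The [init] defined above is memoised through [env_ref], so a
   second call returns the environment computed by the first one, and
   [set_coqlib] replaces it wholesale, re-deriving the coq-core directory via
   [guess_coqcorelib].  The path used below is hypothetical. *)
let _memoisation_sketch () =
  let a = Boot.Env.init () in
  let b = Boot.Env.init () in
  assert (a == b);                      (* same memoised record *)
  Boot.Env.set_coqlib "/opt/coq/lib"    (* hypothetical prefix *)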
*) val init : unit -> t (** [stdlib directory] *) val stdlib : t -> Path.t (** [plugins directory] *) val plugins : t -> Path.t (** [user contrib directory] *) val user_contrib : t -> Path.t (** [tool-specific directory] *) val tool : t -> string -> Path.t (** .cmi files needed for native compilation *) val native_cmi : t -> string -> Path.t (** The location of the revision file *) val revision : t -> Path.t (** coq-core/lib directory, not sure if to keep this *) val corelib : t -> Path.t (** coq/lib directory, not sure if to keep this *) val coqlib : t -> Path.t (** Internal, should be set automatically by passing cmdline args to init; note that this will set both [coqlib] and [corelib] for now. *) val set_coqlib : string -> unit coq-8.15.0/boot/path.ml000066400000000000000000000013271417001151100146120ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Some(name,value)) with _ -> None let with_ic file f = let ic = open_in file in try let rc = f ic in close_in ic; rc with e -> close_in ic; raise e let getenv_from_file name = let base = Filename.dirname Sys.executable_name in try with_ic (base ^ "/coq_environment.txt") (fun ic -> let rec find () = let l = input_line ic in match parse_env_line l with | Some(n,v) when n = name -> v | _ -> find () in find ()) with | Sys_error s -> raise Not_found | End_of_file -> raise Not_found let system_getenv name = try Sys.getenv name with Not_found -> getenv_from_file name let getenv_else s dft = try system_getenv s with Not_found -> dft () (** Add a local installation suffix (unless the suffix is itself absolute in which case the prefix does not matter) *) let use_suffix prefix suffix = if String.length suffix > 0 && suffix.[0] = '/' then suffix else Filename.concat prefix suffix let canonical_path_name p = let current = Sys.getcwd () in try Sys.chdir p; let p' = Sys.getcwd () in Sys.chdir current; p' with Sys_error _ -> (* We give up to find a canonical name and just simplify it... *) Filename.concat current p let coqbin = canonical_path_name (Filename.dirname Sys.executable_name) (** The following only makes sense when executables are running from source tree (e.g. during build or in local mode). *) let coqroot = Filename.dirname coqbin (** [check_file_else ~dir ~file oth] checks if [file] exists in the installation directory [dir] given relatively to [coqroot], which maybe has been relocated. If the check fails, then [oth ()] is evaluated. 
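(* Editorial note -- illustrative usage sketch for the interface above
   (boot/env.mli), not part of the library.  It mirrors how checker/checker.ml,
   further down in this archive, resolves the standard-library, plugins and
   user-contrib directories through [Boot.Env] and [Boot.Path]. *)
let _print_layout () =
  let env = Boot.Env.init () in
  let show p = Boot.Path.to_string p in
  Printf.printf "stdlib:       %s\n" (show (Boot.Env.stdlib env));
  Printf.printf "plugins:      %s\n" (show (Boot.Env.plugins env));
  Printf.printf "user-contrib: %s\n" (show (Boot.Env.user_contrib env))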
Using file system equality seems well enough for this heuristic *) let check_file_else ~dir ~file oth = let path = use_suffix coqroot dir in if Sys.file_exists (Filename.concat path file) then path else oth () coq-8.15.0/checker/000077500000000000000000000000001417001151100137625ustar00rootroot00000000000000coq-8.15.0/checker/analyze.ml000066400000000000000000000316541417001151100157700ustar00rootroot00000000000000(** Headers *) let prefix_small_block = 0x80 let prefix_small_int = 0x40 let prefix_small_string = 0x20 [@@@ocaml.warning "-32"] let code_int8 = 0x00 let code_int16 = 0x01 let code_int32 = 0x02 let code_int64 = 0x03 let code_shared8 = 0x04 let code_shared16 = 0x05 let code_shared32 = 0x06 let code_double_array32_little = 0x07 let code_block32 = 0x08 let code_string8 = 0x09 let code_string32 = 0x0A let code_double_big = 0x0B let code_double_little = 0x0C let code_double_array8_big = 0x0D let code_double_array8_little = 0x0E let code_double_array32_big = 0x0F let code_codepointer = 0x10 let code_infixpointer = 0x11 let code_custom = 0x12 let code_block64 = 0x13 let code_shared64 = 0x14 let code_string64 = 0x15 let code_double_array64_big = 0x16 let code_double_array64_little = 0x17 let code_custom_len = 0x18 let code_custom_fixed = 0x19 [@@@ocaml.warning "-37"] type code_descr = | CODE_INT8 | CODE_INT16 | CODE_INT32 | CODE_INT64 | CODE_SHARED8 | CODE_SHARED16 | CODE_SHARED32 | CODE_DOUBLE_ARRAY32_LITTLE | CODE_BLOCK32 | CODE_STRING8 | CODE_STRING32 | CODE_DOUBLE_BIG | CODE_DOUBLE_LITTLE | CODE_DOUBLE_ARRAY8_BIG | CODE_DOUBLE_ARRAY8_LITTLE | CODE_DOUBLE_ARRAY32_BIG | CODE_CODEPOINTER | CODE_INFIXPOINTER | CODE_CUSTOM | CODE_BLOCK64 | CODE_SHARED64 | CODE_STRING64 | CODE_DOUBLE_ARRAY64_BIG | CODE_DOUBLE_ARRAY64_LITTLE | CODE_CUSTOM_LEN | CODE_CUSTOM_FIXED let code_max = 0x19 let magic_number = "\132\149\166\190" (** Memory reification *) module LargeArray : sig type 'a t val empty : 'a t val length : 'a t -> int val make : int -> 'a -> 'a t val get : 'a t -> int -> 'a val set : 'a t -> int -> 'a -> unit end = struct let max_length = Sys.max_array_length type 'a t = 'a array array * 'a array (** Invariants: - All subarrays of the left array have length [max_length]. - The right array has length < [max_length]. 
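(* Editorial note -- illustrative sketch only.  The two helpers defined just
   above in boot/util.ml compose naturally: consult the process environment
   (or coq_environment.txt next to the binary) first, then probe a directory
   relative to the executable, then fall back to a fixed default; boot/env.ml
   above uses them in a similar way.  The directory names below are
   hypothetical. *)
let _locate_stdlib_sketch () =
  Util.getenv_else "COQLIB" (fun () ->
      Util.check_file_else
        ~dir:"../lib/coq"                    (* hypothetical relative layout *)
        ~file:"theories/Init/Prelude.vo"
        (fun () -> "/usr/local/lib/coq"))    (* hypothetical fallback *)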
*) let empty = [||], [||] let length (vl, vr) = (max_length * Array.length vl) + Array.length vr let make n x = let k = n / max_length in let r = n mod max_length in let vl = Array.init k (fun _ -> Array.make max_length x) in let vr = Array.make r x in (vl, vr) let get (vl, vr) n = let k = n / max_length in let r = n mod max_length in let len = Array.length vl in if k < len then vl.(k).(r) else if k == len then vr.(r) else invalid_arg "index out of bounds" let set (vl, vr) n x = let k = n / max_length in let r = n mod max_length in let len = Array.length vl in if k < len then vl.(k).(r) <- x else if k == len then vr.(r) <- x else invalid_arg "index out of bounds" end type repr = | RInt of int | Rint64 of Int64.t | RFloat64 of float | RBlock of (int * int) (* tag × len *) | RString of string | RPointer of int | RCode of int type data = | Int of int (* value *) | Ptr of int (* pointer *) | Atm of int (* tag *) | Fun of int (* address *) type obj = | Struct of int * data array (* tag × data *) | Int64 of Int64.t (* Primitive integer *) | Float64 of float (* Primitive float *) | String of string module type Input = sig type t val input_byte : t -> int val input_binary_int : t -> int end module type S = sig type input val parse : input -> (data * obj LargeArray.t) end module Make(M : Input) = struct open M type input = M.t let current_offset = ref 0 let input_byte chan = let () = incr current_offset in input_byte chan let input_binary_int chan = let () = current_offset := !current_offset + 4 in input_binary_int chan let input_char chan = Char.chr (input_byte chan) let input_string len chan = String.init len (fun _ -> input_char chan) let parse_header chan = let () = current_offset := 0 in let magic = input_string 4 chan in let length = input_binary_int chan in let objects = input_binary_int chan in let size32 = input_binary_int chan in let size64 = input_binary_int chan in (magic, length, size32, size64, objects) let input_int8s chan = let i = input_byte chan in if i land 0x80 = 0 then i else i lor ((-1) lsl 8) let input_int8u = input_byte let input_int16s chan = let i = input_byte chan in let j = input_byte chan in let ans = (i lsl 8) lor j in if i land 0x80 = 0 then ans else ans lor ((-1) lsl 16) let input_int16u chan = let i = input_byte chan in let j = input_byte chan in (i lsl 8) lor j let input_int32s chan = let i = input_byte chan in let j = input_byte chan in let k = input_byte chan in let l = input_byte chan in let ans = (i lsl 24) lor (j lsl 16) lor (k lsl 8) lor l in if i land 0x80 = 0 then ans else ans lor ((-1) lsl 31) let input_int32u chan = let i = input_byte chan in let j = input_byte chan in let k = input_byte chan in let l = input_byte chan in (i lsl 24) lor (j lsl 16) lor (k lsl 8) lor l let input_int64s chan = let i = input_byte chan in let j = input_byte chan in let k = input_byte chan in let l = input_byte chan in let m = input_byte chan in let n = input_byte chan in let o = input_byte chan in let p = input_byte chan in let ans = (i lsl 56) lor (j lsl 48) lor (k lsl 40) lor (l lsl 32) lor (m lsl 24) lor (n lsl 16) lor (o lsl 8) lor p in if i land 0x80 = 0 then ans else ans lor ((-1) lsl 63) let input_int64u chan = let i = input_byte chan in let j = input_byte chan in let k = input_byte chan in let l = input_byte chan in let m = input_byte chan in let n = input_byte chan in let o = input_byte chan in let p = input_byte chan in (i lsl 56) lor (j lsl 48) lor (k lsl 40) lor (l lsl 32) lor (m lsl 24) lor (n lsl 16) lor (o lsl 8) lor p let input_header32 chan = let i = 
input_byte chan in let j = input_byte chan in let k = input_byte chan in let l = input_byte chan in let tag = l in let len = (i lsl 14) lor (j lsl 6) lor (k lsr 2) in (tag, len) let input_header64 chan = let i = input_byte chan in let j = input_byte chan in let k = input_byte chan in let l = input_byte chan in let m = input_byte chan in let n = input_byte chan in let o = input_byte chan in let p = input_byte chan in let tag = p in let len = (i lsl 46) lor (j lsl 38) lor (k lsl 30) lor (l lsl 22) lor (m lsl 14) lor (n lsl 6) lor (o lsr 2) in (tag, len) let input_cstring chan : string = let buff = Buffer.create 17 in let rec loop () = match input_char chan with | '\o000' -> Buffer.contents buff | c -> Buffer.add_char buff c |> loop in loop () let input_intL chan : int64 = let i = input_byte chan in let j = input_byte chan in let k = input_byte chan in let l = input_byte chan in let m = input_byte chan in let n = input_byte chan in let o = input_byte chan in let p = input_byte chan in let ( lsl ) x y = Int64.(shift_left (of_int x) y) in let ( lor ) = Int64.logor in (i lsl 56) lor (j lsl 48) lor (k lsl 40) lor (l lsl 32) lor (m lsl 24) lor (n lsl 16) lor (o lsl 8) lor (Int64.of_int p) let input_double_big chan : float = Int64.float_of_bits (input_intL chan) let input_double_little chan : float = let i = input_byte chan in let j = input_byte chan in let k = input_byte chan in let l = input_byte chan in let m = input_byte chan in let n = input_byte chan in let o = input_byte chan in let p = input_byte chan in let ( lsl ) x y = Int64.(shift_left (of_int x) y) in let ( lor ) = Int64.logor in let bits = (p lsl 56) lor (o lsl 48) lor (n lsl 40) lor (m lsl 32) lor (l lsl 24) lor (k lsl 16) lor (j lsl 8) lor (Int64.of_int i) in Int64.float_of_bits bits let parse_object chan = let data = input_byte chan in if prefix_small_block <= data then let tag = data land 0x0F in let len = (data lsr 4) land 0x07 in RBlock (tag, len) else if prefix_small_int <= data then RInt (data land 0x3F) else if prefix_small_string <= data then let len = data land 0x1F in RString (input_string len chan) else if data > code_max then assert false else match (Obj.magic data) with | CODE_INT8 -> RInt (input_int8s chan) | CODE_INT16 -> RInt (input_int16s chan) | CODE_INT32 -> RInt (input_int32s chan) | CODE_INT64 -> RInt (input_int64s chan) | CODE_SHARED8 -> RPointer (input_int8u chan) | CODE_SHARED16 -> RPointer (input_int16u chan) | CODE_SHARED32 -> RPointer (input_int32u chan) | CODE_BLOCK32 -> RBlock (input_header32 chan) | CODE_BLOCK64 -> RBlock (input_header64 chan) | CODE_STRING8 -> let len = input_int8u chan in RString (input_string len chan) | CODE_STRING32 -> let len = input_int32u chan in RString (input_string len chan) | CODE_CODEPOINTER -> let addr = input_int32u chan in for _i = 0 to 15 do ignore (input_byte chan); done; RCode addr | CODE_CUSTOM | CODE_CUSTOM_FIXED -> begin match input_cstring chan with | "_j" -> Rint64 (input_intL chan) | s -> Printf.eprintf "Unhandled custom code: %s" s; assert false end | CODE_DOUBLE_BIG -> RFloat64 (input_double_big chan) | CODE_DOUBLE_LITTLE -> RFloat64 (input_double_little chan) | CODE_DOUBLE_ARRAY32_LITTLE | CODE_DOUBLE_ARRAY8_BIG | CODE_DOUBLE_ARRAY8_LITTLE | CODE_DOUBLE_ARRAY32_BIG | CODE_INFIXPOINTER | CODE_SHARED64 | CODE_STRING64 | CODE_DOUBLE_ARRAY64_BIG | CODE_DOUBLE_ARRAY64_LITTLE | CODE_CUSTOM_LEN -> Printf.eprintf "Unhandled code %04x\n%!" 
data; assert false let parse chan = let (magic, len, _, _, size) = parse_header chan in let () = assert (magic = magic_number) in let memory = LargeArray.make size (Struct ((-1), [||])) in let current_object = ref 0 in let fill_obj = function | RPointer n -> let data = Ptr (!current_object - n) in data, None | RInt n -> let data = Int n in data, None | RString s -> let data = Ptr !current_object in let () = LargeArray.set memory !current_object (String s) in let () = incr current_object in data, None | RBlock (tag, 0) -> (* Atoms are never shared *) let data = Atm tag in data, None | RBlock (tag, len) -> let data = Ptr !current_object in let nblock = Array.make len (Atm (-1)) in let () = LargeArray.set memory !current_object (Struct (tag, nblock)) in let () = incr current_object in data, Some nblock | RCode addr -> let data = Fun addr in data, None | Rint64 i -> let data = Ptr !current_object in let () = LargeArray.set memory !current_object (Int64 i) in let () = incr current_object in data, None | RFloat64 f -> let data = Ptr !current_object in let () = LargeArray.set memory !current_object (Float64 f) in let () = incr current_object in data, None in let rec fill block off accu = if Array.length block = off then match accu with | [] -> () | (block, off) :: accu -> fill block off accu else let data, nobj = fill_obj (parse_object chan) in let () = block.(off) <- data in let block, off, accu = match nobj with | None -> block, succ off, accu | Some nblock -> nblock, 0, ((block, succ off) :: accu) in fill block off accu in let ans = [|Atm (-1)|] in let () = fill ans 0 [] in (ans.(0), memory) end module IChannel = struct type t = in_channel let input_byte = input_byte let input_binary_int = input_binary_int end module IString = struct type t = (string * int ref) let input_byte (s, off) = let ans = Char.code (s.[!off]) in let () = incr off in ans let input_binary_int chan = let i = input_byte chan in let j = input_byte chan in let k = input_byte chan in let l = input_byte chan in let ans = (i lsl 24) lor (j lsl 16) lor (k lsl 8) lor l in if i land 0x80 = 0 then ans else ans lor ((-1) lsl 31) end module PChannel = Make(IChannel) module PString = Make(IString) let parse_channel = PChannel.parse let parse_string s = PString.parse (s, ref 0) let instantiate (p, mem) = let len = LargeArray.length mem in let ans = LargeArray.make len (Obj.repr 0) in (* First pass: initialize the subobjects *) for i = 0 to len - 1 do let obj = match LargeArray.get mem i with | Struct (tag, blk) -> Obj.new_block tag (Array.length blk) | Int64 i -> Obj.repr i | Float64 f -> Obj.repr f | String str -> Obj.repr str in LargeArray.set ans i obj done; let get_data = function | Int n -> Obj.repr n | Ptr p -> LargeArray.get ans p | Atm tag -> Obj.new_block tag 0 | Fun _ -> assert false (* We shouldn't serialize closures *) in (* Second pass: set the pointers *) for i = 0 to len - 1 do match LargeArray.get mem i with | Struct (_, blk) -> let obj = LargeArray.get ans i in for k = 0 to Array.length blk - 1 do Obj.set_field obj k (get_data blk.(k)) done | Int64 _ | Float64 _ | String _ -> () done; get_data p coq-8.15.0/checker/analyze.mli000066400000000000000000000025401417001151100161310ustar00rootroot00000000000000(** Representation of data allocated on the OCaml heap. 
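(* Editorial note -- illustrative round trip through the reifier defined
   above (checker/analyze.ml), assuming it is linked as module [Analyze].
   [parse_string] expects a payload produced by [Marshal.to_string], i.e.
   starting at the magic number checked by [parse_header]; [instantiate]
   then rebuilds the corresponding OCaml value from the reified memory. *)
let _roundtrip_sketch () : int list option =
  let payload = Marshal.to_string (Some [1; 2; 3]) [] in
  let reified = Analyze.parse_string payload in
  Obj.obj (Analyze.instantiate reified)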
*) type data = | Int of int | Ptr of int | Atm of int (* tag *) | Fun of int (* address *) type obj = | Struct of int * data array (* tag × data *) | Int64 of Int64.t (* Primitive integer *) | Float64 of float (* Primitive float *) | String of string module LargeArray : sig type 'a t val empty : 'a t val length : 'a t -> int val make : int -> 'a -> 'a t val get : 'a t -> int -> 'a val set : 'a t -> int -> 'a -> unit end (** A data structure similar to arrays but allowing to overcome the 2^22 length limitation on 32-bit architecture. *) val parse_channel : in_channel -> (data * obj LargeArray.t) val parse_string : string -> (data * obj LargeArray.t) (** {6 Functorized version} *) module type Input = sig type t val input_byte : t -> int (** Input a single byte *) val input_binary_int : t -> int (** Input a big-endian 31-bits signed integer *) end (** Type of inputs *) module type S = sig type input val parse : input -> (data * obj LargeArray.t) (** Return the entry point and the reification of the memory out of a marshalled structure. *) end module Make (M : Input) : S with type input = M.t (** Functorized version of the previous code. *) val instantiate : data * obj LargeArray.t -> Obj.t (** Create the OCaml object out of the reified representation. *) coq-8.15.0/checker/check.ml000066400000000000000000000404771417001151100154050ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* failwith "path_of_dirpath" | l::dir -> {dirpath=List.map Id.to_string dir;basename=Id.to_string l} let pr_dirlist dp = prlist_with_sep (fun _ -> str".") str (List.rev dp) let pr_path sp = match sp.dirpath with [] -> str sp.basename | sl -> pr_dirlist sl ++ str"." ++ str sp.basename (************************************************************************) (*s Modules loaded in memory contain the following informations. They are kept in the global table [libraries_table]. *) type compilation_unit_name = DirPath.t type seg_univ = Univ.ContextSet.t * bool type seg_proofs = Opaqueproof.opaque_proofterm option array type library_t = { library_name : compilation_unit_name; library_filename : CUnix.physical_path; library_compiled : Safe_typing.compiled_library; library_opaques : seg_proofs option; library_deps : (compilation_unit_name * Safe_typing.vodigest) array; library_digest : Safe_typing.vodigest; library_extra_univs : Univ.ContextSet.t } module LibraryOrdered = struct type t = DirPath.t let compare d1 d2 = compare (List.rev (DirPath.repr d1)) (List.rev (DirPath.repr d2)) end module LibrarySet = Set.Make(LibraryOrdered) module LibraryMap = Map.Make(LibraryOrdered) (* This is a map from names to loaded libraries *) let libraries_table = ref LibraryMap.empty (* various requests to the tables *) let find_library dir = LibraryMap.find dir !libraries_table let library_full_filename dir = (find_library dir).library_filename (* If a library is loaded several time, then the first occurrence must be performed first, thus the libraries_loaded_list ... 
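(* Editorial note -- illustrative sketch only, assuming the implementation
   above is available as module [Analyze].  [Analyze.Make] is a functor over
   any byte source; the instance below reads from [bytes], mirroring the
   [IString] instance defined in checker/analyze.ml. *)
module BytesInput = struct
  type t = bytes * int ref
  let input_byte (b, off) =
    let c = Char.code (Bytes.get b !off) in
    incr off; c
  let input_binary_int chan =
    (* big-endian 31-bit signed integer, as required by the Input signature *)
    let i = input_byte chan in
    let j = input_byte chan in
    let k = input_byte chan in
    let l = input_byte chan in
    let n = (i lsl 24) lor (j lsl 16) lor (k lsl 8) lor l in
    if i land 0x80 = 0 then n else n lor ((-1) lsl 31)
end
module PBytes = Analyze.Make (BytesInput)
(* PBytes.parse : bytes * int ref -> Analyze.data * Analyze.obj Analyze.LargeArray.t *)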
*) let register_loaded_library m = libraries_table := LibraryMap.add m.library_name m !libraries_table (* Map from library names to table of opaque terms *) let opaque_tables = ref LibraryMap.empty let access_opaque_table dp i = let t = try LibraryMap.find dp !opaque_tables with Not_found -> assert false in let i = Opaqueproof.repr_handle i in let () = assert (0 <= i && i < Array.length t) in t.(i) let indirect_accessor o = let (sub, ci, dp, i) = Opaqueproof.repr o in let c = access_opaque_table dp i in let c = match c with | None -> CErrors.user_err Pp.(str "Cannot access opaque delayed proof.") | Some c -> c in let (c, prv) = Cooking.cook_constr ci c in let c = Mod_subst.(List.fold_right subst_mps sub c) in (c, prv) let () = Mod_checking.set_indirect_accessor indirect_accessor let check_one_lib admit senv (dir,m) = let md = m.library_compiled in let dig = m.library_digest in (* Look up if the library is to be admitted correct. We could also check if it carries a validation certificate (yet to be implemented). *) let senv = if LibrarySet.mem dir admit then (Flags.if_verbose Feedback.msg_notice (str "Admitting library: " ++ pr_dirpath dir); Safe_checking.unsafe_import (fst senv) md m.library_extra_univs dig), (snd senv) else (Flags.if_verbose Feedback.msg_notice (str "Checking library: " ++ pr_dirpath dir); Safe_checking.import (fst senv) (snd senv) md m.library_extra_univs dig) in register_loaded_library m; senv (*************************************************************************) (*s Load path. Mapping from physical to logical paths etc.*) type logical_path = DirPath.t let load_paths = ref ([],[] : CUnix.physical_path list * logical_path list) let find_logical_path phys_dir = let phys_dir = CUnix.canonical_path_name phys_dir in let physical, logical = !load_paths in match List.filter2 (fun p d -> p = phys_dir) physical logical with | _,[dir] -> dir | _,[] -> default_root_prefix | _,l -> anomaly (Pp.str ("Two logical paths are associated to "^phys_dir^".")) let remove_load_path dir = let physical, logical = !load_paths in load_paths := List.filter2 (fun p d -> p <> dir) physical logical let add_load_path (phys_path,coq_path) = if CDebug.(get_flag misc) then Feedback.msg_notice (str "path: " ++ pr_dirpath coq_path ++ str " ->" ++ spc() ++ str phys_path); let phys_path = CUnix.canonical_path_name phys_path in let physical, logical = !load_paths in match List.filter2 (fun p d -> p = phys_path) physical logical with | _,[dir] -> if coq_path <> dir (* If this is not the default -I . 
to coqtop *) && not (phys_path = CUnix.canonical_path_name Filename.current_dir_name && coq_path = default_root_prefix) then begin (* Assume the user is concerned by library naming *) if dir <> default_root_prefix then Feedback.msg_warning (str phys_path ++ strbrk " was previously bound to " ++ pr_dirpath dir ++ strbrk "; it is remapped to " ++ pr_dirpath coq_path); remove_load_path phys_path; load_paths := (phys_path::fst !load_paths, coq_path::snd !load_paths) end | _,[] -> load_paths := (phys_path :: fst !load_paths, coq_path :: snd !load_paths) | _ -> anomaly (Pp.str ("Two logical paths are associated to "^phys_path^".")) let load_paths_of_dir_path dir = let physical, logical = !load_paths in fst (List.filter2 (fun p d -> d = dir) physical logical) (************************************************************************) (*s Locate absolute or partially qualified library names in the path *) exception LibUnmappedDir exception LibNotFound let locate_absolute_library dir = (* Search in loadpath *) let pref, base = split_dirpath dir in let loadpath = load_paths_of_dir_path pref in if loadpath = [] then raise LibUnmappedDir; try let name = Id.to_string base^".vo" in let _, file = System.where_in_path ~warn:false loadpath name in (dir, file) with Not_found -> (* Last chance, removed from the file system but still in memory *) try (dir, library_full_filename dir) with Not_found -> raise LibNotFound let locate_qualified_library qid = try (* we assume qid is an absolute dirpath *) let loadpath = load_paths_of_dir_path (dir_of_path qid) in if loadpath = [] then raise LibUnmappedDir; let name = qid.basename^".vo" in let path, file = System.where_in_path loadpath name in let dir = extend_dirpath (find_logical_path path) (Id.of_string qid.basename) in (* Look if loaded *) try (dir, library_full_filename dir) with Not_found -> (dir, file) with Not_found -> raise LibNotFound let error_unmapped_dir qid = let prefix = qid.dirpath in user_err (str "Cannot load " ++ pr_path qid ++ str ":" ++ spc () ++ str "no physical path bound to" ++ spc () ++ pr_dirlist prefix ++ str "." 
++ fnl ()) let error_lib_not_found qid = user_err (str "Cannot find library " ++ pr_path qid ++ str " in loadpath.") let try_locate_absolute_library dir = try locate_absolute_library dir with | LibUnmappedDir -> error_unmapped_dir (path_of_dirpath dir) | LibNotFound -> error_lib_not_found (path_of_dirpath dir) let try_locate_qualified_library lib = match lib with | PhysicalFile f -> let () = if not (System.file_exists_respecting_case "" f) then error_lib_not_found { dirpath = []; basename = f; } in let dir = Filename.dirname f in let base = Filename.chop_extension (Filename.basename f) in let dir = extend_dirpath (find_logical_path dir) (Id.of_string base) in (dir, f) | LogicalFile qid -> try locate_qualified_library qid with | LibUnmappedDir -> error_unmapped_dir qid | LibNotFound -> error_lib_not_found qid (************************************************************************) (*s Low-level interning of libraries from files *) let raw_intern_library f = ObjFile.open_in ~file:f (************************************************************************) (* Internalise libraries *) type summary_disk = { md_name : compilation_unit_name; md_deps : (compilation_unit_name * Safe_typing.vodigest) array; md_ocaml : string; } module Dyn = Dyn.Make () type obj = Dyn.t (* persistent dynamic objects *) type lib_objects = (Id.t * obj) list type library_objects = lib_objects * lib_objects type library_disk = { md_compiled : Safe_typing.compiled_library; md_objects : library_objects; } let mk_library sd md f table digest cst = { library_name = sd.md_name; library_filename = f; library_compiled = md.md_compiled; library_opaques = table; library_deps = sd.md_deps; library_digest = digest; library_extra_univs = cst } let name_clash_message dir mdir f = str ("The file " ^ f ^ " contains library") ++ spc () ++ pr_dirpath mdir ++ spc () ++ str "and not library" ++ spc() ++ pr_dirpath dir type intern_mode = Rec | Root | Dep (* Rec = standard, Root = -norec, Dep = dependency of norec *) (* Dependency graph *) let depgraph = ref LibraryMap.empty let marshal_in_segment ~validate ~value ~segment f ch = let () = LargeFile.seek_in ch segment.ObjFile.pos in if validate then let v = try let v = Analyze.parse_channel ch in let digest = Digest.input ch in let () = if not (String.equal digest segment.ObjFile.hash) then raise Exit in v with _ -> user_err (str "Corrupted file " ++ quote (str f)) in let () = Validate.validate value v in let v = Analyze.instantiate v in Obj.obj v else System.marshal_in f ch let marshal_or_skip ~validate ~value ~segment f ch = if validate then let v = marshal_in_segment ~validate:true ~value ~segment f ch in Some v else None let intern_from_file ~intern_mode (dir, f) = let validate = intern_mode <> Dep in Flags.if_verbose chk_pp (str"[intern "++str f++str" ..."); let (sd,md,table,opaque_csts,digest) = try (* First pass to read the metadata of the file *) let ch = System.with_magic_number_check raw_intern_library f in let seg_sd = ObjFile.get_segment ch ~segment:"summary" in let seg_md = ObjFile.get_segment ch ~segment:"library" in let seg_univs = ObjFile.get_segment ch ~segment:"universes" in let seg_tasks = ObjFile.get_segment ch ~segment:"tasks" in let seg_opaque = ObjFile.get_segment ch ~segment:"opaques" in let () = ObjFile.close_in ch in (* Actually read the data *) let ch = open_in_bin f in let (sd:summary_disk) = marshal_in_segment ~validate ~value:Values.v_libsum ~segment:seg_sd f ch in let (md:library_disk) = marshal_in_segment ~validate ~value:Values.v_lib ~segment:seg_md f ch in 
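    (* Editorial note on the segments read by this function, mirroring the
       layout written by coqc:
         "summary"   -> summary_disk  (library name, dependency digests, OCaml version),
         "library"   -> library_disk  (the compiled library itself, just read above),
         "universes" -> seg_univ option (extra constraints; non-None for former .vio files),
         "tasks"     -> must be None here, otherwise the file still has unfinished proof tasks,
         "opaques"   -> seg_proofs option (the table of delayed opaque proof terms,
                        later registered in [opaque_tables]). *)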
let (opaque_csts:seg_univ option) = marshal_in_segment ~validate ~value:Values.v_univopaques ~segment:seg_univs f ch in let (tasks:'a option) = marshal_in_segment ~validate ~value:Values.(Opt Any) ~segment:seg_tasks f ch in let (table:seg_proofs option) = marshal_or_skip ~validate ~value:Values.v_opaquetable ~segment:seg_opaque f ch in (* Verification of the final checksum *) let () = close_in ch in let ch = open_in_bin f in let () = close_in ch in let () = System.check_caml_version ~caml:sd.md_ocaml ~file:f in if dir <> sd.md_name then user_err (name_clash_message dir sd.md_name f); if tasks <> None then user_err (str "The file "++str f++str " contains unfinished tasks"); if opaque_csts <> None then begin Flags.if_verbose chk_pp (str " (was a vio file) "); Option.iter (fun (_,b) -> if not b then user_err (str "The file "++str f++str " is still a .vio")) opaque_csts; end; Flags.if_verbose chk_pp (str" done]" ++ fnl ()); let digest = let open ObjFile in if opaque_csts <> None then Safe_typing.Dvivo (seg_md.hash, seg_univs.hash) else (Safe_typing.Dvo_or_vi seg_md.hash) in sd,md,table,opaque_csts,digest with e -> Flags.if_verbose chk_pp (str" failed!]" ++ fnl ()); raise e in depgraph := LibraryMap.add sd.md_name sd.md_deps !depgraph; Option.iter (fun table -> opaque_tables := LibraryMap.add sd.md_name table !opaque_tables) table; let extra_cst = Option.default Univ.ContextSet.empty (Option.map (fun (cs,_) -> cs) opaque_csts) in mk_library sd md f table digest extra_cst let get_deps (dir, f) = try LibraryMap.find dir !depgraph with Not_found -> let _ = intern_from_file ~intern_mode:Dep (dir,f) in LibraryMap.find dir !depgraph (* Read a compiled library and all dependencies, in reverse order. Do not include files that are already in the context. *) let rec intern_library ~intern_mode seen (dir, f) needed = if LibrarySet.mem dir seen then failwith "Recursive dependencies!"; (* Look if in the current logical environment *) try let _ = find_library dir in needed with Not_found -> (* Look if already listed and consequently its dependencies too *) if List.mem_assoc_f DirPath.equal dir needed then needed else (* [dir] is an absolute name which matches [f] which must be in loadpath *) let m = intern_from_file ~intern_mode (dir,f) in let seen' = LibrarySet.add dir seen in let deps = Array.map (fun (d,_) -> try_locate_absolute_library d) m.library_deps in let intern_mode = match intern_mode with Rec -> Rec | Root | Dep -> Dep in (dir,m) :: Array.fold_right (intern_library ~intern_mode seen') deps needed (* Compute the reflexive transitive dependency closure *) let rec fold_deps seen ff (dir,f) (s,acc) = if LibrarySet.mem dir seen then failwith "Recursive dependencies!"; if LibrarySet.mem dir s then (s,acc) else let deps = get_deps (dir,f) in let deps = Array.map (fun (d,_) -> try_locate_absolute_library d) deps in let seen' = LibrarySet.add dir seen in let (s',acc') = Array.fold_right (fold_deps seen' ff) deps (s,acc) in (LibrarySet.add dir s', ff dir acc') and fold_deps_list seen ff modl needed = List.fold_right (fold_deps seen ff) modl needed let fold_deps_list ff modl acc = snd (fold_deps_list LibrarySet.empty ff modl (LibrarySet.empty,acc)) let recheck_library senv ~norec ~admit ~check = let ml = List.map try_locate_qualified_library check in let nrl = List.map try_locate_qualified_library norec in let al = List.map try_locate_qualified_library admit in let needed = List.fold_right (intern_library ~intern_mode:Rec LibrarySet.empty) ml [] in let needed = List.fold_right (intern_library 
~intern_mode:Root LibrarySet.empty) nrl needed in let needed = List.rev needed in (* first compute the closure of norec, remove closure of check, add closure of admit, and finally remove norec and check *) let nochk = fold_deps_list LibrarySet.add nrl LibrarySet.empty in let nochk = fold_deps_list LibrarySet.remove ml nochk in let nochk = fold_deps_list LibrarySet.add al nochk in (* explicitly required modules cannot be skipped... *) let nochk = List.fold_right LibrarySet.remove (List.map fst (nrl@ml)) nochk in (* *) Flags.if_verbose Feedback.msg_notice (fnl()++hv 2 (str "Ordered list:" ++ fnl() ++ prlist (fun (dir,_) -> pr_dirpath dir ++ fnl()) needed)); let senv = List.fold_left (check_one_lib nochk) (senv, Cmap.empty) needed in Flags.if_verbose Feedback.msg_notice (str"Modules were successfully checked"); senv coq-8.15.0/checker/check.mli000066400000000000000000000022021417001151100155360ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit val recheck_library : safe_environment -> norec:object_file list -> admit:object_file list -> check:object_file list -> safe_environment * Cset.t Cmap.t coq-8.15.0/checker/checkFlags.ml000066400000000000000000000021541417001151100163500ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Environ.env -> Environ.env (** Set flags except for those ignored by the checker (eg vm_compute). *) coq-8.15.0/checker/checkInductive.ml000066400000000000000000000216511417001151100172510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* None | FakeRecord -> Some None | PrimRecord data -> Some (Some (Array.map (fun (x,_,_,_) -> x) data)) in let check_template ind = match ind.mind_arity with | RegularArity _ -> false | TemplateArity _ -> true in let mind_entry_template = Array.exists check_template mb.mind_packets in let () = if mind_entry_template then assert (Array.for_all check_template mb.mind_packets) in let mind_entry_universes = match mb.mind_universes with | Monomorphic -> (* We only need to rebuild the set of constraints for template polymorphic inductive types. The set of monomorphic constraints is already part of the graph at that point, but we need to emulate a broken bound variable mechanism for template inductive types. 
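(* Editorial note -- illustrative sketch only of the entry point declared in
   checker/check.mli above.  It assumes that the [PhysicalFile] constructor of
   [object_file] is exported by [Check] (checker/checker.ml below uses it that
   way), that the load path has already been populated with
   [Check.add_load_path], and that [file] is a .vo with no dependencies;
   real drivers go through checker.ml's [init]/[run] instead. *)
let _recheck_one_sketch senv file =
  Check.recheck_library senv
    ~norec:[]
    ~admit:[]
    ~check:[Check.PhysicalFile file]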
*) begin match mb.mind_template with | None -> Monomorphic_ind_entry | Some ctx -> Template_ind_entry ctx.template_context end | Polymorphic auctx -> Polymorphic_ind_entry (AbstractContext.repr auctx) in let ntyps = Array.length mb.mind_packets in let mind_entry_inds = Array.map_to_list (fun ind -> let mind_entry_arity = match ind.mind_arity with | RegularArity ar -> let ctx, arity = Term.decompose_prod_n_assum nparams ar.mind_user_arity in ignore ctx; (* we will check that the produced user_arity is equal to the input *) arity | TemplateArity ar -> let ctx = ind.mind_arity_ctxt in let ctx = List.firstn (List.length ctx - nparams) ctx in Term.mkArity (ctx, Sorts.sort_of_univ ar.template_level) in { mind_entry_typename = ind.mind_typename; mind_entry_arity; mind_entry_consnames = Array.to_list ind.mind_consnames; mind_entry_lc = Array.map_to_list (fun c -> let c = Inductive.abstract_constructor_type_relatively_to_inductive_types_context ntyps mind c in let ctx, c = Term.decompose_prod_n_assum nparams c in ignore ctx; (* we will check that the produced user_lc is equal to the input *) c ) ind.mind_user_lc; }) mb.mind_packets in let mind_entry_variance = Option.map (Array.map (fun v -> Some v)) mb.mind_variance in { mind_entry_record; mind_entry_finite = mb.mind_finite; mind_entry_params = mb.mind_params_ctxt; mind_entry_inds; mind_entry_universes; mind_entry_variance; mind_entry_private = mb.mind_private; } let check_arity env ar1 ar2 = match ar1, ar2 with | RegularArity ar, RegularArity {mind_user_arity;mind_sort} -> Constr.equal ar.mind_user_arity mind_user_arity && Sorts.equal ar.mind_sort mind_sort | TemplateArity ar, TemplateArity {template_level} -> UGraph.check_leq (universes env) template_level ar.template_level (* template_level is inferred by indtypes, so functor application can produce a smaller one *) | (RegularArity _ | TemplateArity _), _ -> assert false let check_template ar1 ar2 = match ar1, ar2 with | None, None -> true | Some ar, Some {template_context; template_param_levels} -> List.equal (Option.equal Univ.Level.equal) ar.template_param_levels template_param_levels && ContextSet.equal template_context ar.template_context | None, Some _ | Some _, None -> false let check_kelim k1 k2 = Sorts.family_leq k1 k2 (* Use [eq_ind_chk] because when we rebuild the recargs we have lost the knowledge of who is the canonical version. 
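(* Editorial note: [to_entry] above reverse-engineers a mutual_inductive_entry
   from the compiled [mutual_inductive_body], so that [Indtypes.check_inductive]
   (called from [check_inductive] below) can re-typecheck the declaration from
   scratch; [check_arity], [check_template] and [check_kelim] above, together
   with [check_packet] and [check_same_record] below, then compare the
   recomputed fields against the ones stored in the .vo file. *)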
Try with to see test-suite/coqchk/include.v *) let eq_nested_types ty1 ty2 = match ty1, ty2 with | NestedInd ind1, NestedInd ind2 -> eq_ind_chk ind1 ind2 | NestedInd _, _ -> false | NestedPrimitive c1, NestedPrimitive c2 -> Names.Constant.CanOrd.equal c1 c2 | NestedPrimitive _, _ -> false let eq_recarg a1 a2 = match a1, a2 with | Norec, Norec -> true | Mrec i1, Mrec i2 -> eq_ind_chk i1 i2 | Nested ty1, Nested ty2 -> eq_nested_types ty1 ty2 | (Norec | Mrec _ | Nested _), _ -> false let eq_reloc_tbl = Array.equal (fun x y -> Int.equal (fst x) (fst y) && Int.equal (snd x) (snd y)) let eq_in_context (ctx1, t1) (ctx2, t2) = Context.Rel.equal Constr.equal ctx1 ctx2 && Constr.equal t1 t2 let check_packet env mind ind { mind_typename; mind_arity_ctxt; mind_arity; mind_consnames; mind_user_lc; mind_nrealargs; mind_nrealdecls; mind_kelim; mind_nf_lc; mind_consnrealargs; mind_consnrealdecls; mind_recargs; mind_relevance; mind_nb_constant; mind_nb_args; mind_reloc_tbl } = let check = check mind in ignore mind_typename; (* passed through *) check "mind_arity_ctxt" (Context.Rel.equal Constr.equal ind.mind_arity_ctxt mind_arity_ctxt); check "mind_arity" (check_arity env ind.mind_arity mind_arity); ignore mind_consnames; (* passed through *) check "mind_user_lc" (Array.equal Constr.equal ind.mind_user_lc mind_user_lc); check "mind_nrealargs" Int.(equal ind.mind_nrealargs mind_nrealargs); check "mind_nrealdecls" Int.(equal ind.mind_nrealdecls mind_nrealdecls); check "mind_kelim" (check_kelim ind.mind_kelim mind_kelim); check "mind_nf_lc" (Array.equal eq_in_context ind.mind_nf_lc mind_nf_lc); (* NB: here syntactic equality is not just an optimisation, we also care about the shape of the terms *) check "mind_consnrealargs" (Array.equal Int.equal ind.mind_consnrealargs mind_consnrealargs); check "mind_consnrealdecls" (Array.equal Int.equal ind.mind_consnrealdecls mind_consnrealdecls); check "mind_recargs" (Rtree.equal eq_recarg ind.mind_recargs mind_recargs); check "mind_relevant" (Sorts.relevance_equal ind.mind_relevance mind_relevance); check "mind_nb_args" Int.(equal ind.mind_nb_args mind_nb_args); check "mind_nb_constant" Int.(equal ind.mind_nb_constant mind_nb_constant); check "mind_reloc_tbl" (eq_reloc_tbl ind.mind_reloc_tbl mind_reloc_tbl); () let check_same_record r1 r2 = match r1, r2 with | NotRecord, NotRecord | FakeRecord, FakeRecord -> true | PrimRecord r1, PrimRecord r2 -> (* The kernel doesn't care about the names, we just need to check that the saved types are correct. 
*) Array.for_all2 (fun (_,_,r1,tys1) (_,_,r2,tys2) -> Array.equal Sorts.relevance_equal r1 r2 && Array.equal Constr.equal tys1 tys2) r1 r2 | (NotRecord | FakeRecord | PrimRecord _), _ -> false let check_inductive env mind mb = let entry = to_entry mind mb in let { mind_packets; mind_record; mind_finite; mind_ntypes; mind_hyps; mind_nparams; mind_nparams_rec; mind_params_ctxt; mind_universes; mind_template; mind_variance; mind_sec_variance; mind_private; mind_typing_flags; } = (* Locally set typing flags for further typechecking *) let env = CheckFlags.set_local_flags mb.mind_typing_flags env in Indtypes.check_inductive env ~sec_univs:None mind entry in let check = check mind in Array.iter2 (check_packet env mind) mb.mind_packets mind_packets; check "mind_record" (check_same_record mb.mind_record mind_record); check "mind_finite" (mb.mind_finite == mind_finite); check "mind_ntypes" Int.(equal mb.mind_ntypes mind_ntypes); check "mind_hyps" (Context.Named.equal Constr.equal mb.mind_hyps mind_hyps); check "mind_nparams" Int.(equal mb.mind_nparams mind_nparams); check "mind_nparams_rec" (mb.mind_nparams_rec <= mind_nparams_rec); (* module substitution can increase the real number of recursively uniform parameters, so be tolerant and use [<=]. *) check "mind_params_ctxt" (Context.Rel.equal Constr.equal mb.mind_params_ctxt mind_params_ctxt); ignore mind_universes; (* Indtypes did the necessary checking *) check "mind_template" (check_template mb.mind_template mind_template); check "mind_variance" (Option.equal (Array.equal Univ.Variance.equal) mb.mind_variance mind_variance); check "mind_sec_variance" (Option.is_empty mind_sec_variance); ignore mind_private; (* passed through Indtypes *) ignore mind_typing_flags; (* TODO non oracle flags *) add_mind mind mb env coq-8.15.0/checker/checkInductive.mli000066400000000000000000000017251417001151100174220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* MutInd.t -> Declarations.mutual_inductive_body -> env coq-8.15.0/checker/checkTypes.ml000066400000000000000000000027361417001151100164260ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* () | _ -> failwith "not the correct sort" let check_polymorphic_arity env params par = let pl = par.template_param_levels in let rec check_p env pl params = let open Context.Rel.Declaration in match pl, params with Some u::pl, LocalAssum (na,ty)::params -> check_kind env ty u; check_p (push_rel (LocalAssum (na,ty)) env) pl params | None::pl,d::params -> check_p (push_rel d env) pl params | [], _ -> () | _ -> failwith "check_poly: not the right number of params" in check_p env pl (List.rev params) coq-8.15.0/checker/checkTypes.mli000066400000000000000000000015371417001151100165750ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* rel_context -> template_universes -> unit coq-8.15.0/checker/check_stat.ml000066400000000000000000000062471417001151100164350ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, 
CNRS and contributors *) (* " else hv 2 (str ass ++ str ":" ++ fnl() ++ prlist_with_sep fnl str axs) let pr_axioms env opac = let add c cb acc = if Declareops.constant_has_body cb then acc else match Cmap.find_opt c opac with | None -> Cset.add c acc | Some s -> Cset.union s acc in let csts = fold_constants add env Cset.empty in let csts = Cset.fold (fun c acc -> Constant.to_string c :: acc) csts [] in pr_assumptions "Axioms" csts let pr_type_in_type env = let csts = fold_constants (fun c cb acc -> if not cb.const_typing_flags.check_universes then Constant.to_string c :: acc else acc) env [] in let csts = fold_inductives (fun c cb acc -> if not cb.mind_typing_flags.check_universes then MutInd.to_string c :: acc else acc) env csts in pr_assumptions "Constants/Inductives relying on type-in-type" csts let pr_unguarded env = let csts = fold_constants (fun c cb acc -> if not cb.const_typing_flags.check_guarded then Constant.to_string c :: acc else acc) env [] in let csts = fold_inductives (fun c cb acc -> if not cb.mind_typing_flags.check_guarded then MutInd.to_string c :: acc else acc) env csts in pr_assumptions "Constants/Inductives relying on unsafe (co)fixpoints" csts let pr_nonpositive env = let inds = fold_inductives (fun c cb acc -> if not cb.mind_typing_flags.check_positive then MutInd.to_string c :: acc else acc) env [] in pr_assumptions "Inductives whose positivity is assumed" inds let print_context env opac = if !output_context then begin Feedback.msg_notice (hov 0 (fnl() ++ str"CONTEXT SUMMARY" ++ fnl() ++ str"===============" ++ fnl() ++ fnl() ++ str "* " ++ hov 0 (pr_impredicative_set env ++ fnl()) ++ fnl() ++ str "* " ++ hov 0 (pr_axioms env opac ++ fnl()) ++ fnl() ++ str "* " ++ hov 0 (pr_type_in_type env ++ fnl()) ++ fnl() ++ str "* " ++ hov 0 (pr_unguarded env ++ fnl()) ++ fnl() ++ str "* " ++ hov 0 (pr_nonpositive env ++ fnl())) ) end let stats env opac = print_context env opac; print_memory_stat () coq-8.15.0/checker/check_stat.mli000066400000000000000000000014321417001151100165750ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Names.Cset.t Names.Cmap.t -> unit coq-8.15.0/checker/checker.ml000066400000000000000000000337321417001151100157300ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* =len then dirs else let pos = try String.index_from s n '.' with Not_found -> len in let dir = String.sub s n (pos-n) in decoupe_dirs (dir::dirs) (pos+1) in decoupe_dirs [] 0 let dirpath_of_string s = match parse_dir s with [] -> Check.default_root_prefix | dir -> DirPath.make (List.map Id.of_string dir) let path_of_string s = if Filename.check_suffix s ".vo" then PhysicalFile s else match parse_dir s with [] -> invalid_arg "path_of_string" | l::dir -> LogicalFile {dirpath=dir; basename=l} let get_version () = try let env = Boot.Env.init () in let revision = Boot.Env.(Path.to_string (revision env)) in let ch = open_in revision in let ver = input_line ch in let rev = input_line ch in let () = close_in ch in Printf.sprintf "%s (%s)" ver rev with _ -> Coq_config.version let print_header () = Printf.printf "Welcome to Chicken %s\n%!" 
(get_version ()) (* Adding files to Coq loadpath *) let add_path ~unix_path:dir ~coq_root:coq_dirpath = if exists_dir dir then begin Check.add_load_path (dir,coq_dirpath) end else Feedback.msg_warning (str "Cannot open " ++ str dir) let convert_string d = try Id.of_string d with CErrors.UserError _ -> Flags.if_verbose Feedback.msg_warning (str "Directory " ++ str d ++ str " cannot be used as a Coq identifier (skipped)"); raise Exit let add_rec_path ~unix_path ~coq_root = if exists_dir unix_path then let dirs = all_subdirs ~unix_path in let prefix = DirPath.repr coq_root in let convert_dirs (lp, cp) = try let path = List.rev_map convert_string cp @ prefix in Some (lp, Names.DirPath.make path) with Exit -> None in let dirs = List.map_filter convert_dirs dirs in List.iter Check.add_load_path dirs; Check.add_load_path (unix_path, coq_root) else Feedback.msg_warning (str "Cannot open " ++ str unix_path) (* By the option -R/-Q of the command line *) let includes = ref [] let push_include (s, alias) = includes := (s,alias) :: !includes let set_include d p = let p = dirpath_of_string p in push_include (d,p) (* Initializes the LoadPath *) let init_load_path () = let coqenv = Boot.Env.init () in (* the to_string casting won't be necessary once Boot handles include paths *) let plugins = Boot.Env.plugins coqenv |> Boot.Path.to_string in let theories = Boot.Env.stdlib coqenv |> Boot.Path.to_string in let user_contrib = Boot.Env.user_contrib coqenv |> Boot.Path.to_string in let xdg_dirs = Envars.xdg_dirs in let coqpath = Envars.coqpath in (* NOTE: These directories are searched from last to first *) (* first standard library *) add_rec_path ~unix_path:theories ~coq_root:(Names.DirPath.make[coq_root]); (* then plugins *) add_rec_path ~unix_path:plugins ~coq_root:(Names.DirPath.make [coq_root]); (* then user-contrib *) if Sys.file_exists user_contrib then add_rec_path ~unix_path:user_contrib ~coq_root:Check.default_root_prefix; (* then directories in XDG_DATA_DIRS and XDG_DATA_HOME *) List.iter (fun s -> add_rec_path ~unix_path:s ~coq_root:Check.default_root_prefix) (xdg_dirs ~warn:(fun x -> Feedback.msg_warning (str x))); (* then directories in COQPATH *) List.iter (fun s -> add_rec_path ~unix_path:s ~coq_root:Check.default_root_prefix) coqpath; (* then current directory *) add_path ~unix_path:"." ~coq_root:Check.default_root_prefix; (* additional loadpath, given with -R/-Q options *) List.iter (fun (unix_path, coq_root) -> add_rec_path ~unix_path ~coq_root) (List.rev !includes); includes := [] let impredicative_set = ref false let set_impredicative_set () = impredicative_set := true let indices_matter = ref false let make_senv () = let senv = Safe_typing.empty_environment in let senv = Safe_typing.set_impredicative_set !impredicative_set senv in let senv = Safe_typing.set_indices_matter !indices_matter senv in let senv = Safe_typing.set_VM false senv in let senv = Safe_typing.set_allow_sprop true senv in (* be smarter later *) Safe_typing.set_native_compiler false senv let admit_list = ref ([] : object_file list) let add_admit s = admit_list := path_of_string s :: !admit_list let norec_list = ref ([] : object_file list) let add_norec s = norec_list := path_of_string s :: !norec_list let compile_list = ref ([] : object_file list) let add_compile s = compile_list := path_of_string s :: !compile_list (*s Parsing of the command line. We no longer use [Arg.parse], in order to use share [Usage.print_usage] between coqtop and coqc. 
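(* Editorial note -- illustrative sketch only.  The pieces defined above
   (load-path initialisation, [make_senv], and the -admit/-norec/module
   argument lists) are glued together by [init_with_argv], [run] and [start]
   near the end of this file; schematically the whole checker amounts to: *)
let _driver_sketch argv =
  let senv = init_with_argv argv in   (* parse args, fill the load path, make_senv *)
  let senv, opac = run senv in        (* recheck the requested libraries *)
  Check_stat.stats (Safe_typing.env_of_safe_env senv) opac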
*) let compile_files senv = Check.recheck_library senv ~norec:(List.rev !norec_list) ~admit:(List.rev !admit_list) ~check:(List.rev !compile_list) let version () = Printf.printf "The Coq Proof Checker, version %s\n" Coq_config.version; exit 0 (* print the usage of coqtop (or coqc) on channel co *) let print_usage_channel co command = output_string co command; output_string co "coqchk options are:\n"; output_string co " -Q dir coqdir map physical dir to logical coqdir\ \n -R dir coqdir synonymous for -Q\ \n\ \n\ \n -admit module load module and dependencies without checking\ \n -norec module check module but admit dependencies without checking\ \n\ \n -coqlib dir set coqchk's standard library location\ \n -where print coqchk's standard library location and exit\ \n -v print coqchk version and exit\ \n -o, --output-context print the list of assumptions\ \n -m, --memory print the maximum heap size\ \n -silent disable trace of constants being checked\ \n\ \n -impredicative-set set sort Set impredicative\ \n\ \n -h, --help print this list of options\ \n" (* print the usage on standard error *) let print_usage = print_usage_channel stderr let print_usage_coqtop () = print_usage "Usage: coqchk modules\n\n" let usage () = print_usage_coqtop (); flush stderr; exit 1 open Type_errors let anomaly_string () = str "Anomaly: " let report () = strbrk (". Please report at " ^ Coq_config.wwwbugtracker ^ ".") let guill s = str "\"" ++ str s ++ str "\"" let explain_exn = function | Stream.Failure -> hov 0 (anomaly_string () ++ str "uncaught Stream.Failure.") | Stream.Error txt -> hov 0 (str "Syntax error: " ++ str txt) | Sys_error msg -> hov 0 (anomaly_string () ++ str "uncaught exception Sys_error " ++ guill msg ++ report() ) | UserError pps -> hov 1 (str "User error: " ++ pps) | Out_of_memory -> hov 0 (str "Out of memory") | Stack_overflow -> hov 0 (str "Stack overflow") | Match_failure(filename,pos1,pos2) -> hov 1 (anomaly_string () ++ str "Match failure in file " ++ guill filename ++ str " at line " ++ int pos1 ++ str " character " ++ int pos2 ++ report ()) | Not_found -> hov 0 (anomaly_string () ++ str "uncaught exception Not_found" ++ report ()) | Failure s -> hov 0 (str "Failure: " ++ str s ++ report ()) | Invalid_argument s -> hov 0 (anomaly_string () ++ str "uncaught exception Invalid_argument " ++ guill s ++ report ()) | Sys.Break -> hov 0 (fnl () ++ str "User interrupt.") | Univ.UniverseInconsistency i -> let msg = if CDebug.(get_flag misc) then str "." 
++ spc() ++ Univ.explain_universe_inconsistency Univ.Level.pr i else mt() in hov 0 (str "Error: Universe inconsistency" ++ msg ++ str ".") | TypeError(ctx,te) -> hov 0 (str "Type error: " ++ (match te with | UnboundRel i -> str"UnboundRel " ++ int i | UnboundVar v -> str"UnboundVar" ++ str(Names.Id.to_string v) | NotAType _ -> str"NotAType" | BadAssumption _ -> str"BadAssumption" | ReferenceVariables _ -> str"ReferenceVariables" | ElimArity _ -> str"ElimArity" | CaseNotInductive _ -> str"CaseNotInductive" | WrongCaseInfo _ -> str"WrongCaseInfo" | NumberBranches _ -> str"NumberBranches" | IllFormedBranch _ -> str"IllFormedBranch" | Generalization _ -> str"Generalization" | ActualType _ -> str"ActualType" | IncorrectPrimitive _ -> str"IncorrectPrimitive" | CantApplyBadType ((n,a,b),{uj_val = hd; uj_type = hdty},args) -> let pp_arg i judge = hv 1 (str"arg " ++ int (i+1) ++ str"= " ++ Constr.debug_print judge.uj_val ++ str ",type= " ++ Constr.debug_print judge.uj_type) ++ fnl () in Feedback.msg_notice (str"====== ill-typed term ====" ++ fnl () ++ hov 2 (str"application head= " ++ Constr.debug_print hd) ++ fnl () ++ hov 2 (str"head type= " ++ Constr.debug_print hdty) ++ fnl () ++ str"arguments:" ++ fnl () ++ hv 1 (prvecti pp_arg args)); Feedback.msg_notice (str"====== type error ====@" ++ fnl () ++ Constr.debug_print b ++ fnl () ++ str"is not convertible with" ++ fnl () ++ Constr.debug_print a ++ fnl ()); Feedback.msg_notice (str"====== universes ====" ++ fnl () ++ (UGraph.pr_universes Univ.Level.pr (UGraph.repr (ctx.Environ.env_universes)))); str "CantApplyBadType at argument " ++ int n | CantApplyNonFunctional _ -> str"CantApplyNonFunctional" | IllFormedRecBody _ -> str"IllFormedRecBody" | IllTypedRecBody _ -> str"IllTypedRecBody" | UnsatisfiedConstraints _ -> str"UnsatisfiedConstraints" | DisallowedSProp -> str"DisallowedSProp" | BadRelevance -> str"BadRelevance" | BadInvert -> str"BadInvert" | UndeclaredUniverse _ -> str"UndeclaredUniverse" | BadVariance _ -> str "BadVariance" )) | InductiveError e -> hov 0 (str "Error related to inductive types") (* let ctx = Check.get_env() in hov 0 (str "Error:" ++ spc () ++ Himsg.explain_inductive_error ctx e)*) | CheckInductive.InductiveMismatch (mind,field) -> hov 0 (MutInd.print mind ++ str ": field " ++ str field ++ str " is incorrect.") | Assert_failure (s,b,e) -> hov 0 (anomaly_string () ++ str "assert failure" ++ spc () ++ (if s = "" then mt () else (str "(file \"" ++ str s ++ str "\", line " ++ int b ++ str ", characters " ++ int e ++ str "-" ++ int (e+6) ++ str ")")) ++ report ()) | e -> CErrors.print e (* for anomalies and other uncaught exceptions *) let parse_args argv = let rec parse = function | [] -> () | "-impredicative-set" :: rem -> set_impredicative_set (); parse rem | "-indices-matter" :: rem -> indices_matter:=true; parse rem | "-coqlib" :: s :: rem -> if not (exists_dir s) then fatal_error (str "Directory '" ++ str s ++ str "' does not exist") false; Boot.Env.set_coqlib s; parse rem | ("-Q"|"-R") :: d :: p :: rem -> set_include d p;parse rem | ("-Q"|"-R") :: ([] | [_]) -> usage () | "-debug" :: rem -> CDebug.set_debug_all true; parse rem | "-where" :: _ -> let env = Boot.Env.init () in let coqlib = Boot.Env.coqlib env |> Boot.Path.to_string in print_endline coqlib; exit 0 | ("-?"|"-h"|"-H"|"-help"|"--help") :: _ -> usage () | ("-v"|"--version") :: _ -> version () | ("-m" | "--memory") :: rem -> Check_stat.memory_stat := true; parse rem | ("-o" | "--output-context") :: rem -> Check_stat.output_context := true; parse rem | "-admit" 
:: s :: rem -> add_admit s; parse rem | "-admit" :: [] -> usage () | "-norec" :: s :: rem -> add_norec s; parse rem | "-norec" :: [] -> usage () | "-silent" :: rem -> Flags.quiet := true; parse rem | s :: _ when s<>"" && s.[0]='-' -> fatal_error (str "Unknown option " ++ str s) false | s :: rem -> add_compile s; parse rem in parse (List.tl (Array.to_list argv)) (* XXX: At some point we need to either port the checker to use the feedback system or to remove its use completely. *) let init_with_argv argv = Sys.catch_break false; (* Ctrl-C is fatal during the initialisation *) let _fhandle = Feedback.(add_feeder (console_feedback_listener Format.err_formatter)) in try parse_args argv; CWarnings.set_flags ("+"^Typeops.warn_bad_relevance_name); if CDebug.(get_flag misc) then Printexc.record_backtrace true; Flags.if_verbose print_header (); init_load_path (); make_senv () with e -> fatal_error (str "Error during initialization :" ++ (explain_exn e)) (is_anomaly e) let init() = init_with_argv Sys.argv let run senv = try let senv = compile_files senv in flush_all(); senv with e -> if CDebug.(get_flag misc) then Printexc.print_backtrace stderr; fatal_error (explain_exn e) (is_anomaly e) let start () = let senv = init() in let senv, opac = run senv in Check_stat.stats (Safe_typing.env_of_safe_env senv) opac; exit 0 coq-8.15.0/checker/checker.mli000066400000000000000000000012751417001151100160760ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit coq-8.15.0/checker/coqchk.ml000066400000000000000000000000321417001151100155570ustar00rootroot00000000000000 let _ = Checker.start () coq-8.15.0/checker/coqchk.mli000066400000000000000000000014501417001151100157350ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* pr_id id | _ -> str"_";; let prdp dp = pp(str(string_of_dirpath dp));; (* let prc c = pp(Himsg.pr_lconstr_env (Check.get_env()) c);; let prcs cs = prc (Declarations.force cs);; let pru u = pp(str(Univ.string_of_universe u));;*) let pru u = pp(Univ.pr_uni u);; let prlab l = pp(str(string_of_label l));; let prid id = pp(pr_id id);; let prcon c = pp(Indtypes.prcon c);; let prkn kn = pp(Indtypes.prkn kn);; let prus g = pp(Univ.pr_universes g);; let prcstrs c = let g = Univ.merge_constraints c Univ.initial_universes in pp(Univ.pr_universes g);; (*let prcstrs c = pp(Univ.pr_constraints c);; *) (* let prenvu e = let u = universes e in let pu = str "UNIVERSES:"++fnl()++str" "++hov 0 (Univ.pr_universes u) ++fnl() in pp pu;; let prenv e = let ctx1 = named_context e in let ctx2 = rel_context e in let pe = hov 1 (str"[" ++ prlist_with_sep spc (fun (na,_,_) -> pr_id na) (List.rev ctx1)++ str"]") ++ spc() ++ hov 1 (str"[" ++ prlist_with_sep spc (fun (na,_,_) -> pr_na na) (List.rev ctx2)++ str"]") in pp pe;; *) (* let prsub s = let string_of_mp mp = let s = string_of_mp mp in (match mp with MPbound _ -> "#bound."|_->"")^s in pp (hv 0 (fold_subst (fun msid mp strm -> str "S " ++ str (debug_string_of_msid msid) ++ str " |-> " ++ str (string_of_mp mp) ++ fnl() ++ strm) (fun mbid mp strm -> str"B " ++ str (debug_string_of_mbid mbid) ++ str " |-> " ++ str (string_of_mp mp) ++ fnl() ++ strm) (fun mp1 mp strm -> str"P " ++ str (string_of_mp mp1) ++ str " |-> " ++ str (string_of_mp mp) ++ fnl() 
++ strm) s (mt()))) ;; *) #install_printer prid;; #install_printer prcon;; #install_printer prlab;; #install_printer prdp;; #install_printer prkn;; #install_printer pru;; (* #install_printer prc;; #install_printer prcs;; *) #install_printer prcstrs;; (*#install_printer prus;;*) (*#install_printer prenv;;*) (*#install_printer prenvu;; #install_printer prsub;;*) Checker.init_with_argv [|"";"-coqlib";"."|];; Flags.quiet := false;; Flags.debug := true;; Sys.catch_break true;; let module_of_file f = let (_,mb,_,_) = Obj.magic ((intern_from_file f).library_compiled) in (mb:Cic.module_body) ;; let deref_mod md s = let l = match md.mod_expr with Struct(NoFunctor l) -> l | FullStruct -> (match md.mod_type with NoFunctor l -> l) in List.assoc (label_of_id(id_of_string s)) l ;; (* let mod_access m fld = match m.mod_expr with Some(SEBstruct l) -> List.assoc fld l | _ -> failwith "bad structure type" ;; *) let parse_dp s = make_dirpath(List.rev_map id_of_string (Str.split(Str.regexp"\\.") s)) ;; let parse_sp s = let l = List.rev (Str.split(Str.regexp"\\.") s) in {dirpath=List.tl l; basename=List.hd l};; let parse_kn s = let l = List.rev (Str.split(Str.regexp"\\.") s) in let dp = make_dirpath(List.map id_of_string(List.tl l)) in make_kn(MPfile dp) empty_dirpath (label_of_id (id_of_string (List.hd l))) ;; let parse_con s = let l = List.rev (Str.split(Str.regexp"\\.") s) in let dp = make_dirpath(List.map id_of_string(List.tl l)) in make_con(MPfile dp) empty_dirpath (label_of_id (id_of_string (List.hd l))) ;; let get_mod dp = lookup_module dp (Safe_typing.get_env()) ;; let get_mod_type dp = lookup_modtype dp (Safe_typing.get_env()) ;; let get_cst kn = lookup_constant kn (Safe_typing.get_env()) ;; let read_mod s f = let lib = intern_from_file (parse_dp s,f) in ((Obj.magic lib.library_compiled): dir_path * module_body * (dir_path * Digest.t) list);; let expln f x = try f x with UserError(_,strm) as e -> msgnl strm; raise e let admit_l l = let l = List.map parse_sp l in Check.recheck_library ~admit:l ~check:l;; let run_l l = Check.recheck_library ~admit:[] ~check:(List.map parse_sp l);; let norec q = Check.recheck_library ~norec:[parse_sp q] ~admit:[] ~check:[];; (* admit_l["Bool";"OrderedType";"DecidableType"];; run_l["FSetInterface"];; *) coq-8.15.0/checker/mod_checking.ml000066400000000000000000000164601417001151100167350ustar00rootroot00000000000000open Pp open Util open Names open Reduction open Typeops open Declarations open Environ (** {6 Checking constants } *) let indirect_accessor : (cooking_info Opaqueproof.opaque -> Constr.t * unit Opaqueproof.delayed_universes) ref = ref (fun _ -> assert false) let set_indirect_accessor f = indirect_accessor := f let register_opacified_constant env opac kn cb = let rec gather_consts s c = match Constr.kind c with | Constr.Const (c, _) -> Cset.add c s | _ -> Constr.fold gather_consts s c in let wo_body = Cset.fold (fun kn s -> if Declareops.constant_has_body (lookup_constant kn env) then s else match Cmap.find_opt kn opac with | None -> Cset.add kn s | Some s' -> Cset.union s' s) (gather_consts Cset.empty cb) Cset.empty in Cmap.add kn wo_body opac let check_constant_declaration env opac kn cb opacify = Flags.if_verbose Feedback.msg_notice (str " checking cst:" ++ Constant.print kn); let env = CheckFlags.set_local_flags cb.const_typing_flags env in let poly, env = match cb.const_universes with | Monomorphic -> (* Monomorphic universes are stored at the library level, the ones in const_universes should not be needed *) false, env | Polymorphic auctx -> let ctx = 
Univ.AbstractContext.repr auctx in (* [env] contains De Bruijn universe variables *) let env = push_context ~strict:false ctx env in true, env in let ty = cb.const_type in let _ = infer_type env ty in let body, env = match cb.const_body with | Undef _ | Primitive _ -> None, env | Def c -> Some c, env | OpaqueDef o -> let c, u = !indirect_accessor o in let env = match u, cb.const_universes with | Opaqueproof.PrivateMonomorphic (), Monomorphic -> env | Opaqueproof.PrivatePolymorphic (_, local), Polymorphic _ -> push_subgraph local env | _ -> assert false in Some c, env in let () = match body with | Some bd -> let j = infer env bd in (try conv_leq env j.uj_type ty with NotConvertible -> Type_errors.error_actual_type env j ty) | None -> () in match body with | Some body when opacify -> register_opacified_constant env opac kn body | Some _ | None -> opac let check_constant_declaration env opac kn cb opacify = let opac = check_constant_declaration env opac kn cb opacify in Environ.add_constant kn cb env, opac (** {6 Checking modules } *) (** We currently ignore the [mod_type_alg] and [typ_expr_alg] fields. The only delicate part is when [mod_expr] is an algebraic expression : we need to expand it before checking it is indeed a subtype of [mod_type]. Fortunately, [mod_expr] cannot contain any [MEwith]. *) let lookup_module mp env = try Environ.lookup_module mp env with Not_found -> failwith ("Unknown module: "^ModPath.to_string mp) let mk_mtb mp sign delta = { mod_mp = mp; mod_expr = (); mod_type = sign; mod_type_alg = None; mod_delta = delta; mod_retroknowledge = ModTypeRK; } let rec collect_constants_without_body sign mp accu = let collect_sf s lab = function | SFBconst cb -> let c = Constant.make2 mp lab in if Declareops.constant_has_body cb then s else Cset.add c s | SFBmodule msb -> collect_constants_without_body msb.mod_type (MPdot(mp,lab)) s | SFBmind _ | SFBmodtype _ -> s in match sign with | MoreFunctor _ -> Cset.empty (* currently ignored *) | NoFunctor struc -> List.fold_left (fun s (lab,mb) -> collect_sf s lab mb) accu struc let rec check_module env opac mp mb opacify = Flags.if_verbose Feedback.msg_notice (str " checking module: " ++ str (ModPath.to_string mp)); let env = Modops.add_retroknowledge mb.mod_retroknowledge env in let sign, opac = check_signature env opac mb.mod_type mb.mod_mp mb.mod_delta opacify in let optsign, opac = match mb.mod_expr with |Struct sign_struct -> let opacify = collect_constants_without_body sign mb.mod_mp opacify in let sign, opac = check_signature env opac sign_struct mb.mod_mp mb.mod_delta opacify in Some (sign, mb.mod_delta), opac |Algebraic me -> Some (check_mexpression env opac me mb.mod_mp mb.mod_delta), opac |Abstract|FullStruct -> None, opac in let () = match optsign with |None -> () |Some (sign,delta) -> let mtb1 = mk_mtb mp sign delta and mtb2 = mk_mtb mp mb.mod_type mb.mod_delta in let env = Modops.add_module_type mp mtb1 env in let cu = Subtyping.check_subtypes env mtb1 mtb2 in if not (Environ.check_constraints cu env) then CErrors.user_err Pp.(str "Incorrect universe constraints for module subtyping"); in opac and check_module_type env mty = Flags.if_verbose Feedback.msg_notice (str " checking module type: " ++ str (ModPath.to_string mty.mod_mp)); let (_:module_signature), _ = check_signature env Cmap.empty mty.mod_type mty.mod_mp mty.mod_delta Cset.empty in () and check_structure_field env opac mp lab res opacify = function | SFBconst cb -> let c = Constant.make2 mp lab in check_constant_declaration env opac c cb (Cset.mem c opacify) | 
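(* for an inductive block, the kernel name is resolved through the delta
   resolver [res] before the whole block is handed to
   [CheckInductive.check_inductive] *)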
SFBmind mib -> let kn = KerName.make mp lab in let kn = Mod_subst.mind_of_delta_kn res kn in CheckInductive.check_inductive env kn mib, opac | SFBmodule msb -> let opac = check_module env opac (MPdot(mp,lab)) msb opacify in Modops.add_module msb env, opac | SFBmodtype mty -> check_module_type env mty; add_modtype mty env, opac and check_mexpr env opac mse mp_mse res = match mse with | MEident mp -> let mb = lookup_module mp env in let mb = Modops.strengthen_and_subst_mb mb mp_mse false in mb.mod_type, mb.mod_delta | MEapply (f,mp) -> let sign, delta = check_mexpr env opac f mp_mse res in let farg_id, farg_b, fbody_b = Modops.destr_functor sign in let mtb = Modops.module_type_of_module (lookup_module mp env) in let cu = Subtyping.check_subtypes env mtb farg_b in if not (Environ.check_constraints cu env) then CErrors.user_err Pp.(str "Incorrect universe constraints for module subtyping"); let subst = Mod_subst.map_mbid farg_id mp Mod_subst.empty_delta_resolver in Modops.subst_signature subst fbody_b, Mod_subst.subst_codom_delta_resolver subst delta | MEwith _ -> CErrors.user_err Pp.(str "Unsupported 'with' constraint in module implementation") and check_mexpression env opac sign mp_mse res = match sign with | MoreFunctor (arg_id, mtb, body) -> check_module_type env mtb; let env' = Modops.add_module_type (MPbound arg_id) mtb env in let body, delta = check_mexpression env' opac body mp_mse res in MoreFunctor(arg_id,mtb,body), delta | NoFunctor me -> check_mexpr env opac me mp_mse res and check_signature env opac sign mp_mse res opacify = match sign with | MoreFunctor (arg_id, mtb, body) -> check_module_type env mtb; let env' = Modops.add_module_type (MPbound arg_id) mtb env in let body, opac = check_signature env' opac body mp_mse res Cset.empty in MoreFunctor(arg_id,mtb,body), opac | NoFunctor struc -> let (_:env), opac = List.fold_left (fun (env, opac) (lab,mb) -> check_structure_field env opac mp_mse lab res opacify mb) (env, opac) struc in NoFunctor struc, opac let check_module env opac mp mb = check_module env opac mp mb Cset.empty coq-8.15.0/checker/mod_checking.mli000066400000000000000000000016611417001151100171030ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Constr.t * unit Opaqueproof.delayed_universes) -> unit val check_module : Environ.env -> Names.Cset.t Names.Cmap.t -> Names.ModPath.t -> Declarations.module_body -> Names.Cset.t Names.Cmap.t coq-8.15.0/checker/safe_checking.ml000066400000000000000000000024251417001151100170700ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Names.Cset.t Names.Cmap.t -> compiled_library -> Univ.ContextSet.t -> vodigest -> safe_environment * Names.Cset.t Names.Cmap.t val unsafe_import : safe_environment -> compiled_library -> Univ.ContextSet.t -> vodigest -> safe_environment coq-8.15.0/checker/validate.ml000066400000000000000000000155431417001151100161150ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Format.print_int i | Ptr p -> let v = LargeArray.get mem p in begin match v with | Struct (tag, data) -> let n = Array.length data in Format.print_string 
("#"^string_of_int tag^"("); Format.open_hvbox 0; for i = 0 to n-1 do pr_obj_rec mem (Array.get data i); if i<>n-1 then (Format.print_string ","; Format.print_cut()) done; Format.close_box(); Format.print_string ")" | String s -> Format.print_string ("\""^String.escaped s^"\"") | Int64 _ | Float64 _ -> Format.print_string "?" end | Atm tag -> Format.print_string ("#"^string_of_int tag^"()"); | Fun addr -> Format.printf "fun@%x" addr let pr_obj mem o = pr_obj_rec mem o; Format.print_newline() (**************************************************************************) (* Obj low-level validators *) type error_frame = | CtxAnnot of string | CtxType of string | CtxField of int | CtxTag of int type error_context = error_frame list let mt_ec : error_context = [] let (/) (ctx:error_context) s : error_context = s::ctx exception ValidObjError of string * error_context * data let fail _mem ctx o s = raise (ValidObjError(s,ctx,o)) let is_block mem o = match o with | Ptr _ | Atm _ -> true | Fun _ | Int _ -> false let is_int _mem o = match o with | Int _ -> true | Fun _ | Ptr _ | Atm _ -> false let is_int64 mem o = match o with | Int _ | Fun _ | Atm _ -> false | Ptr p -> match LargeArray.get mem p with | Int64 _ -> true | Float64 _ | Struct _ | String _ -> false let is_float64 mem o = match o with | Int _ | Fun _ | Atm _ -> false | Ptr p -> match LargeArray.get mem p with | Float64 _ -> true | Int64 _ | Struct _ | String _ -> false let get_int _mem = function | Int i -> i | Fun _ | Ptr _ | Atm _ -> assert false let tag mem o = match o with | Atm tag -> tag | Fun _ -> Obj.out_of_heap_tag | Int _ -> Obj.int_tag | Ptr p -> match LargeArray.get mem p with | Struct (tag, _) -> tag | String _ -> Obj.string_tag | Float64 _ -> Obj.double_tag | Int64 _ -> Obj.custom_tag let size mem o = match o with | Atm _ -> 0 | Fun _ | Int _ -> assert false | Ptr p -> match LargeArray.get mem p with | Struct (tag, blk) -> Array.length blk | String _ | Float64 _ | Int64 _ -> assert false let field mem o i = match o with | Atm _ | Fun _ | Int _ -> assert false | Ptr p -> match LargeArray.get mem p with | Struct (tag, blk) -> Array.get blk i | String _ | Float64 _ | Int64 _ -> assert false (* Check that object o is a block with tag t *) let val_tag t mem ctx o = if is_block mem o && tag mem o = t then () else fail mem ctx o ("expected tag "^string_of_int t) let val_block mem ctx o = if is_block mem o then (if tag mem o > Obj.no_scan_tag then fail mem ctx o "block: found no scan tag") else fail mem ctx o "expected block obj" let val_dyn mem ctx o = let fail () = fail mem ctx o "expected a Dyn.t" in if not (is_block mem o) then fail () else if not (size mem o = 2) then fail () else if not (tag mem (field mem o 0) = Obj.int_tag) then fail () else () open Values let rec val_gen v mem ctx o = match v with | Tuple (name,vs) -> val_tuple ~name vs mem ctx o | Sum (name,cc,vv) -> val_sum name cc vv mem ctx o | Array v -> val_array v mem ctx o | List v0 -> val_sum "list" 1 [|[|Annot ("elem",v0);v|]|] mem ctx o | Opt v -> val_sum "option" 1 [|[|v|]|] mem ctx o | Int -> if not (is_int mem o) then fail mem ctx o "expected an int" | String -> (try val_tag Obj.string_tag mem ctx o with Failure _ -> fail mem ctx o "expected a string") | Any -> () | Fail s -> fail mem ctx o ("unexpected object " ^ s) | Annot (s,v) -> val_gen v mem (ctx/CtxAnnot s) o | Dyn -> val_dyn mem ctx o | Proxy { contents = v } -> val_gen v mem ctx o | Int64 -> val_int64 mem ctx o | Float64 -> val_float64 mem ctx o (* Check that an object is a tuple (or a record). 
vs is an array of value representation for each field. Its size corresponds to the expected size of the object. *) and val_tuple ?name vs mem ctx o = let ctx = match name with | Some n -> ctx/CtxType n | _ -> ctx in let n = Array.length vs in let val_fld i v = val_gen v mem (ctx/(CtxField i)) (field mem o i) in val_block mem ctx o; if size mem o = n then Array.iteri val_fld vs else fail mem ctx o ("tuple size: found "^string_of_int (size mem o)^ ", expected "^string_of_int n) (* Check that the object is either a constant constructor of tag < cc, or a constructed variant. each element of vv is an array of value representations of the constructor arguments. The size of vv corresponds to the number of non-constant constructors, and the size of vv.(i) is the expected arity of the i-th non-constant constructor. *) and val_sum name cc vv mem ctx o = let ctx = ctx/CtxType name in if is_block mem o then (val_block mem ctx o; let n = Array.length vv in let i = tag mem o in let ctx' = if n=1 then ctx else ctx/CtxTag i in if i < n then val_tuple vv.(i) mem ctx' o else fail mem ctx' o ("sum: unexpected tag")) else if is_int mem o then let (n:int) = get_int mem o in (if n<0 || n>=cc then fail mem ctx o ("bad constant constructor "^string_of_int n)) else fail mem ctx o "not a sum" (* Check the o is an array of values satisfying f. *) and val_array v mem ctx o = val_block mem (ctx/CtxType "array") o; for i = 0 to size mem o - 1 do val_gen v mem ctx (field mem o i) done and val_int64 mem ctx o = if not (is_int64 mem o) then fail mem ctx o "not a 63-bit unsigned integer" and val_float64 mem ctx o = if not (is_float64 mem o) then fail mem ctx o "not a 64-bit float" let print_frame = function | CtxType t -> t | CtxAnnot t -> t | CtxField i -> Printf.sprintf "fld=%i" i | CtxTag i -> Printf.sprintf "tag=%i" i let validate v (o, mem) = try val_gen v mem mt_ec o with ValidObjError(msg,ctx,obj) -> let rctx = List.rev_map print_frame ctx in print_endline ("Context: "^String.concat"/"rctx); pr_obj mem obj; failwith ("Validation failed: "^msg^" (in "^(print_frame (List.hd ctx))^")") coq-8.15.0/checker/validate.mli000066400000000000000000000013611417001151100162570ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* data * obj LargeArray.t -> unit coq-8.15.0/checker/values.ml000066400000000000000000000302721417001151100156170ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* value) : value = let self = ref Any in let ans = f (Proxy self) in let () = self := ans in ans (** Some pseudo-constructors *) let v_tuple name v = Tuple(name,v) let v_sum name cc vv = Sum(name,cc,vv) let v_enum name n = Sum(name,n,[||]) (** Ocaml standard library *) let v_pair v1 v2 = v_tuple "*" [|v1; v2|] let v_bool = v_enum "bool" 2 let v_unit = v_enum "unit" 1 let v_set v = let rec s = Sum ("Set.t",1, [|[|s; Annot("elem",v); s; Annot("bal",Int)|]|]) in s let v_map vk vd = let rec m = Sum ("Map.t",1, [|[|m; Annot("key",vk); Annot("data",vd); m; Annot("bal",Int)|]|]) in m let v_hset v = v_map Int (v_set v) let v_hmap vk vd = v_map Int (v_map vk vd) let v_pred v = v_pair v_bool (v_set v) (** kernel/names *) let v_id = String let v_dp = Annot ("dirpath", List v_id) let v_name = v_sum "name" 1 [|[|v_id|]|] let v_uid = v_tuple 
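(* A minimal illustration of the encoding used throughout this file (the type
   below is hypothetical, not part of the kernel): an OCaml declaration such as

     type t = A | B of int * string

   would be described as

     let v_t = v_sum "t" 1 [| [| Int; String |] |]

   i.e. one constant constructor and one non-constant constructor whose two
   fields are validated as [Int] and [String]. *)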
"uniq_ident" [|Int;String;v_dp|] let rec v_mp = Sum("module_path",0, [|[|v_dp|]; [|v_uid|]; [|v_mp;v_id|]|]) let v_kn = v_tuple "kernel_name" [|v_mp;v_id;Int|] let v_cst = v_sum "cst|mind" 0 [|[|v_kn|];[|v_kn;v_kn|]|] let v_ind = v_tuple "inductive" [|v_cst;Int|] let v_cons = v_tuple "constructor" [|v_ind;Int|] (** kernel/univ *) let v_level_global = v_tuple "Level.Global.t" [|v_dp;String;Int|] let v_raw_level = v_sum "raw_level" 3 (* SProp, Prop, Set *) [|(*Level*)[|v_level_global|]; (*Var*)[|Int|]|] let v_level = v_tuple "level" [|Int;v_raw_level|] let v_expr = v_tuple "levelexpr" [|v_level;Int|] let v_univ = List v_expr let v_cstrs = Annot ("Univ.constraints", v_set (v_tuple "univ_constraint" [|v_level;v_enum "order_request" 3;v_level|])) let v_variance = v_enum "variance" 3 let v_instance = Annot ("instance", Array v_level) let v_abs_context = v_tuple "abstract_universe_context" [|Array v_name; v_cstrs|] let v_context_set = v_tuple "universe_context_set" [|v_hset v_level;v_cstrs|] (** kernel/term *) let v_sort = v_sum "sort" 3 (*SProp, Prop, Set*) [|[|v_univ(*Type*)|]|] let v_sortfam = v_enum "sorts_family" 4 let v_relevance = v_sum "relevance" 2 [||] let v_binder_annot x = v_tuple "binder_annot" [|x;v_relevance|] let v_puniverses v = v_tuple "punivs" [|v;v_instance|] let v_boollist = List v_bool let v_caseinfo = let v_cstyle = v_enum "case_style" 5 in let v_cprint = v_tuple "case_printing" [|v_boollist;Array v_boollist;v_cstyle|] in v_tuple "case_info" [|v_ind;Int;Array Int;Array Int;v_relevance;v_cprint|] let v_cast = v_enum "cast_kind" 3 let v_proj_repr = v_tuple "projection_repr" [|v_ind;Int;Int;v_id|] let v_proj = v_tuple "projection" [|v_proj_repr; v_bool|] let v_uint63 = if Sys.word_size == 64 then Int else Int64 let rec v_constr = Sum ("constr",0,[| [|Int|]; (* Rel *) [|v_id|]; (* Var *) [|Fail "Meta"|]; (* Meta *) [|Fail "Evar"|]; (* Evar *) [|v_sort|]; (* Sort *) [|v_constr;v_cast;v_constr|]; (* Cast *) [|v_binder_annot v_name;v_constr;v_constr|]; (* Prod *) [|v_binder_annot v_name;v_constr;v_constr|]; (* Lambda *) [|v_binder_annot v_name;v_constr;v_constr;v_constr|]; (* LetIn *) [|v_constr;Array v_constr|]; (* App *) [|v_puniverses v_cst|]; (* Const *) [|v_puniverses v_ind|]; (* Ind *) [|v_puniverses v_cons|]; (* Construct *) [|v_caseinfo;v_instance; Array v_constr; v_case_return; v_case_invert; v_constr; Array v_case_branch|]; (* Case *) [|v_fix|]; (* Fix *) [|v_cofix|]; (* CoFix *) [|v_proj;v_constr|]; (* Proj *) [|v_uint63|]; (* Int *) [|Float64|]; (* Float *) [|v_instance;Array v_constr;v_constr;v_constr|] (* Array *) |]) and v_prec = Tuple ("prec_declaration", [|Array (v_binder_annot v_name); Array v_constr; Array v_constr|]) and v_fix = Tuple ("pfixpoint", [|Tuple ("fix2",[|Array Int;Int|]);v_prec|]) and v_cofix = Tuple ("pcofixpoint",[|Int;v_prec|]) and v_case_invert = Sum ("case_inversion", 1, [|[|Array v_constr|]|]) and v_case_branch = Tuple ("case_branch", [|Array (v_binder_annot v_name); v_constr|]) and v_case_return = Tuple ("case_return", [|Array (v_binder_annot v_name); v_constr|]) let v_rdecl = v_sum "rel_declaration" 0 [| [|v_binder_annot v_name; v_constr|]; (* LocalAssum *) [|v_binder_annot v_name; v_constr; v_constr|] |] (* LocalDef *) let v_rctxt = List v_rdecl let v_section_ctxt = v_enum "emptylist" 1 (** kernel/mod_subst *) let v_univ_abstracted v = v_tuple "univ_abstracted" [|v;v_abs_context|] let v_delta_hint = v_sum "delta_hint" 0 [|[|Int; Opt (v_univ_abstracted v_constr)|];[|v_kn|]|] let v_resolver = v_tuple "delta_resolver" [|v_map v_mp v_mp; v_hmap 
v_kn v_delta_hint|] let v_mp_resolver = v_tuple "" [|v_mp;v_resolver|] let v_subst = Annot ("substitution", v_map v_mp v_mp_resolver) (** kernel/lazyconstr *) let v_ndecl = v_sum "named_declaration" 0 [| [|v_binder_annot v_id; v_constr|]; (* LocalAssum *) [|v_binder_annot v_id; v_constr; v_constr|] |] (* LocalDef *) let v_nctxt = List v_ndecl let v_work_list = let v_abstr = v_pair v_instance (Array v_id) in Tuple ("work_list", [|v_hmap v_cst v_abstr; v_hmap v_cst v_abstr|]) let v_abstract = Tuple ("abstract", [| v_nctxt; v_instance; v_abs_context |]) let v_cooking_info = Tuple ("cooking_info", [|v_work_list; v_abstract|]) let v_opaque = v_sum "opaque" 0 [|[|List v_subst; List v_cooking_info; v_dp; Int|]|] (** kernel/declarations *) let v_conv_level = v_sum "conv_level" 2 [|[|Int|]|] let v_oracle = v_tuple "oracle" [| v_map v_id v_conv_level; v_hmap v_cst v_conv_level; v_pred v_id; v_pred v_cst; |] let v_template_arity = v_tuple "template_arity" [|v_univ|] let v_template_universes = v_tuple "template_universes" [|List(Opt v_level);v_context_set|] let v_primitive = v_enum "primitive" 54 (* Number of constructors of the CPrimitives.t type *) let v_cst_def = v_sum "constant_def" 0 [|[|Opt Int|]; [|v_constr|]; [|v_opaque|]; [|v_primitive|]|] let v_typing_flags = v_tuple "typing_flags" [|v_bool; v_bool; v_bool; v_oracle; v_bool; v_bool; v_bool; v_bool; v_bool; v_bool; v_bool; v_bool|] let v_univs = v_sum "universes" 1 [|[|v_abs_context|]|] let v_cb = v_tuple "constant_body" [|v_section_ctxt; v_cst_def; v_constr; v_relevance; Any; v_univs; v_bool; v_typing_flags|] let v_nested = v_sum "nested" 0 [|[|v_ind|] (* NestedInd *);[|v_cst|] (* NestedPrimitive *)|] let v_recarg = v_sum "recarg" 1 (* Norec *) [|[|v_ind|] (* Mrec *);[|v_nested|] (* Nested *)|] let rec v_wfp = Sum ("wf_paths",0, [|[|Int;Int|]; (* Rtree.Param *) [|v_recarg;Array v_wfp|]; (* Rtree.Node *) [|Int;Array v_wfp|] (* Rtree.Rec *) |]) let v_mono_ind_arity = v_tuple "monomorphic_inductive_arity" [|v_constr;v_sort|] let v_ind_arity = v_sum "inductive_arity" 0 [|[|v_mono_ind_arity|];[|v_template_arity|]|] let v_one_ind = v_tuple "one_inductive_body" [|v_id; v_rctxt; v_ind_arity; Array v_id; Array v_constr; Int; Int; v_sortfam; Array (v_pair v_rctxt v_constr); Array Int; Array Int; v_wfp; v_relevance; Int; Int; Any|] let v_finite = v_enum "recursivity_kind" 3 let v_record_info = v_sum "record_info" 2 [| [| Array (v_tuple "record" [| v_id; Array v_id; Array v_relevance; Array v_constr |]) |] |] let v_ind_pack = v_tuple "mutual_inductive_body" [|Array v_one_ind; v_record_info; v_finite; Int; v_section_ctxt; Int; Int; v_rctxt; v_univs; (* universes *) Opt v_template_universes; Opt (Array v_variance); Opt (Array v_variance); Opt v_bool; v_typing_flags|] let v_prim_ind = v_enum "prim_ind" 6 (* Number of "Register ... as kernel.ind_..." 
in PrimInt63.v and PrimFloat.v *) let v_prim_type = v_enum "prim_type" 3 (* Number of constructors of prim_type in "kernel/cPrimitives.ml" *) let v_retro_action = v_sum "retro_action" 0 [| [|v_prim_ind; v_ind|]; [|v_prim_type; v_cst|]; [|v_cst|]; |] let v_retroknowledge = v_sum "module_retroknowledge" 1 [|[|List v_retro_action|]|] let rec v_mae = Sum ("module_alg_expr",0, [|[|v_mp|]; (* SEBident *) [|v_mae;v_mp|]; (* SEBapply *) [|v_mae; Any|] (* SEBwith *) |]) let rec v_sfb = Sum ("struct_field_body",0, [|[|v_cb|]; (* SFBconst *) [|v_ind_pack|]; (* SFBmind *) [|v_module|]; (* SFBmodule *) [|v_modtype|] (* SFBmodtype *) |]) and v_struc = List (Tuple ("label*sfb",[|v_id;v_sfb|])) and v_sign = Sum ("module_sign",0, [|[|v_struc|]; (* NoFunctor *) [|v_uid;v_modtype;v_sign|]|]) (* MoreFunctor *) and v_mexpr = Sum ("module_expr",0, [|[|v_mae|]; (* NoFunctor *) [|v_uid;v_modtype;v_mexpr|]|]) (* MoreFunctor *) and v_impl = Sum ("module_impl",2, (* Abstract, FullStruct *) [|[|v_mexpr|]; (* Algebraic *) [|v_sign|]|]) (* Struct *) and v_noimpl = v_unit and v_module = Tuple ("module_body", [|v_mp;v_impl;v_sign;Opt v_mexpr;v_resolver;v_retroknowledge|]) and v_modtype = Tuple ("module_type_body", [|v_mp;v_noimpl;v_sign;Opt v_mexpr;v_resolver;v_unit|]) (** kernel/safe_typing *) let v_vodigest = Sum ("module_impl",0, [| [|String|]; [|String;String|] |]) let v_deps = Array (v_tuple "dep" [|v_dp;v_vodigest|]) let v_compiled_lib = v_tuple "compiled" [|v_dp;v_module;v_context_set;v_deps|] (** Library objects *) let v_obj = Dyn let v_open_filter = Sum ("open_filter",1,[|[|v_pred String|]|]) let rec v_aobjs = Sum("algebraic_objects", 0, [| [|v_libobjs|]; [|v_mp;v_subst|] |]) and v_substobjs = Tuple("*", [|List v_uid;v_aobjs|]) and v_libobjt = Sum("Libobject.t",0, [| [| v_substobjs |]; [| v_substobjs |]; [| v_aobjs |]; [| v_libobjs |]; [| List (v_pair v_open_filter v_mp)|]; [| v_obj |] |]) and v_libobj = Tuple ("libobj", [|v_id;v_libobjt|]) and v_libobjs = List v_libobj let v_libraryobjs = Tuple ("library_objects",[|v_libobjs;v_libobjs|]) (** STM objects *) let v_frozen = Tuple ("frozen", [|List (v_pair Int Dyn); Opt Dyn|]) let v_states = v_pair Any v_frozen let v_state = Tuple ("state", [|v_states; Any; v_bool|]) let v_vcs = let vcs self = Tuple ("vcs", [|Any; Any; Tuple ("dag", [|Any; Any; v_map Any (Tuple ("state_info", [|Any; Any; Opt v_state; v_pair (Opt self) Any|])) |]) |]) in fix vcs let v_uuid = Any let v_request id doc = Tuple ("request", [|Any; Any; doc; Any; id; String|]) let v_tasks = List (v_pair (v_request v_uuid v_vcs) v_bool) let v_counters = Any let v_stm_seg = v_pair v_tasks v_counters (** Toplevel structures in a vo (see Cic.mli) *) let v_libsum = Tuple ("summary", [|v_dp;v_deps;String|]) let v_lib = Tuple ("library",[|v_compiled_lib;v_libraryobjs|]) let v_delayed_universes = Sum ("delayed_universes", 0, [| [| v_unit |]; [| Int; v_context_set |] |]) let v_opaquetable = Array (Opt (v_pair v_constr v_delayed_universes)) let v_univopaques = Opt (Tuple ("univopaques",[|v_context_set;v_bool|])) coq-8.15.0/checker/values.mli000066400000000000000000000031151417001151100157640ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* quit () in if l = "u" then None else if l = "x" then quit () else match int_of_string l with | v -> if v < 0 || v >= max then let () = Printf.printf "Out-of-range input! (only %d children)\n%!" 
max in read_num max else Some v | exception Failure _ -> Printf.printf "Unrecognized input! enters the -th child, u goes up 1 level, x exits\n%!"; read_num max type 'a repr = | INT of int | STRING of string | BLOCK of int * 'a array | OTHER module type S = sig type obj val input : in_channel -> obj val repr : obj -> obj repr val size : obj -> int val oid : obj -> int option end module ReprObj : S = struct type obj = Obj.t * int list let input chan = let obj = input_value chan in let () = CObj.register_shared_size obj in (obj, []) let repr (obj, pos) = if Obj.is_block obj then let tag = Obj.tag obj in if tag = Obj.string_tag then STRING (Obj.magic obj) else if tag < Obj.no_scan_tag then let init i = (Obj.field obj i, i :: pos) in let data = Array.init (Obj.size obj) init in BLOCK (tag, Obj.magic data) else OTHER else INT (Obj.magic obj) let size (_, p) = CObj.shared_size_of_pos p let oid _ = None end module ReprMem : S = struct open Analyze type obj = data let memory = ref LargeArray.empty let sizes = ref LargeArray.empty (** size, in words *) let ws = Sys.word_size / 8 let rec init_size seen k = function | Int _ | Atm _ | Fun _ -> k 0 | Ptr p -> if LargeArray.get seen p then k 0 else let () = LargeArray.set seen p true in match LargeArray.get !memory p with | Struct (tag, os) -> let len = Array.length os in let rec fold i accu k = if i == len then k accu else init_size seen (fun n -> fold (succ i) (accu + 1 + n) k) os.(i) in fold 0 1 (fun size -> let () = LargeArray.set !sizes p size in k size) | Int64 _ -> k 0 | Float64 _ -> k 0 | String s -> let size = 2 + (String.length s / ws) in let () = LargeArray.set !sizes p size in k size let size = function | Int _ | Atm _ | Fun _ -> 0 | Ptr p -> LargeArray.get !sizes p let repr = function | Int i -> INT i | Atm t -> BLOCK (t, [||]) | Fun _ -> OTHER | Ptr p -> match LargeArray.get !memory p with | Struct (tag, os) -> BLOCK (tag, os) | Int64 _ -> OTHER (* TODO: pretty-print int63 values *) | Float64 _ -> OTHER (* TODO: pretty-print float64 values *) | String s -> STRING s let input ch = let obj, mem = parse_channel ch in let () = memory := mem in let () = sizes := LargeArray.make (LargeArray.length mem) (-1) in let seen = LargeArray.make (LargeArray.length mem) false in let () = init_size seen ignore obj in obj let oid = function | Int _ | Atm _ | Fun _ -> None | Ptr p -> Some p end module Visit (Repr : S) : sig val init : unit -> unit val visit : Values.value -> Repr.obj -> int list -> unit end = struct (** Name of a value *) let rec get_name ?(extra=false) = function |Any -> "?" |Fail s -> "Invalid node: "^s |Tuple (name,_) -> name |Sum (name,_,_) -> name |Array v -> "array"^(if extra then "/"^get_name ~extra v else "") |List v -> "list"^(if extra then "/"^get_name ~extra v else "") |Opt v -> "option"^(if extra then "/"^get_name ~extra v else "") |Int -> "int" |String -> "string" |Annot (s,v) -> s^"/"^get_name ~extra v |Dyn -> "" | Proxy v -> get_name ~extra !v | Int64 -> "Int64" | Float64 -> "Float64" (** For tuples, its quite handy to display the inner 1st string (if any). Cf. 
[structure_body] for instance *) exception TupleString of string let get_string_in_tuple o = try for i = 0 to Array.length o - 1 do match Repr.repr o.(i) with | STRING s -> let len = min max_string_length (String.length s) in raise (TupleString (Printf.sprintf " [..%s..]" (String.sub s 0 len))) | _ -> () done; "" with TupleString s -> s (** Some details : tags, integer value for non-block, etc etc *) let rec get_details v o = match v, Repr.repr o with | (String | Any), STRING s -> let len = min max_string_length (String.length s) in Printf.sprintf " [%s]" (String.escaped (String.sub s 0 len)) |Tuple (_,v), BLOCK (_, o) -> get_string_in_tuple o |(Sum _|Any), BLOCK (tag, _) -> Printf.sprintf " [tag=%i]" tag |(Sum _|Any), INT i -> Printf.sprintf " [imm=%i]" i |Int, INT i -> Printf.sprintf " [imm=%i]" i |Annot (s,v), _ -> get_details v o |_ -> "" let get_oid obj = match Repr.oid obj with | None -> "" | Some id -> Printf.sprintf " [0x%08x]" id let node_info (v,o,p) = get_name ~extra:true v ^ get_details v o ^ " (size "^ string_of_int (Repr.size o)^"w)" ^ get_oid o (** Children of a block : type, object, position. For lists, we collect all elements of the list at once *) let access_children vs os pos = if Array.length os = Array.length vs then Array.mapi (fun i v -> v, os.(i), i::pos) vs else raise Exit let access_list v o pos = let rec loop o pos accu = match Repr.repr o with | INT 0 -> List.rev accu | BLOCK (0, [|hd; tl|]) -> loop tl (1 :: pos) ((v, hd, 0 :: pos) :: accu) | _ -> raise Exit in Array.of_list (loop o pos []) let access_block o = match Repr.repr o with | BLOCK (tag, os) -> (tag, os) | _ -> raise Exit (** raises Exit if the object has not the expected structure *) exception Forbidden let rec get_children v o pos = match v with |Tuple (_, v) -> let (_, os) = access_block o in access_children v os pos |Sum (_, _, vv) -> begin match Repr.repr o with | BLOCK (tag, os) -> access_children vv.(tag) os pos | INT _ -> [||] | _ -> raise Exit end |Array v -> let (_, os) = access_block o in access_children (Array.make (Array.length os) v) os pos |List v -> access_list v o pos |Opt v -> begin match Repr.repr o with | INT 0 -> [||] | BLOCK (0, [|x|]) -> [|(v, x, 0 :: pos)|] | _ -> raise Exit end | String -> begin match Repr.repr o with | STRING _ -> [||] | _ -> raise Exit end | Int -> begin match Repr.repr o with | INT _ -> [||] | _ -> raise Exit end |Annot (s,v) -> get_children v o pos |Any -> raise Exit |Dyn -> begin match Repr.repr o with | BLOCK (0, [|id; o|]) -> let tpe = Any in [|(Int, id, 0 :: pos); (tpe, o, 1 :: pos)|] | _ -> raise Exit end |Fail s -> raise Forbidden | Proxy v -> get_children !v o pos | Int64 -> raise Exit | Float64 -> raise Exit let get_children v o pos = try get_children v o pos with Exit -> match Repr.repr o with | BLOCK (_, os) -> Array.mapi (fun i o -> Any, o, i :: pos) os | _ -> [||] type info = { nam : string; typ : value; obj : Repr.obj; pos : int list } let stk = ref ([] : info list) let init () = stk := [] let push name v o p = stk := { nam = name; typ = v; obj = o; pos = p } :: !stk exception EmptyStack let pop () = match !stk with | i::s -> stk := s; i | _ -> raise EmptyStack let rec visit v o pos = Printf.printf "\nDepth %d Pos %s Context %s\n" (List.length !stk) (String.concat "." 
(List.rev_map string_of_int pos)) (String.concat "/" (List.rev_map (fun i -> i.nam) !stk)); Printf.printf "-------------\n"; let children = get_children v o pos in let nchild = Array.length children in Printf.printf "Here: %s, %d child%s\n" (node_info (v,o,pos)) nchild (if nchild = 0 then "" else "ren:"); Array.iteri (fun i vop -> Printf.printf " %d: %s\n" i (node_info vop)) children; Printf.printf "-------------\n"; try match read_num (Array.length children) with | None -> let info = pop () in visit info.typ info.obj info.pos | Some child -> let v',o',pos' = children.(child) in push (get_name v) v o pos; visit v' o' pos' with | EmptyStack -> () | Forbidden -> let info = pop () in visit info.typ info.obj info.pos | Failure _ | Invalid_argument _ -> visit v o pos end (** Loading the vo *) type header = { magic : string; (** Magic number of the marshaller *) length : int; (** Size on disk in bytes *) size32 : int; (** Size in words when loaded on 32-bit systems *) size64 : int; (** Size in words when loaded on 64-bit systems *) objects : int; (** Number of blocks defined in the marshalled structure *) } let dummy_header = { magic = "\000\000\000\000"; length = 0; size32 = 0; size64 = 0; objects = 0; } let parse_header chan = let magic = really_input_string chan 4 in let length = input_binary_int chan in let objects = input_binary_int chan in let size32 = input_binary_int chan in let size64 = input_binary_int chan in { magic; length; size32; size64; objects } module ObjFile = struct type segment = { name : string; pos : int64; len : int64; hash : Digest.t; mutable header : header; } let input_int32 ch = let accu = ref 0l in for _i = 0 to 3 do let c = input_byte ch in accu := Int32.add (Int32.shift_left !accu 8) (Int32.of_int c) done; !accu let input_int64 ch = let accu = ref 0L in for _i = 0 to 7 do let c = input_byte ch in accu := Int64.add (Int64.shift_left !accu 8) (Int64.of_int c) done; !accu let input_segment_summary ch = let nlen = input_int32 ch in let name = really_input_string ch (Int32.to_int nlen) in let pos = input_int64 ch in let len = input_int64 ch in let hash = Digest.input ch in { name; pos; len; hash; header = dummy_header } let rec input_segment_summaries ch n accu = if Int32.equal n 0l then Array.of_list (List.rev accu) else let s = input_segment_summary ch in let accu = s :: accu in input_segment_summaries ch (Int32.pred n) accu let parse_segments ch = let magic = input_int32 ch in let version = input_int32 ch in let summary_pos = input_int64 ch in let () = LargeFile.seek_in ch summary_pos in let nsum = input_int32 ch in let seg = input_segment_summaries ch nsum [] in for i = 0 to Array.length seg - 1 do let () = LargeFile.seek_in ch seg.(i).pos in let header = parse_header ch in seg.(i).header <- header done; (magic, version, seg) end let visit_vo f = Printf.printf "\nWelcome to votour !\n"; Printf.printf "Enjoy your guided tour of a Coq .vo or .vi file\n"; Printf.printf "Object sizes are in words (%d bits)\n" Sys.word_size; Printf.printf "At prompt, enters the -th child, u goes up 1 level, x exits\n\n%!"; let known_segments = [ "summary", Values.v_libsum; "library", Values.v_lib; "universes", Values.v_univopaques; "tasks", (Opt Values.v_stm_seg); "opaques", Values.v_opaquetable; ] in let repr = if Sys.word_size = 64 then (module ReprMem : S) else (module ReprObj : S) (* On 32-bit machines, representation may exceed the max size of arrays *) in let module Repr = (val repr : S) in let module Visit = Visit(Repr) in while true do let ch = open_in_bin f in let (_magic, 
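(* Layout recovered by [ObjFile.parse_segments] above: an int32 magic, an int32
   version and an int64 offset to the segment table; at that offset, an int32
   segment count followed by one summary per segment (int32 name length, the
   name itself, int64 position, int64 length and a [Digest.t] checksum read
   with [Digest.input]); each segment position then points at a marshal header
   decoded by [parse_header]. *)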
version, segments) = ObjFile.parse_segments ch in Printf.printf "File format: %ld\n%!" version; Printf.printf "The file has %d segments, choose the one to visit:\n" (Array.length segments); Array.iteri (fun i ObjFile.{ name; pos; header } -> let size = if Sys.word_size = 64 then header.size64 else header.size32 in Printf.printf " %d: %s, starting at byte %Ld (size %iw)\n" i name pos size) segments; match read_num (Array.length segments) with | Some seg -> let seg = segments.(seg) in let open ObjFile in LargeFile.seek_in ch seg.pos; let o = Repr.input ch in let () = Visit.init () in let typ = try List.assoc seg.name known_segments with Not_found -> Any in Visit.visit typ o [] | None -> () done let () = if not !Sys.interactive then Arg.parse [] visit_vo ("votour: guided tour of a Coq .vo or .vi file\n"^ "Usage: votour file.v[oi]") coq-8.15.0/checker/votour.mli000066400000000000000000000014501417001151100160230ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a -> int) -> 'a array -> 'a array -> int val equal : ('a -> 'a -> bool) -> 'a array -> 'a array -> bool val equal_norefl : ('a -> 'a -> bool) -> 'a array -> 'a array -> bool val is_empty : 'a array -> bool val exists2 : ('a -> 'b -> bool) -> 'a array -> 'b array -> bool val for_all2 : ('a -> 'b -> bool) -> 'a array -> 'b array -> bool val for_all3 : ('a -> 'b -> 'c -> bool) -> 'a array -> 'b array -> 'c array -> bool val for_all4 : ('a -> 'b -> 'c -> 'd -> bool) -> 'a array -> 'b array -> 'c array -> 'd array -> bool val for_all_i : (int -> 'a -> bool) -> int -> 'a array -> bool val findi : (int -> 'a -> bool) -> 'a array -> int option val hd : 'a array -> 'a val tl : 'a array -> 'a array val last : 'a array -> 'a val cons : 'a -> 'a array -> 'a array val rev : 'a array -> unit val fold_right_i : (int -> 'b -> 'a -> 'a) -> 'b array -> 'a -> 'a val fold_left_i : (int -> 'a -> 'b -> 'a) -> 'a -> 'b array -> 'a val fold_right2 : ('a -> 'b -> 'c -> 'c) -> 'a array -> 'b array -> 'c -> 'c val fold_right3 : ('a -> 'b -> 'c -> 'd -> 'd) -> 'a array -> 'b array -> 'c array -> 'd -> 'd val fold_left2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b array -> 'c array -> 'a val fold_left3 : ('a -> 'b -> 'c -> 'd -> 'a) -> 'a -> 'b array -> 'c array -> 'd array -> 'a val fold_left4 : ('a -> 'b -> 'c -> 'd -> 'e -> 'a) -> 'a -> 'b array -> 'c array -> 'd array -> 'e array -> 'a val fold_left2_i : (int -> 'a -> 'b -> 'c -> 'a) -> 'a -> 'b array -> 'c array -> 'a val fold_left_from : int -> ('a -> 'b -> 'a) -> 'a -> 'b array -> 'a val map_to_list : ('a -> 'b) -> 'a array -> 'b list val map_of_list : ('a -> 'b) -> 'a list -> 'b array val chop : int -> 'a array -> 'a array * 'a array val split : ('a * 'b) array -> 'a array * 'b array val map2_i : (int -> 'a -> 'b -> 'c) -> 'a array -> 'b array -> 'c array val map3 : ('a -> 'b -> 'c -> 'd) -> 'a array -> 'b array -> 'c array -> 'd array val map3_i : (int -> 'a -> 'b -> 'c -> 'd) -> 'a array -> 'b array -> 'c array -> 'd array val map_left : ('a -> 'b) -> 'a array -> 'b array val iter2_i : (int -> 'a -> 'b -> unit) -> 'a array -> 'b array -> unit val iter3 : ('a -> 'b -> 'c -> unit) -> 'a array -> 'b array -> 'c array -> unit val fold_left_map : ('a -> 'b -> 'a * 'c) -> 'a -> 'b array -> 'a * 'c array val fold_right_map : ('a -> 'c -> 'b * 'c) -> 'a array -> 'c -> 'b array * 'c val fold_left2_map : ('a -> 'b -> 'c -> 'a * 'd) -> 'a -> 'b array -> 'c array 
-> 'a * 'd array val fold_left2_map_i : (int -> 'a -> 'b -> 'c -> 'a * 'd) -> 'a -> 'b array -> 'c array -> 'a * 'd array val fold_right2_map : ('a -> 'b -> 'c -> 'd * 'c) -> 'a array -> 'b array -> 'c -> 'd array * 'c val distinct : 'a array -> bool val rev_of_list : 'a list -> 'a array val rev_to_list : 'a array -> 'a list val filter_with : bool list -> 'a array -> 'a array module Smart : sig val map : ('a -> 'a) -> 'a array -> 'a array val map_i : (int -> 'a -> 'a) -> 'a array -> 'a array val map2 : ('a -> 'b -> 'b) -> 'a array -> 'b array -> 'b array val fold_left_map : ('a -> 'b -> 'a * 'b) -> 'a -> 'b array -> 'a * 'b array val fold_left2_map : ('a -> 'b -> 'c -> 'a * 'c) -> 'a -> 'b array -> 'c array -> 'a * 'c array end module Fun1 : sig val map : ('r -> 'a -> 'b) -> 'r -> 'a array -> 'b array val iter : ('r -> 'a -> unit) -> 'r -> 'a array -> unit val iter2 : ('r -> 'a -> 'b -> unit) -> 'r -> 'a array -> 'b array -> unit module Smart : sig val map : ('r -> 'a -> 'a) -> 'r -> 'a array -> 'a array end end end include Array let uget = Array.unsafe_get (* Arrays *) let compare cmp v1 v2 = if v1 == v2 then 0 else let len = Array.length v1 in let c = Int.compare len (Array.length v2) in if c <> 0 then c else let rec loop i = if i < 0 then 0 else let x = uget v1 i in let y = uget v2 i in let c = cmp x y in if c <> 0 then c else loop (i - 1) in loop (len - 1) let equal_norefl cmp t1 t2 = let len = Array.length t1 in if not (Int.equal len (Array.length t2)) then false else let rec aux i = if i < 0 then true else let x = uget t1 i in let y = uget t2 i in cmp x y && aux (pred i) in aux (len - 1) let equal cmp t1 t2 = if t1 == t2 then true else equal_norefl cmp t1 t2 let is_empty array = Int.equal (Array.length array) 0 let exists2 f v1 v2 = let rec exrec = function | -1 -> false | n -> f (uget v1 n) (uget v2 n) || (exrec (n-1)) in let lv1 = Array.length v1 in lv1 = Array.length v2 && exrec (lv1-1) let for_all2 f v1 v2 = let rec allrec = function | -1 -> true | n -> let ans = f (uget v1 n) (uget v2 n) in ans && (allrec (n-1)) in let lv1 = Array.length v1 in lv1 = Array.length v2 && allrec (pred lv1) let for_all3 f v1 v2 v3 = let rec allrec = function | -1 -> true | n -> let ans = f (uget v1 n) (uget v2 n) (uget v3 n) in ans && (allrec (n-1)) in let lv1 = Array.length v1 in lv1 = Array.length v2 && lv1 = Array.length v3 && allrec (pred lv1) let for_all4 f v1 v2 v3 v4 = let rec allrec = function | -1 -> true | n -> let ans = f (uget v1 n) (uget v2 n) (uget v3 n) (uget v4 n) in ans && (allrec (n-1)) in let lv1 = Array.length v1 in lv1 = Array.length v2 && lv1 = Array.length v3 && lv1 = Array.length v4 && allrec (pred lv1) let for_all_i f i v = let len = Array.length v in let rec allrec i n = n = len || f i (uget v n) && allrec (i+1) (n+1) in allrec i 0 exception Found of int let findi (pred: int -> 'a -> bool) (arr: 'a array) : int option = try for i=0 to Array.length arr - 1 do if pred i (uget arr i) then raise (Found i) done; None with Found i -> Some i let hd v = match Array.length v with | 0 -> failwith "Array.hd" | _ -> uget v 0 let tl v = match Array.length v with | 0 -> failwith "Array.tl" | n -> Array.sub v 1 (pred n) let last v = match Array.length v with | 0 -> failwith "Array.last" | n -> uget v (pred n) let cons e v = let len = Array.length v in let ans = Array.make (Array.length v + 1) e in let () = Array.blit v 0 ans 1 len in ans let rev t = let n=Array.length t in if n <=0 then () else for i = 0 to pred (n/2) do let tmp = uget t ((pred n)-i) in Array.unsafe_set t ((pred n)-i) 
(uget t i); Array.unsafe_set t i tmp done let fold_right_i f v a = let rec fold a n = if n=0 then a else let k = n-1 in fold (f k (uget v k) a) k in fold a (Array.length v) let fold_left_i f v a = let n = Array.length a in let rec fold i v = if i = n then v else fold (succ i) (f i v (uget a i)) in fold 0 v let fold_right2 f v1 v2 a = let lv1 = Array.length v1 in let rec fold a n = if n=0 then a else let k = n-1 in fold (f (uget v1 k) (uget v2 k) a) k in if Array.length v2 <> lv1 then invalid_arg "Array.fold_right2"; fold a lv1 let fold_left2 f a v1 v2 = let lv1 = Array.length v1 in let rec fold a n = if n >= lv1 then a else fold (f a (uget v1 n) (uget v2 n)) (succ n) in if Array.length v2 <> lv1 then invalid_arg "Array.fold_left2"; fold a 0 let fold_left2_i f a v1 v2 = let lv1 = Array.length v1 in let rec fold a n = if n >= lv1 then a else fold (f n a (uget v1 n) (uget v2 n)) (succ n) in if Array.length v2 <> lv1 then invalid_arg "Array.fold_left2_i"; fold a 0 let fold_right3 f v1 v2 v3 a = let lv1 = Array.length v1 in let rec fold a n = if n=0 then a else let k = n-1 in fold (f (uget v1 k) (uget v2 k) (uget v3 k) a) k in if Array.length v2 <> lv1 || Array.length v3 <> lv1 then invalid_arg "Array.fold_right3"; fold a lv1 let fold_left3 f a v1 v2 v3 = let lv1 = Array.length v1 in let rec fold a n = if n >= lv1 then a else fold (f a (uget v1 n) (uget v2 n) (uget v3 n)) (succ n) in if Array.length v2 <> lv1 || Array.length v3 <> lv1 then invalid_arg "Array.fold_left3"; fold a 0 let fold_left4 f a v1 v2 v3 v4 = let lv1 = Array.length v1 in let rec fold a n = if n >= lv1 then a else fold (f a (uget v1 n) (uget v2 n) (uget v3 n) (uget v4 n)) (succ n) in if Array.length v2 <> lv1 || Array.length v3 <> lv1 || Array.length v4 <> lv1 then invalid_arg "Array.fold_left4"; fold a 0 let fold_left_from n f a v = let len = Array.length v in let () = if n < 0 then invalid_arg "Array.fold_left_from" in let rec fold a n = if n >= len then a else fold (f a (uget v n)) (succ n) in fold a n let rev_of_list = function | [] -> [| |] | x :: l -> let len = List.length l in let ans = Array.make (succ len) x in let rec set i = function | [] -> () | x :: l -> Array.unsafe_set ans i x; set (pred i) l in let () = set (len - 1) l in ans let map_to_list = CList.map_of_array let map_of_list f l = let len = List.length l in let rec fill i v = function | [] -> () | x :: l -> Array.unsafe_set v i (f x); fill (succ i) v l in match l with | [] -> [||] | x :: l -> let ans = Array.make len (f x) in let () = fill 1 ans l in ans let chop n v = let vlen = Array.length v in if n > vlen then failwith "Array.chop"; (Array.sub v 0 n, Array.sub v n (vlen-n)) let split v = (Array.map fst v, Array.map snd v) let map2_i f v1 v2 = let len1 = Array.length v1 in let len2 = Array.length v2 in let () = if not (Int.equal len1 len2) then invalid_arg "Array.map2" in if Int.equal len1 0 then [| |] else begin let res = Array.make len1 (f 0 (uget v1 0) (uget v2 0)) in for i = 1 to pred len1 do Array.unsafe_set res i (f i (uget v1 i) (uget v2 i)) done; res end let map3 f v1 v2 v3 = let len1 = Array.length v1 in let () = if len1 <> Array.length v2 || len1 <> Array.length v3 then invalid_arg "Array.map3" in if Int.equal len1 0 then [| |] else begin let res = Array.make len1 (f (uget v1 0) (uget v2 0) (uget v3 0)) in for i = 1 to pred len1 do Array.unsafe_set res i (f (uget v1 i) (uget v2 i) (uget v3 i)) done; res end let map3_i f v1 v2 v3 = let len1 = Array.length v1 in let len2 = Array.length v2 in let len3 = Array.length v3 in let () = if not 
(Int.equal len1 len2 && Int.equal len1 len3) then invalid_arg "Array.map3_i" in if Int.equal len1 0 then [| |] else begin let res = Array.make len1 (f 0 (uget v1 0) (uget v2 0) (uget v3 0)) in for i = 1 to pred len1 do Array.unsafe_set res i (f i (uget v1 i) (uget v2 i) (uget v3 i)) done; res end let map_left f a = (* Ocaml does not guarantee Array.map is LR *) let l = Array.length a in (* (even if so), then we rewrite it *) if Int.equal l 0 then [||] else begin let r = Array.make l (f (uget a 0)) in for i = 1 to l - 1 do Array.unsafe_set r i (f (uget a i)) done; r end let iter2_i f v1 v2 = let len1 = Array.length v1 in let len2 = Array.length v2 in let () = if not (Int.equal len2 len1) then invalid_arg "Array.iter2" in for i = 0 to len1 - 1 do f i (uget v1 i) (uget v2 i) done let iter3 f v1 v2 v3 = let len1 = Array.length v1 in let len2 = Array.length v2 in let len3 = Array.length v3 in let () = if not (Int.equal len2 len1) || not (Int.equal len1 len3) then invalid_arg "Array.iter3" in for i = 0 to len1 - 1 do f (uget v1 i) (uget v2 i) (uget v3 i) done let map_right f a = let l = length a in if l = 0 then [||] else begin let r = Array.make l (f (unsafe_get a (l-1))) in for i = l-2 downto 0 do unsafe_set r i (f (unsafe_get a i)) done; r end let map2_right f a b = let l = length a in if l <> length b then invalid_arg "CArray.map2_right: length mismatch"; if l = 0 then [||] else begin let r = Array.make l (f (unsafe_get a (l-1)) (unsafe_get b (l-1))) in for i = l-2 downto 0 do unsafe_set r i (f (unsafe_get a i) (unsafe_get b i)) done; r end let fold_right_map f v e = let e' = ref e in let v' = map_right (fun x -> let (y,e) = f x !e' in e' := e; y) v in (v',!e') let fold_left_map f e v = let e' = ref e in let v' = Array.map (fun x -> let (e,y) = f !e' x in e' := e; y) v in (!e',v') let fold_right2_map f v1 v2 e = let e' = ref e in let v' = map2_right (fun x1 x2 -> let (y,e) = f x1 x2 !e' in e' := e; y) v1 v2 in (v',!e') let fold_left2_map f e v1 v2 = let e' = ref e in let v' = map2 (fun x1 x2 -> let (e,y) = f !e' x1 x2 in e' := e; y) v1 v2 in (!e',v') let fold_left2_map_i f e v1 v2 = let e' = ref e in let v' = map2_i (fun idx x1 x2 -> let (e,y) = f idx !e' x1 x2 in e' := e; y) v1 v2 in (!e',v') let distinct v = let visited = Hashtbl.create 23 in try Array.iter (fun x -> if Hashtbl.mem visited x then raise Exit else Hashtbl.add visited x x) v; true with Exit -> false let rev_to_list a = let rec tolist i res = if i >= Array.length a then res else tolist (i+1) (uget a i :: res) in tolist 0 [] let filter_with filter v = Array.of_list (CList.filter_with filter (Array.to_list v)) module Smart = struct (* If none of the elements is changed by f we return ar itself. The while loop looks for the first such an element. 
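(For example, with this scheme [Smart.map (fun x -> x) a == a] holds for any
   array [a], while mapping a function that changes at least one element still
   allocates a fresh array and leaves [a] untouched.)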
If found, we break here and the new array is produced, but f is not re-applied to elements that are already checked *) let map f (ar : 'a array) = let len = Array.length ar in let i = ref 0 in let break = ref true in let temp = ref None in while !break && (!i < len) do let v = Array.unsafe_get ar !i in let v' = f v in if v == v' then incr i else begin break := false; temp := Some v'; end done; if !i < len then begin (* The array is not the same as the original one *) let ans : 'a array = Array.copy ar in let v = match !temp with None -> assert false | Some x -> x in Array.unsafe_set ans !i v; incr i; while !i < len do let v = Array.unsafe_get ans !i in let v' = f v in if v != v' then Array.unsafe_set ans !i v'; incr i done; ans end else ar (* Same as map_i but smart *) let map_i f (ar : 'a array) = let len = Array.length ar in let i = ref 0 in let break = ref true in let temp = ref None in while !break && (!i < len) do let v = Array.unsafe_get ar !i in let v' = f !i v in if v == v' then incr i else begin break := false; temp := Some v'; end done; if !i < len then begin (* The array is not the same as the original one *) let ans : 'a array = Array.copy ar in let v = match !temp with None -> assert false | Some x -> x in Array.unsafe_set ans !i v; incr i; while !i < len do let v = Array.unsafe_get ans !i in let v' = f !i v in if v != v' then Array.unsafe_set ans !i v'; incr i done; ans end else ar let map2 f aux_ar ar = let len = Array.length ar in let aux_len = Array.length aux_ar in let () = if not (Int.equal len aux_len) then invalid_arg "Array.Smart.map2" in let i = ref 0 in let break = ref true in let temp = ref None in while !break && (!i < len) do let v = Array.unsafe_get ar !i in let w = Array.unsafe_get aux_ar !i in let v' = f w v in if v == v' then incr i else begin break := false; temp := Some v'; end done; if !i < len then begin (* The array is not the same as the original one *) let ans : 'a array = Array.copy ar in let v = match !temp with None -> assert false | Some x -> x in Array.unsafe_set ans !i v; incr i; while !i < len do let v = Array.unsafe_get ans !i in let w = Array.unsafe_get aux_ar !i in let v' = f w v in if v != v' then Array.unsafe_set ans !i v'; incr i done; ans end else ar (** Same as [Smart.map] but threads a state meanwhile *) let fold_left_map f accu (ar : 'a array) = let len = Array.length ar in let i = ref 0 in let break = ref true in let r = ref accu in (* This variable is never accessed unset *) let temp = ref None in while !break && (!i < len) do let v = Array.unsafe_get ar !i in let (accu, v') = f !r v in r := accu; if v == v' then incr i else begin break := false; temp := Some v'; end done; if !i < len then begin let ans : 'a array = Array.copy ar in let v = match !temp with None -> assert false | Some x -> x in Array.unsafe_set ans !i v; incr i; while !i < len do let v = Array.unsafe_get ar !i in let (accu, v') = f !r v in r := accu; if v != v' then Array.unsafe_set ans !i v'; incr i done; !r, ans end else !r, ar (** Same as [Smart.map2] but threads a state meanwhile *) let fold_left2_map f accu aux_ar ar = let len = Array.length ar in let aux_len = Array.length aux_ar in let () = if not (Int.equal len aux_len) then invalid_arg "Array.Smart.fold_left2_map" in let i = ref 0 in let break = ref true in let r = ref accu in (* This variable is never accessed unset *) let temp = ref None in while !break && (!i < len) do let v = Array.unsafe_get ar !i in let w = Array.unsafe_get aux_ar !i in let (accu, v') = f !r w v in r := accu; if v == v' then incr i 
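(* the mapped element is physically unchanged: keep scanning, only threading the accumulator *)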
else begin break := false; temp := Some v'; end done; if !i < len then begin let ans : 'a array = Array.copy ar in let v = match !temp with None -> assert false | Some x -> x in Array.unsafe_set ans !i v; incr i; while !i < len do let v = Array.unsafe_get ar !i in let w = Array.unsafe_get aux_ar !i in let (accu, v') = f !r w v in r := accu; if v != v' then Array.unsafe_set ans !i v'; incr i done; !r, ans end else !r, ar end module Fun1 = struct let map f arg v = match v with | [| |] -> [| |] | _ -> let len = Array.length v in let x0 = Array.unsafe_get v 0 in let ans = Array.make len (f arg x0) in for i = 1 to pred len do let x = Array.unsafe_get v i in Array.unsafe_set ans i (f arg x) done; ans let iter f arg v = let len = Array.length v in for i = 0 to pred len do let x = uget v i in f arg x done let iter2 f arg v1 v2 = let len1 = Array.length v1 in let len2 = Array.length v2 in let () = if not (Int.equal len2 len1) then invalid_arg "Array.Fun1.iter2" in for i = 0 to pred len1 do let x1 = uget v1 i in let x2 = uget v2 i in f arg x1 x2 done module Smart = struct let map f arg (ar : 'a array) = let len = Array.length ar in let i = ref 0 in let break = ref true in let temp = ref None in while !break && (!i < len) do let v = Array.unsafe_get ar !i in let v' = f arg v in if v == v' then incr i else begin break := false; temp := Some v'; end done; if !i < len then begin (* The array is not the same as the original one *) let ans : 'a array = Array.copy ar in let v = match !temp with None -> assert false | Some x -> x in Array.unsafe_set ans !i v; incr i; while !i < len do let v = Array.unsafe_get ans !i in let v' = f arg v in if v != v' then Array.unsafe_set ans !i v'; incr i done; ans end else ar end end coq-8.15.0/clib/cArray.mli000066400000000000000000000166551417001151100152300ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a -> int) -> 'a array -> 'a array -> int (** First size comparison, then lexicographic order. *) val equal : ('a -> 'a -> bool) -> 'a array -> 'a array -> bool (** Lift equality to array type. *) val equal_norefl : ('a -> 'a -> bool) -> 'a array -> 'a array -> bool (** Like {!equal} but does not assume that equality is reflexive: no optimisation is performed if both arrays are physically the same. *) val is_empty : 'a array -> bool (** True whenever the array is empty. *) val exists2 : ('a -> 'b -> bool) -> 'a array -> 'b array -> bool val for_all2 : ('a -> 'b -> bool) -> 'a array -> 'b array -> bool val for_all3 : ('a -> 'b -> 'c -> bool) -> 'a array -> 'b array -> 'c array -> bool val for_all4 : ('a -> 'b -> 'c -> 'd -> bool) -> 'a array -> 'b array -> 'c array -> 'd array -> bool val for_all_i : (int -> 'a -> bool) -> int -> 'a array -> bool val findi : (int -> 'a -> bool) -> 'a array -> int option val hd : 'a array -> 'a (** First element of an array, or [Failure "Array.hd"] if empty. *) val tl : 'a array -> 'a array (** Remaining part of [hd], or [Failure "Array.tl"] if empty. *) val last : 'a array -> 'a (** Last element of an array, or [Failure "Array.last"] if empty. *) val cons : 'a -> 'a array -> 'a array (** Append an element on the left. *) val rev : 'a array -> unit (** In place reversal. 
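For instance (illustrative), after [let a = [|1;2;3|] in rev a] the array [a] holds [|3;2;1|].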
*) val fold_right_i : (int -> 'b -> 'a -> 'a) -> 'b array -> 'a -> 'a val fold_left_i : (int -> 'a -> 'b -> 'a) -> 'a -> 'b array -> 'a val fold_right2 : ('a -> 'b -> 'c -> 'c) -> 'a array -> 'b array -> 'c -> 'c val fold_right3 : ('a -> 'b -> 'c -> 'd -> 'd) -> 'a array -> 'b array -> 'c array -> 'd -> 'd val fold_left2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b array -> 'c array -> 'a val fold_left3 : ('a -> 'b -> 'c -> 'd -> 'a) -> 'a -> 'b array -> 'c array -> 'd array -> 'a val fold_left4 : ('a -> 'b -> 'c -> 'd -> 'e -> 'a) -> 'a -> 'b array -> 'c array -> 'd array -> 'e array -> 'a val fold_left2_i : (int -> 'a -> 'b -> 'c -> 'a) -> 'a -> 'b array -> 'c array -> 'a val fold_left_from : int -> ('a -> 'b -> 'a) -> 'a -> 'b array -> 'a val map_to_list : ('a -> 'b) -> 'a array -> 'b list (** Composition of [map] and [to_list]. *) val map_of_list : ('a -> 'b) -> 'a list -> 'b array (** Composition of [map] and [of_list]. *) val chop : int -> 'a array -> 'a array * 'a array (** [chop i a] returns [(a1, a2)] s.t. [a = a1 + a2] and [length a1 = n]. Raise [Failure "Array.chop"] if [i] is not a valid index. *) val split : ('a * 'b) array -> 'a array * 'b array val map2_i : (int -> 'a -> 'b -> 'c) -> 'a array -> 'b array -> 'c array val map3 : ('a -> 'b -> 'c -> 'd) -> 'a array -> 'b array -> 'c array -> 'd array val map3_i : (int -> 'a -> 'b -> 'c -> 'd) -> 'a array -> 'b array -> 'c array -> 'd array val map_left : ('a -> 'b) -> 'a array -> 'b array (** As [map] but guaranteed to be left-to-right. *) val iter2_i : (int -> 'a -> 'b -> unit) -> 'a array -> 'b array -> unit (** Iter on two arrays. Raise [Invalid_argument "Array.iter2_i"] if sizes differ. *) val iter3 : ('a -> 'b -> 'c -> unit) -> 'a array -> 'b array -> 'c array -> unit (** Iter on three arrays. Raise [Invalid_argument "Array.iter3"] if sizes differ. *) val fold_left_map : ('a -> 'b -> 'a * 'c) -> 'a -> 'b array -> 'a * 'c array (** [fold_left_map f e_0 [|l_1...l_n|] = e_n,[|k_1...k_n|]] where [(e_i,k_i)=f e_{i-1} l_i]; see also [Smart.fold_left_map] *) val fold_right_map : ('a -> 'c -> 'b * 'c) -> 'a array -> 'c -> 'b array * 'c (** Same, folding on the right *) val fold_left2_map : ('a -> 'b -> 'c -> 'a * 'd) -> 'a -> 'b array -> 'c array -> 'a * 'd array (** Same with two arrays, folding on the left; see also [Smart.fold_left2_map] *) val fold_left2_map_i : (int -> 'a -> 'b -> 'c -> 'a * 'd) -> 'a -> 'b array -> 'c array -> 'a * 'd array (** Same than [fold_left2_map] but passing the index of the array *) val fold_right2_map : ('a -> 'b -> 'c -> 'd * 'c) -> 'a array -> 'b array -> 'c -> 'd array * 'c (** Same with two arrays, folding on the right *) val distinct : 'a array -> bool (** Return [true] if every element of the array is unique (for default equality). *) val rev_of_list : 'a list -> 'a array (** [rev_of_list l] is equivalent to [Array.of_list (List.rev l)]. *) val rev_to_list : 'a array -> 'a list (** [rev_to_list a] is equivalent to [List.rev (List.of_array a)]. *) val filter_with : bool list -> 'a array -> 'a array (** [filter_with b a] selects elements of [a] whose corresponding element in [b] is [true]. Raise [Invalid_argument _] when sizes differ. *) module Smart : sig val map : ('a -> 'a) -> 'a array -> 'a array (** [Smart.map f a] behaves as [map f a] but returns [a] instead of a copy when [f x == x] for all [x] in [a]. 
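For instance (illustrative), with an integer array, [Smart.map (fun x -> if x < 0 then 0 else x) [|1;2;3|]] is the input array itself, whereas applying it to [|1;-2;3|] allocates a fresh array.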
*) val map_i : (int -> 'a -> 'a) -> 'a array -> 'a array val map2 : ('a -> 'b -> 'b) -> 'a array -> 'b array -> 'b array (** [Smart.map2 f a b] behaves as [map2 f a b] but returns [a] instead of a copy when [f x y == y] for all [x] in [a] and [y] in [b] pointwise. *) val fold_left_map : ('a -> 'b -> 'a * 'b) -> 'a -> 'b array -> 'a * 'b array (** [Smart.fold_left_mapf a b] behaves as [fold_left_map] but returns [b] as second component instead of a copy of [b] when the output array is pointwise the same as the input array [b] *) val fold_left2_map : ('a -> 'b -> 'c -> 'a * 'c) -> 'a -> 'b array -> 'c array -> 'a * 'c array (** [Smart.fold_left2_map f a b c] behaves as [fold_left2_map] but returns [c] as second component instead of a copy of [c] when the output array is pointwise the same as the input array [c] *) end (** The functions defined in this module are optimized specializations of the main ones, when the returned array is of same type as one of the original array. *) module Fun1 : sig val map : ('r -> 'a -> 'b) -> 'r -> 'a array -> 'b array (** [Fun1.map f x v = map (f x) v] *) val iter : ('r -> 'a -> unit) -> 'r -> 'a array -> unit (** [Fun1.iter f x v = iter (f x) v] *) val iter2 : ('r -> 'a -> 'b -> unit) -> 'r -> 'a array -> 'b array -> unit (** [Fun1.iter2 f x v1 v2 = iter (f x) v1 v2] *) module Smart : sig val map : ('r -> 'a -> 'a) -> 'r -> 'a array -> 'a array (** [Fun1.Smart.map f x v = Smart.map (f x) v] *) end end (** The functions defined in this module are the same as the main ones, except that they are all higher-order, and their function arguments have an additional parameter. This allows us to prevent closure creation in critical cases. *) end include ExtS coq-8.15.0/clib/cEphemeron.ml000066400000000000000000000067171417001151100157210ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* a | _ -> b *) if x.0 == constr_Y then a else b and the polymorphic comparison function works like: let equal = fun (c1, ...) (c2, ...) -> c1.id == c2.id In every new extension constructor, the name field is a constant string and the id field is filled with an unique[1] value returned by %caml_fresh_oo_id. Moreover, every value of an extensible variant type is allocated as a new block. [1]: On 64-bit systems. On 32-bit systems, calling %caml_fresh_oo_id 2**30 times will result in a wraparound. Note that this does not affect soundness because constructors are compared by physical equality during matching. See OCaml PR7809 for code demonstrating this. An extensible variant can be marshalled and unmarshalled, and is guaranteed to not be equal to itself after unmarshalling, since the id field is filled with another unique value. Note that the explanation above is purely informative and we do not depend on the exact representation of extensible variants, only on the fact that no two constructor representations ever alias. In particular, if the definition of constr is replaced with: type constr = int (where the value is truly unique for every created constructor), correctness is preserved. *) type 'a typ = .. (* Erases the contained type so that the key can be put in a hash table. *) type boxkey = Box : 'a typ -> boxkey [@@unboxed] (* Carry the type we just erased with the actual key. 
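A key therefore pairs the typed witness with its erased box: the box indexes the (ephemeron-based) hash table, while the witness lets [get] recover the stored value at its original type.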
*) type 'a key = 'a typ * boxkey module EHashtbl = Ephemeron.K1.Make(struct type t = boxkey let equal = (==) let hash = Hashtbl.hash end) type value = { get : 'k. 'k typ -> 'k } [@@unboxed] let values : value EHashtbl.t = EHashtbl.create 1001 let create : type v. v -> v key = fun value -> let module M = struct type _ typ += Typ : v typ let get : type k. k typ -> k = fun typ -> match typ with | Typ -> value | _ -> assert false let boxkey = Box Typ let key = Typ, boxkey let value = { get } end in EHashtbl.add values M.boxkey M.value; M.key (* Avoid raising Not_found *) exception InvalidKey let get (typ, boxkey) = try (EHashtbl.find values boxkey).get typ with Not_found -> raise InvalidKey let default (typ, boxkey) default = try (EHashtbl.find values boxkey).get typ with Not_found -> default let clean () = EHashtbl.clean values coq-8.15.0/clib/cEphemeron.mli000066400000000000000000000046111417001151100160610ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a key exception InvalidKey val get : 'a key -> 'a (** May raise InvalidKey *) val default : 'a key -> 'a -> 'a (** Never fails. *) val clean : unit -> unit coq-8.15.0/clib/cList.ml000066400000000000000000000725021417001151100147050ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* int type 'a eq = 'a -> 'a -> bool module type S = module type of List module type ExtS = sig include S val compare : 'a cmp -> 'a list cmp val equal : 'a eq -> 'a list eq val is_empty : 'a list -> bool val mem_f : 'a eq -> 'a -> 'a list -> bool val for_all_i : (int -> 'a -> bool) -> int -> 'a list -> bool val for_all2eq : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool val exists_i : (int -> 'a -> bool) -> int -> 'a list -> bool val prefix_of : 'a eq -> 'a list -> 'a list -> bool val same_length : 'a list -> 'b list -> bool val interval : int -> int -> int list val make : int -> 'a -> 'a list val addn : int -> 'a -> 'a list -> 'a list val init : int -> (int -> 'a) -> 'a list val append : 'a list -> 'a list -> 'a list val concat : 'a list list -> 'a list val flatten : 'a list list -> 'a list val assign : 'a list -> int -> 'a -> 'a list val filter : ('a -> bool) -> 'a list -> 'a list val filter2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> 'a list * 'b list val filteri : (int -> 'a -> bool) -> 'a list -> 'a list val filter_with : bool list -> 'a list -> 'a list val map_filter : ('a -> 'b option) -> 'a list -> 'b list val map_filter_i : (int -> 'a -> 'b option) -> 'a list -> 'b list val partitioni : (int -> 'a -> bool) -> 'a list -> 'a list * 'a list val map : ('a -> 'b) -> 'a list -> 'b list val map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list val map_left : ('a -> 'b) -> 'a list -> 'b list val map_i : (int -> 'a -> 'b) -> int -> 'a list -> 'b list val map2_i : (int -> 'a -> 'b -> 'c) -> int -> 'a list -> 'b list -> 'c list val map3 : ('a -> 'b -> 'c -> 'd) -> 'a list -> 'b list -> 'c list -> 'd list val map4 : ('a -> 'b -> 'c -> 'd -> 'e) -> 'a list -> 'b list -> 'c list -> 'd list -> 'e list val map_of_array : ('a -> 'b) -> 'a array -> 'b list val map_append : ('a -> 'b list) -> 'a list -> 'b list val map_append2 : ('a -> 'b -> 'c list) -> 'a list -> 'b list -> 'c list val extend : bool list -> 'a -> 'a list -> 'a list val count : 
('a -> bool) -> 'a list -> int val index : 'a eq -> 'a -> 'a list -> int val safe_index : 'a eq -> 'a -> 'a list -> int option val index0 : 'a eq -> 'a -> 'a list -> int val fold_left_until : ('c -> 'a -> 'c CSig.until) -> 'c -> 'a list -> 'c val fold_right_i : (int -> 'a -> 'b -> 'b) -> int -> 'a list -> 'b -> 'b val fold_left_i : (int -> 'a -> 'b -> 'a) -> int -> 'a -> 'b list -> 'a val fold_right_and_left : ('a -> 'b -> 'b list -> 'a) -> 'b list -> 'a -> 'a val fold_left3 : ('a -> 'b -> 'c -> 'd -> 'a) -> 'a -> 'b list -> 'c list -> 'd list -> 'a val fold_left2_set : exn -> ('a -> 'b -> 'c -> 'b list -> 'c list -> 'a) -> 'a -> 'b list -> 'c list -> 'a val fold_left_map : ('a -> 'b -> 'a * 'c) -> 'a -> 'b list -> 'a * 'c list val fold_right_map : ('b -> 'a -> 'c * 'a) -> 'b list -> 'a -> 'c list * 'a val fold_left2_map : ('a -> 'b -> 'c -> 'a * 'd) -> 'a -> 'b list -> 'c list -> 'a * 'd list val fold_right2_map : ('b -> 'c -> 'a -> 'd * 'a) -> 'b list -> 'c list -> 'a -> 'd list * 'a val fold_left3_map : ('a -> 'b -> 'c -> 'd -> 'a * 'e) -> 'a -> 'b list -> 'c list -> 'd list -> 'a * 'e list val fold_left4_map : ('a -> 'b -> 'c -> 'd -> 'e -> 'a * 'r) -> 'a -> 'b list -> 'c list -> 'd list -> 'e list -> 'a * 'r list val except : 'a eq -> 'a -> 'a list -> 'a list val remove : 'a eq -> 'a -> 'a list -> 'a list val remove_first : ('a -> bool) -> 'a list -> 'a list val extract_first : ('a -> bool) -> 'a list -> 'a list * 'a val find_map : ('a -> 'b option) -> 'a list -> 'b exception IndexOutOfRange val goto : int -> 'a list -> 'a list * 'a list val split_when : ('a -> bool) -> 'a list -> 'a list * 'a list val sep_last : 'a list -> 'a * 'a list val drop_last : 'a list -> 'a list val last : 'a list -> 'a val lastn : int -> 'a list -> 'a list val chop : int -> 'a list -> 'a list * 'a list val firstn : int -> 'a list -> 'a list val skipn : int -> 'a list -> 'a list val skipn_at_least : int -> 'a list -> 'a list val drop_prefix : 'a eq -> 'a list -> 'a list -> 'a list val insert : ('a -> 'a -> bool) -> 'a -> 'a list -> 'a list val share_tails : 'a list -> 'a list -> 'a list * 'a list * 'a list val map_assoc : ('a -> 'b) -> ('c * 'a) list -> ('c * 'b) list val assoc_f : 'a eq -> 'a -> ('a * 'b) list -> 'b val remove_assoc_f : 'a eq -> 'a -> ('a * 'b) list -> ('a * 'b) list val mem_assoc_f : 'a eq -> 'a -> ('a * 'b) list -> bool val factorize_left : 'a eq -> ('a * 'b) list -> ('a * 'b list) list val split : ('a * 'b) list -> 'a list * 'b list val combine : 'a list -> 'b list -> ('a * 'b) list val split3 : ('a * 'b * 'c) list -> 'a list * 'b list * 'c list val split4 : ('a * 'b * 'c * 'd) list -> 'a list * 'b list * 'c list * 'd list val combine3 : 'a list -> 'b list -> 'c list -> ('a * 'b * 'c) list val add_set : 'a eq -> 'a -> 'a list -> 'a list val eq_set : 'a eq -> 'a list -> 'a list -> bool val subset : 'a list -> 'a list -> bool val merge_set : 'a cmp -> 'a list -> 'a list -> 'a list val intersect : 'a eq -> 'a list -> 'a list -> 'a list val union : 'a eq -> 'a list -> 'a list -> 'a list val unionq : 'a list -> 'a list -> 'a list val subtract : 'a eq -> 'a list -> 'a list -> 'a list val subtractq : 'a list -> 'a list -> 'a list val distinct : 'a list -> bool val distinct_f : 'a cmp -> 'a list -> bool val duplicates : 'a eq -> 'a list -> 'a list val uniquize_key : ('a -> 'b) -> 'a list -> 'a list val uniquize : 'a list -> 'a list val sort_uniquize : 'a cmp -> 'a list -> 'a list val min : 'a cmp -> 'a list -> 'a val cartesian : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list val cartesians : 
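(* n-ary cartesian product: folds the binary [cartesian] over a list of lists *)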
('a -> 'b -> 'b) -> 'b -> 'a list list -> 'b list val combinations : 'a list list -> 'a list list val cartesians_filter : ('a -> 'b -> 'b option) -> 'b -> 'a list list -> 'b list module Smart : sig val map : ('a -> 'a) -> 'a list -> 'a list end module type MonoS = sig type elt val equal : elt list -> elt list -> bool val mem : elt -> elt list -> bool val assoc : elt -> (elt * 'a) list -> 'a val mem_assoc : elt -> (elt * 'a) list -> bool val remove_assoc : elt -> (elt * 'a) list -> (elt * 'a) list val mem_assoc_sym : elt -> ('a * elt) list -> bool end end include List (** Tail-rec implementation of usual functions. This is a well-known trick used in, for instance, ExtLib and Batteries. *) type 'a cell = { head : 'a; mutable tail : 'a list; } external cast : 'a cell -> 'a list = "%identity" (** Extensions and redefinitions of OCaml Stdlib *) (** {6 Equality, testing} *) let rec same_length l1 l2 = match l1, l2 with | [], [] -> true | _ :: l1, _ :: l2 -> same_length l1 l2 | ([], _ :: _) | (_ :: _, []) -> false let rec compare cmp l1 l2 = if l1 == l2 then 0 else match l1,l2 with | [], [] -> 0 | _::_, [] -> 1 | [], _::_ -> -1 | x1::l1, x2::l2 -> match cmp x1 x2 with | 0 -> compare cmp l1 l2 | c -> c let rec equal cmp l1 l2 = l1 == l2 || match l1, l2 with | [], [] -> true | x1 :: l1, x2 :: l2 -> cmp x1 x2 && equal cmp l1 l2 | _ -> false let is_empty = function | [] -> true | _ -> false let mem_f cmp x l = List.exists (cmp x) l let for_all_i p = let rec for_all_p i = function | [] -> true | a::l -> p i a && for_all_p (i+1) l in for_all_p let for_all2eq f l1 l2 = try List.for_all2 f l1 l2 with Invalid_argument _ -> false let exists_i p = let rec exists_p i = function | [] -> false | a::l -> p i a || exists_p (i+1) l in exists_p let prefix_of cmp prefl l = let rec prefrec = function | (h1::t1, h2::t2) -> cmp h1 h2 && prefrec (t1,t2) | ([], _) -> true | _ -> false in prefrec (prefl,l) (** {6 Creating lists} *) let interval n m = let rec interval_n (l,m) = if n > m then l else interval_n (m::l, pred m) in interval_n ([], m) let addn n v = let rec aux n l = if Int.equal n 0 then l else aux (pred n) (v :: l) in if n < 0 then invalid_arg "List.addn" else aux n let make n v = addn n v [] let rec init_loop len f p i = if Int.equal i len then () else let c = { head = f i; tail = [] } in p.tail <- cast c; init_loop len f c (succ i) let init len f = if len < 0 then invalid_arg "List.init" else if Int.equal len 0 then [] else let c = { head = f 0; tail = [] } in init_loop len f c 1; cast c let rec append_loop p tl = function | [] -> p.tail <- tl | x :: l -> let c = { head = x; tail = [] } in p.tail <- cast c; append_loop c tl l let append l1 l2 = match l1 with | [] -> l2 | x :: l -> let c = { head = x; tail = [] } in append_loop c l2 l; cast c let rec copy p = function | [] -> p | x :: l -> let c = { head = x; tail = [] } in p.tail <- cast c; copy c l let rec concat_loop p = function | [] -> () | x :: l -> concat_loop (copy p x) l let concat l = let dummy = { head = Obj.magic 0; tail = [] } in concat_loop dummy l; dummy.tail let flatten = concat (** {6 Lists as arrays} *) let assign l n e = let rec assrec stk l i = match l, i with | (h :: t, 0) -> List.rev_append stk (e :: t) | (h :: t, n) -> assrec (h :: stk) t (pred n) | ([], _) -> failwith "List.assign" in assrec [] l n (** {6 Filtering} *) let rec filter_loop f p = function | [] -> () | x :: l' as l -> let b = f x in filter_loop f p l'; if b then if p.tail == l' then p.tail <- l else p.tail <- x :: p.tail let rec filter f = function | [] -> [] | x :: l' 
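(* alias the whole suffix as [l] so it can be returned physically unchanged when [f] keeps every remaining element *)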
as l -> if f x then let c = { head = x; tail = [] } in filter_loop f c l'; if c.tail == l' then l else cast c else filter f l' let rec filter2_loop f p q l1 l2 = match l1, l2 with | [], [] -> () | x :: l1', y :: l2' -> let b = f x y in filter2_loop f p q l1' l2'; if b then if p.tail == l1' then begin p.tail <- l1; q.tail <- l2 end else begin p.tail <- x :: p.tail; q.tail <- y :: q.tail end | _ -> invalid_arg "List.filter2" let rec filter2 f l1 l2 = match l1, l2 with | [], [] -> ([],[]) | x1 :: l1', x2 :: l2' -> let b = f x1 x2 in if b then let c1 = { head = x1; tail = [] } in let c2 = { head = x2; tail = [] } in filter2_loop f c1 c2 l1' l2'; if c1.tail == l1' then (l1, l2) else (cast c1, cast c2) else filter2 f l1' l2' | _ -> invalid_arg "List.filter2" let filteri p = let rec filter_i_rec i = function | [] -> [] | x :: l -> let l' = filter_i_rec (succ i) l in if p i x then x :: l' else l' in filter_i_rec 0 let rec filter_with_loop filter p l = match filter, l with | [], [] -> () | b :: filter, x :: l' -> filter_with_loop filter p l'; if b then if p.tail == l' then p.tail <- l else p.tail <- x :: p.tail | _ -> invalid_arg "List.filter_with" let rec filter_with filter l = match filter, l with | [], [] -> [] | b :: filter, x :: l' -> if b then let c = { head = x; tail = [] } in filter_with_loop filter c l'; if c.tail == l' then l else cast c else filter_with filter l' | _ -> invalid_arg "List.filter_with" let rec map_filter_loop f p = function | [] -> () | x :: l -> match f x with | None -> map_filter_loop f p l | Some y -> let c = { head = y; tail = [] } in p.tail <- cast c; map_filter_loop f c l let rec map_filter f = function | [] -> [] | x :: l' -> match f x with | None -> map_filter f l' | Some y -> let c = { head = y; tail = [] } in map_filter_loop f c l'; cast c let rec map_filter_i_loop f i p = function | [] -> () | x :: l -> match f i x with | None -> map_filter_i_loop f (succ i) p l | Some y -> let c = { head = y; tail = [] } in p.tail <- cast c; map_filter_i_loop f (succ i) c l let rec map_filter_i_loop' f i = function | [] -> [] | x :: l' -> match f i x with | None -> map_filter_i_loop' f (succ i) l' | Some y -> let c = { head = y; tail = [] } in map_filter_i_loop f (succ i) c l'; cast c let map_filter_i f l = map_filter_i_loop' f 0 l let partitioni p = let rec aux i = function | [] -> [], [] | x :: l -> let (l1, l2) = aux (succ i) l in if p i x then (x :: l1, l2) else (l1, x :: l2) in aux 0 (** {6 Applying functorially} *) let rec map_loop f p = function | [] -> () | x :: l -> let c = { head = f x; tail = [] } in p.tail <- cast c; map_loop f c l let map f = function | [] -> [] | x :: l -> let c = { head = f x; tail = [] } in map_loop f c l; cast c let rec map2_loop f p l1 l2 = match l1, l2 with | [], [] -> () | x :: l1, y :: l2 -> let c = { head = f x y; tail = [] } in p.tail <- cast c; map2_loop f c l1 l2 | _ -> invalid_arg "List.map2" let map2 f l1 l2 = match l1, l2 with | [], [] -> [] | x :: l1, y :: l2 -> let c = { head = f x y; tail = [] } in map2_loop f c l1 l2; cast c | _ -> invalid_arg "List.map2" (** Like OCaml [List.mapi] but tail-recursive *) let rec map_i_loop f i p = function | [] -> () | x :: l -> let c = { head = f i x; tail = [] } in p.tail <- cast c; map_i_loop f (succ i) c l let map_i f i = function | [] -> [] | x :: l -> let c = { head = f i x; tail = [] } in map_i_loop f (succ i) c l; cast c let map_left = map let map2_i f i l1 l2 = let rec map_i i = function | ([], []) -> [] | (h1 :: t1, h2 :: t2) -> let v = f i h1 h2 in v :: map_i (succ i) (t1,t2) | (_, _) 
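(* the two lists have different lengths *)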
-> invalid_arg "map2_i" in map_i i (l1,l2) let rec map3_loop f p l1 l2 l3 = match l1, l2, l3 with | [], [], [] -> () | x :: l1, y :: l2, z :: l3 -> let c = { head = f x y z; tail = [] } in p.tail <- cast c; map3_loop f c l1 l2 l3 | _ -> invalid_arg "List.map3" let map3 f l1 l2 l3 = match l1, l2, l3 with | [], [], [] -> [] | x :: l1, y :: l2, z :: l3 -> let c = { head = f x y z; tail = [] } in map3_loop f c l1 l2 l3; cast c | _ -> invalid_arg "List.map3" let rec map4_loop f p l1 l2 l3 l4 = match l1, l2, l3, l4 with | [], [], [], [] -> () | x :: l1, y :: l2, z :: l3, t :: l4 -> let c = { head = f x y z t; tail = [] } in p.tail <- cast c; map4_loop f c l1 l2 l3 l4 | _ -> invalid_arg "List.map4" let map4 f l1 l2 l3 l4 = match l1, l2, l3, l4 with | [], [], [], [] -> [] | x :: l1, y :: l2, z :: l3, t :: l4 -> let c = { head = f x y z t; tail = [] } in map4_loop f c l1 l2 l3 l4; cast c | _ -> invalid_arg "List.map4" let rec map_of_array_loop f p a i l = if Int.equal i l then () else let c = { head = f (Array.unsafe_get a i); tail = [] } in p.tail <- cast c; map_of_array_loop f c a (i + 1) l let map_of_array f a = let l = Array.length a in if Int.equal l 0 then [] else let c = { head = f (Array.unsafe_get a 0); tail = [] } in map_of_array_loop f c a 1 l; cast c let map_append f l = flatten (map f l) let map_append2 f l1 l2 = flatten (map2 f l1 l2) let rec extend l a l' = match l,l' with | true :: l, b :: l' -> b :: extend l a l' | false :: l, l' -> a :: extend l a l' | [], [] -> [] | _ -> invalid_arg "extend" let count f l = let rec aux acc = function | [] -> acc | h :: t -> if f h then aux (acc + 1) t else aux acc t in aux 0 l (** {6 Finding position} *) let rec index_f f x l n = match l with | [] -> raise Not_found | y :: l -> if f x y then n else index_f f x l (succ n) let index f x l = index_f f x l 1 let safe_index f x l = try Some (index f x l) with Not_found -> None let index0 f x l = index_f f x l 0 (** {6 Folding} *) let fold_left_until f accu s = let rec aux accu = function | [] -> accu | x :: xs -> match f accu x with CSig.Stop x -> x | CSig.Cont i -> aux i xs in aux accu s let fold_right_i f i l = let rec it_f i l a = match l with | [] -> a | b :: l -> f (i-1) b (it_f (i-1) l a) in it_f (List.length l + i) l let fold_left_i f = let rec it_list_f i a = function | [] -> a | b :: l -> it_list_f (i+1) (f i a b) l in it_list_f let rec fold_left3 f accu l1 l2 l3 = match (l1, l2, l3) with | ([], [], []) -> accu | (a1 :: l1, a2 :: l2, a3 :: l3) -> fold_left3 f (f accu a1 a2 a3) l1 l2 l3 | (_, _, _) -> invalid_arg "List.fold_left3" let rec fold_left4 f accu l1 l2 l3 l4 = match (l1, l2, l3, l4) with | ([], [], [], []) -> accu | (a1 :: l1, a2 :: l2, a3 :: l3, a4 :: l4) -> fold_left4 f (f accu a1 a2 a3 a4) l1 l2 l3 l4 | (_,_, _, _) -> invalid_arg "List.fold_left4" (* [fold_right_and_left f [a1;...;an] hd = f (f (... (f (f hd an [an-1;...;a1]) an-1 [an-2;...;a1]) ...) 
a2 [a1]) a1 []] *) let fold_right_and_left f l hd = let rec aux tl = function | [] -> hd | a :: l -> let hd = aux (a :: tl) l in f hd a tl in aux [] l (* Match sets as lists according to a matching function, also folding a side effect *) let rec fold_left2_set e f x l1 l2 = match l1 with | a1 :: l1 -> let rec find seen = function | [] -> raise e | a2 :: l2 -> try fold_left2_set e f (f x a1 a2 l1 l2) l1 (List.rev_append seen l2) with e' when e' = e -> find (a2 :: seen) l2 in find [] l2 | [] -> if l2 = [] then x else raise e (* Poor man's monadic map *) let rec fold_left_map f e = function | [] -> (e,[]) | h :: t -> let e',h' = f e h in let e'',t' = fold_left_map f e' t in e'',h' :: t' (* (* tail-recursive version of the above function *) let fold_left_map f e l = let g (e,b') h = let (e',h') = f e h in (e',h'::b') in let (e',lrev) = List.fold_left g (e,[]) l in (e',List.rev lrev) *) (* The same, based on fold_right, with the effect accumulated on the right *) let fold_right_map f l e = List.fold_right (fun x (l,e) -> let (y,e) = f x e in (y::l,e)) l ([],e) let on_snd f (x,y) = (x,f y) let fold_left2_map f e l l' = on_snd List.rev @@ List.fold_left2 (fun (e,l) x x' -> let (e,y) = f e x x' in (e, y::l) ) (e, []) l l' let fold_right2_map f l l' e = List.fold_right2 (fun x x' (l,e) -> let (y,e) = f x x' e in (y::l,e)) l l' ([],e) let fold_left3_map f e l l' l'' = on_snd List.rev @@ fold_left3 (fun (e,l) x x' x'' -> let (e,y) = f e x x' x'' in (e,y::l)) (e,[]) l l' l'' let fold_left4_map f e l1 l2 l3 l4 = on_snd List.rev @@ fold_left4 (fun (e,l) x1 x2 x3 x4 -> let (e,y) = f e x1 x2 x3 x4 in (e,y::l)) (e,[]) l1 l2 l3 l4 (** {6 Splitting} *) let except cmp x l = List.filter (fun y -> not (cmp x y)) l let remove = except (* Alias *) let rec remove_first p = function | b :: l when p b -> l | b :: l -> b :: remove_first p l | [] -> raise Not_found let extract_first p li = let rec loop rev_left = function | [] -> raise Not_found | x :: right -> if p x then List.rev_append rev_left right, x else loop (x :: rev_left) right in loop [] li let insert p v l = let rec insrec = function | [] -> [v] | h :: tl -> if p v h then v :: h :: tl else h :: insrec tl in insrec l let rec find_map f = function | [] -> raise Not_found | x :: l -> match f x with | None -> find_map f l | Some y -> y (* FIXME: again, generic hash function *) let subset l1 l2 = let t2 = Hashtbl.create 151 in List.iter (fun x -> Hashtbl.add t2 x ()) l2; let rec look = function | [] -> true | x :: ll -> try Hashtbl.find t2 x; look ll with Not_found -> false in look l1 (** [goto i l] splits [l] into two lists [(l1,l2)] such that [(List.rev l1)++l2=l] and [l1] has length [i]. It raises [IndexOutOfRange] when [i] is negative or greater than the length of [l]. *) exception IndexOutOfRange let goto n l = let rec goto i acc = function | tl when Int.equal i 0 -> (acc, tl) | h :: t -> goto (pred i) (h :: acc) t | [] -> raise IndexOutOfRange in goto n [] l (* [chop i l] splits [l] into two lists [(l1,l2)] such that [l1++l2=l] and [l1] has length [i]. It raises [Failure] when [i] is negative or greater than the length of [l] *) let chop n l = try let (h,t) = goto n l in (List.rev h,t) with IndexOutOfRange -> failwith "List.chop" (* spiwack: should raise [IndexOutOfRange] but I'm afraid of missing a try/with when replacing the exception. *) (* [split_when p l] splits [l] into two lists [(l1,a::l2)] such that [l1++(a::l2)=l], [p a=true] and [p b = false] for every element [b] of [l1]. 
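For instance (illustrative), [split_when (fun x -> x > 2) [1;2;3;4]] is [([1;2], [3;4])].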
If there is no such [a], then it returns [(l,[])] instead *) let split_when p = let rec split_when_loop x y = match y with | [] -> (List.rev x,[]) | (a :: l) -> if (p a) then (List.rev x,y) else split_when_loop (a :: x) l in split_when_loop [] let firstn n l = let rec aux acc n l = match n, l with | 0, _ -> List.rev acc | n, h :: t -> aux (h :: acc) (pred n) t | _ -> failwith "firstn" in aux [] n l let rec sep_last = function | [] -> failwith "sep_last" | hd :: [] -> (hd,[]) | hd :: tl -> let (l,tl) = sep_last tl in (l,hd :: tl) (* Drop the last element of a list *) let rec drop_last = function | [] -> failwith "drop_last" | hd :: [] -> [] | hd :: tl -> hd :: drop_last tl let rec last = function | [] -> failwith "List.last" | hd :: [] -> hd | _ :: tl -> last tl let lastn n l = let len = List.length l in let rec aux m l = if Int.equal m n then l else aux (m - 1) (List.tl l) in if len < n then failwith "lastn" else aux len l let rec skipn n l = match n,l with | 0, _ -> l | _, [] -> failwith "List.skipn" | n, _ :: l -> skipn (pred n) l let skipn_at_least n l = try skipn n l with Failure _ when n >= 0 -> [] (** if [l=p++t] then [drop_prefix p l] is [t] else [l] *) let drop_prefix cmp p l = let rec drop_prefix_rec = function | (h1 :: tp, h2 :: tl) when cmp h1 h2 -> drop_prefix_rec (tp,tl) | ([], tl) -> tl | _ -> l in drop_prefix_rec (p,l) let share_tails l1 l2 = let rec shr_rev acc = function | (x1 :: l1, x2 :: l2) when x1 == x2 -> shr_rev (x1 :: acc) (l1,l2) | (l1, l2) -> (List.rev l1, List.rev l2, acc) in shr_rev [] (List.rev l1, List.rev l2) (** {6 Association lists} *) let map_assoc f = map (fun (x,a) -> (x,f a)) let rec assoc_f f a = function | (x, e) :: xs -> if f a x then e else assoc_f f a xs | [] -> raise Not_found let remove_assoc_f f a l = try remove_first (fun (x,_) -> f a x) l with Not_found -> l let mem_assoc_f f a l = List.exists (fun (x,_) -> f a x) l (** {6 Operations on lists of tuples} *) let rec split_loop p q = function | [] -> () | (x, y) :: l -> let cl = { head = x; tail = [] } in let cr = { head = y; tail = [] } in p.tail <- cast cl; q.tail <- cast cr; split_loop cl cr l let split = function | [] -> [], [] | (x, y) :: l -> let cl = { head = x; tail = [] } in let cr = { head = y; tail = [] } in split_loop cl cr l; (cast cl, cast cr) let rec combine_loop p l1 l2 = match l1, l2 with | [], [] -> () | x :: l1, y :: l2 -> let c = { head = (x, y); tail = [] } in p.tail <- cast c; combine_loop c l1 l2 | _ -> invalid_arg "List.combine" let combine l1 l2 = match l1, l2 with | [], [] -> [] | x :: l1, y :: l2 -> let c = { head = (x, y); tail = [] } in combine_loop c l1 l2; cast c | _ -> invalid_arg "List.combine" let rec split3_loop p q r = function | [] -> () | (x, y, z) :: l -> let cp = { head = x; tail = [] } in let cq = { head = y; tail = [] } in let cr = { head = z; tail = [] } in p.tail <- cast cp; q.tail <- cast cq; r.tail <- cast cr; split3_loop cp cq cr l let split3 = function | [] -> [], [], [] | (x, y, z) :: l -> let cp = { head = x; tail = [] } in let cq = { head = y; tail = [] } in let cr = { head = z; tail = [] } in split3_loop cp cq cr l; (cast cp, cast cq, cast cr) (** XXX TODO tailrec *) let rec split4 = function | [] -> ([], [], [], []) | (a,b,c,d)::l -> let (ra, rb, rc, rd) = split4 l in (a::ra, b::rb, c::rc, d::rd) let rec combine3_loop p l1 l2 l3 = match l1, l2, l3 with | [], [], [] -> () | x :: l1, y :: l2, z :: l3 -> let c = { head = (x, y, z); tail = [] } in p.tail <- cast c; combine3_loop c l1 l2 l3 | _ -> invalid_arg "List.combine3" let combine3 l1 l2 l3 = 
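(* zip three lists of the same length into a list of triples *)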
match l1, l2, l3 with | [], [], [] -> [] | x :: l1, y :: l2, z :: l3 -> let c = { head = (x, y, z); tail = [] } in combine3_loop c l1 l2 l3; cast c | _ -> invalid_arg "List.combine3" (** {6 Operations on lists seen as sets, preserving uniqueness of elements} *) (** Add an element, preserving uniqueness of elements *) let add_set cmp x l = if mem_f cmp x l then l else x :: l (** List equality up to permutation (but considering multiple occurrences) *) let eq_set cmp l1 l2 = let rec aux l1 = function | [] -> is_empty l1 | a :: l2 -> aux (remove_first (cmp a) l1) l2 in try aux l1 l2 with Not_found -> false let rec merge_set cmp l1 l2 = match l1, l2 with | [], l2 -> l2 | l1, [] -> l1 | h1 :: t1, h2 :: t2 -> let c = cmp h1 h2 in if Int.equal c 0 then h1 :: merge_set cmp t1 t2 else if c <= 0 then h1 :: merge_set cmp t1 l2 else h2 :: merge_set cmp l1 t2 let intersect cmp l1 l2 = filter (fun x -> mem_f cmp x l2) l1 let union cmp l1 l2 = let rec urec = function | [] -> l2 | a :: l -> if mem_f cmp a l2 then urec l else a :: urec l in urec l1 let subtract cmp l1 l2 = if is_empty l2 then l1 else List.filter (fun x -> not (mem_f cmp x l2)) l1 let unionq l1 l2 = union (==) l1 l2 let subtractq l1 l2 = subtract (==) l1 l2 (** {6 Uniqueness and duplication} *) (* FIXME: we should avoid relying on the generic hash function, just as we'd better avoid Pervasives.compare *) let distinct l = let visited = Hashtbl.create 23 in let rec loop = function | h :: t -> if Hashtbl.mem visited h then false else begin Hashtbl.add visited h h; loop t end | [] -> true in loop l let distinct_f cmp l = let rec loop = function | a :: b :: _ when Int.equal (cmp a b) 0 -> false | a :: l -> loop l | [] -> true in loop (List.sort cmp l) (* FIXME: again, generic hash function *) let uniquize_key f l = let visited = Hashtbl.create 23 in let rec aux acc changed = function | h :: t -> let x = f h in if Hashtbl.mem visited x then aux acc true t else begin Hashtbl.add visited x x; aux (h :: acc) changed t end | [] -> if changed then List.rev acc else l in aux [] false l let uniquize l = uniquize_key (fun x -> x) l (** [sort_uniquize] might be an alternative to the hashtbl-based [uniquize], when the order of the elements is irrelevant *) let rec uniquize_sorted cmp = function | a :: b :: l when Int.equal (cmp a b) 0 -> uniquize_sorted cmp (a :: l) | a :: l -> a :: uniquize_sorted cmp l | [] -> [] let sort_uniquize cmp l = uniquize_sorted cmp (List.sort cmp l) let min cmp l = let rec aux cur = function | [] -> cur | x :: l -> if cmp x cur < 0 then aux x l else aux cur l in match l with | x :: l -> aux x l | [] -> raise Not_found let rec duplicates cmp = function | [] -> [] | x :: l -> let l' = duplicates cmp l in if mem_f cmp x l then add_set cmp x l' else l' (** {6 Cartesian product} *) (* A generic cartesian product: for any operator (**), [cartesian (**) [x1;x2] [y1;y2] = [x1**y1; x1**y2; x2**y1; x2**y1]], and so on if there are more elements in the lists. *) let cartesian op l1 l2 = map_append (fun x -> map (op x) l2) l1 (* [cartesians] is an n-ary cartesian product: it iterates [cartesian] over a list of lists. 
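For example (illustrative), [cartesians (fun x l -> x :: l) [] [[1;2];[3;4]]] is [[1;3];[1;4];[2;3];[2;4]].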
*) let cartesians op init ll = List.fold_right (cartesian op) ll [init] (* combinations [[a;b];[c;d]] gives [[a;c];[a;d];[b;c];[b;d]] *) let combinations l = cartesians (fun x l -> x :: l) [] l (* Keep only those products that do not return None *) let cartesian_filter op l1 l2 = map_append (fun x -> map_filter (op x) l2) l1 (* Keep only those products that do not return None *) let cartesians_filter op init ll = List.fold_right (cartesian_filter op) ll [init] (* Factorize lists of pairs according to the left argument *) let rec factorize_left cmp = function | (a,b) :: l -> let al,l' = partition (fun (a',_) -> cmp a a') l in (a,(b :: map snd al)) :: factorize_left cmp l' | [] -> [] module Smart = struct let rec map f l = match l with | [] -> l | h :: tl -> let h' = f h in let tl' = map f tl in if h' == h && tl' == tl then l else h' :: tl' end module type MonoS = sig type elt val equal : elt list -> elt list -> bool val mem : elt -> elt list -> bool val assoc : elt -> (elt * 'a) list -> 'a val mem_assoc : elt -> (elt * 'a) list -> bool val remove_assoc : elt -> (elt * 'a) list -> (elt * 'a) list val mem_assoc_sym : elt -> ('a * elt) list -> bool end coq-8.15.0/clib/cList.mli000066400000000000000000000412721417001151100150560ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* int type 'a eq = 'a -> 'a -> bool (** Module type [S] is the one from OCaml Stdlib. *) module type S = module type of List module type ExtS = sig include S (** {6 Equality, testing} *) val compare : 'a cmp -> 'a list cmp (** Lexicographic order on lists. *) val equal : 'a eq -> 'a list eq (** Lift equality to list type. *) val is_empty : 'a list -> bool (** Check whether a list is empty *) val mem_f : 'a eq -> 'a -> 'a list -> bool (** Same as [List.mem], for some specific equality *) val for_all_i : (int -> 'a -> bool) -> int -> 'a list -> bool (** Same as [List.for_all] but with an index *) val for_all2eq : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool (** Same as [List.for_all2] but returning [false] when of different length *) val exists_i : (int -> 'a -> bool) -> int -> 'a list -> bool (** Same as [List.exists] but with an index *) val prefix_of : 'a eq -> 'a list eq (** [prefix_of eq l1 l2] returns [true] if [l1] is a prefix of [l2], [false] otherwise. It uses [eq] to compare elements *) val same_length : 'a list -> 'b list -> bool (** A more efficient variant of [for_all2eq (fun _ _ -> true)] *) (** {6 Creating lists} *) val interval : int -> int -> int list (** [interval i j] creates the list [[i; i + 1; ...; j]], or [[]] when [j <= i]. *) val make : int -> 'a -> 'a list (** [make n x] returns a list made of [n] times [x]. Raise [Invalid_argument _] if [n] is negative. *) val addn : int -> 'a -> 'a list -> 'a list (** [addn n x l] adds [n] times [x] on the left of [l]. *) val init : int -> (int -> 'a) -> 'a list (** [init n f] constructs the list [f 0; ... ; f (n - 1)]. Raise [Invalid_argument _] if [n] is negative *) val append : 'a list -> 'a list -> 'a list (** Like OCaml's [List.append] but tail-recursive. *) val concat : 'a list list -> 'a list (** Like OCaml's [List.concat] but tail-recursive. *) val flatten : 'a list list -> 'a list (** Synonymous of [concat] *) (** {6 Lists as arrays} *) val assign : 'a list -> int -> 'a -> 'a list (** [assign l i x] sets the [i]-th element of [l] to [x], starting from [0]. 
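For instance (illustrative), [assign [0;1;2] 1 5] is [[0;5;2]].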
Raise [Failure _] if [i] is out of range. *) (** {6 Filtering} *) val filter : ('a -> bool) -> 'a list -> 'a list (** Like OCaml [List.filter] but tail-recursive and physically returns the original list if the predicate holds for all elements. *) val filter2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> 'a list * 'b list (** Like [List.filter] but with 2 arguments, raise [Invalid_argument _] if the lists are not of same length. *) val filteri : (int -> 'a -> bool) -> 'a list -> 'a list (** Like [List.filter] but with an index starting from [0] *) val filter_with : bool list -> 'a list -> 'a list (** [filter_with bl l] selects elements of [l] whose corresponding element in [bl] is [true]. Raise [Invalid_argument _] if sizes differ. *) val map_filter : ('a -> 'b option) -> 'a list -> 'b list (** Like [map] but keeping only non-[None] elements *) val map_filter_i : (int -> 'a -> 'b option) -> 'a list -> 'b list (** Like [map_filter] but with an index starting from [0] *) val partitioni : (int -> 'a -> bool) -> 'a list -> 'a list * 'a list (** Like [List.partition] but with an index starting from [0] *) (** {6 Applying functorially} *) val map : ('a -> 'b) -> 'a list -> 'b list (** Like OCaml [List.map] but tail-recursive *) val map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list (** Like OCaml [List.map2] but tail-recursive *) val map_left : ('a -> 'b) -> 'a list -> 'b list (** As [map] but ensures the left-to-right order of evaluation. *) val map_i : (int -> 'a -> 'b) -> int -> 'a list -> 'b list (** Like OCaml [List.mapi] but tail-recursive. Alternatively, like [map] but with an index *) val map2_i : (int -> 'a -> 'b -> 'c) -> int -> 'a list -> 'b list -> 'c list (** Like [map2] but with an index *) val map3 : ('a -> 'b -> 'c -> 'd) -> 'a list -> 'b list -> 'c list -> 'd list (** Like [map] but for 3 lists. *) val map4 : ('a -> 'b -> 'c -> 'd -> 'e) -> 'a list -> 'b list -> 'c list -> 'd list -> 'e list (** Like [map] but for 4 lists. *) val map_of_array : ('a -> 'b) -> 'a array -> 'b list (** [map_of_array f a] behaves as [List.map f (Array.to_list a)] *) val map_append : ('a -> 'b list) -> 'a list -> 'b list (** [map_append f [x1; ...; xn]] returns [f x1 @ ... @ f xn]. *) val map_append2 : ('a -> 'b -> 'c list) -> 'a list -> 'b list -> 'c list (** Like [map_append] but for two lists; raises [Invalid_argument _] if the two lists do not have the same length. *) val extend : bool list -> 'a -> 'a list -> 'a list (** [extend l a [a1..an]] assumes that the number of [true] in [l] is [n]; it extends [a1..an] by inserting [a] at the position of [false] in [l] *) val count : ('a -> bool) -> 'a list -> int (** Count the number of elements satisfying a predicate *) (** {6 Finding position} *) val index : 'a eq -> 'a -> 'a list -> int (** [index] returns the 1st index of an element in a list (counting from 1). *) val safe_index : 'a eq -> 'a -> 'a list -> int option (** [safe_index] returns the 1st index of an element in a list (counting from 1) and None otherwise. *) val index0 : 'a eq -> 'a -> 'a list -> int (** [index0] behaves as [index] except that it starts counting at 0. *) (** {6 Folding} *) val fold_left_until : ('c -> 'a -> 'c CSig.until) -> 'c -> 'a list -> 'c (** acts like [fold_left f acc s] while [f] returns [Cont acc']; it stops returning [c] as soon as [f] returns [Stop c]. 
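For instance (illustrative), [fold_left_until (fun acc x -> if acc > 10 then CSig.Stop acc else CSig.Cont (acc + x)) 0 [1;2;3;4;5;6]] evaluates to [15], stopping before [6] is added.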
*) val fold_right_i : (int -> 'a -> 'b -> 'b) -> int -> 'a list -> 'b -> 'b (** Like [List.fold_right] but with an index *) val fold_left_i : (int -> 'a -> 'b -> 'a) -> int -> 'a -> 'b list -> 'a (** Like [List.fold_left] but with an index *) val fold_right_and_left : ('b -> 'a -> 'a list -> 'b) -> 'a list -> 'b -> 'b (** [fold_right_and_left f [a1;...;an] hd] is [f (f (... (f (f hd an [an-1;...;a1]) an-1 [an-2;...;a1]) ...) a2 [a1]) a1 []] *) val fold_left3 : ('a -> 'b -> 'c -> 'd -> 'a) -> 'a -> 'b list -> 'c list -> 'd list -> 'a (** Like [List.fold_left] but for 3 lists; raise [Invalid_argument _] if not all lists of the same size *) val fold_left2_set : exn -> ('a -> 'b -> 'c -> 'b list -> 'c list -> 'a) -> 'a -> 'b list -> 'c list -> 'a (** Fold sets, i.e. lists up to order; the folding function tells when elements match by returning a value and raising the given exception otherwise; sets should have the same size; raise the given exception if no pairing of the two sets is found;; complexity in O(n^2) *) val fold_left_map : ('a -> 'b -> 'a * 'c) -> 'a -> 'b list -> 'a * 'c list (** [fold_left_map f e_0 [a1;...;an]] is [e_n,[k_1...k_n]] where [(e_i,k_i)] is [f e_{i-1} ai] for each i<=n *) val fold_right_map : ('b -> 'a -> 'c * 'a) -> 'b list -> 'a -> 'c list * 'a (** Same, folding on the right *) val fold_left2_map : ('a -> 'b -> 'c -> 'a * 'd) -> 'a -> 'b list -> 'c list -> 'a * 'd list (** Same with two lists, folding on the left *) val fold_right2_map : ('b -> 'c -> 'a -> 'd * 'a) -> 'b list -> 'c list -> 'a -> 'd list * 'a (** Same with two lists, folding on the right *) val fold_left3_map : ('a -> 'b -> 'c -> 'd -> 'a * 'e) -> 'a -> 'b list -> 'c list -> 'd list -> 'a * 'e list (** Same with three lists, folding on the left *) val fold_left4_map : ('a -> 'b -> 'c -> 'd -> 'e -> 'a * 'r) -> 'a -> 'b list -> 'c list -> 'd list -> 'e list -> 'a * 'r list (** Same with four lists, folding on the left *) (** {6 Splitting} *) val except : 'a eq -> 'a -> 'a list -> 'a list (** [except eq a l] Remove all occurrences of [a] in [l] *) val remove : 'a eq -> 'a -> 'a list -> 'a list (** Alias of [except] *) val remove_first : ('a -> bool) -> 'a list -> 'a list (** Remove the first element satisfying a predicate, or raise [Not_found] *) val extract_first : ('a -> bool) -> 'a list -> 'a list * 'a (** Remove and return the first element satisfying a predicate, or raise [Not_found] *) val find_map : ('a -> 'b option) -> 'a list -> 'b (** Returns the first element that is mapped to [Some _]. Raise [Not_found] if there is none. *) exception IndexOutOfRange val goto: int -> 'a list -> 'a list * 'a list (** [goto i l] splits [l] into two lists [(l1,l2)] such that [(List.rev l1)++l2=l] and [l1] has length [i]. It raises [IndexOutOfRange] when [i] is negative or greater than the length of [l]. *) val split_when : ('a -> bool) -> 'a list -> 'a list * 'a list (** [split_when p l] splits [l] into two lists [(l1,a::l2)] such that [l1++(a::l2)=l], [p a=true] and [p b = false] for every element [b] of [l1]. if there is no such [a], then it returns [(l,[])] instead. *) val sep_last : 'a list -> 'a * 'a list (** [sep_last l] returns [(a,l')] such that [l] is [l'@[a]]. It raises [Failure _] if the list is empty. *) val drop_last : 'a list -> 'a list (** Remove the last element of the list. It raises [Failure _] if the list is empty. This is the second part of [sep_last]. *) val last : 'a list -> 'a (** Return the last element of the list. It raises [Failure _] if the list is empty. 
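For instance (illustrative), [last [1;2;3]] is [3].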
This is the first part of [sep_last]. *) val lastn : int -> 'a list -> 'a list (** [lastn n l] returns the [n] last elements of [l]. It raises [Failure _] if [n] is less than 0 or larger than the length of [l] *) val chop : int -> 'a list -> 'a list * 'a list (** [chop i l] splits [l] into two lists [(l1,l2)] such that [l1++l2=l] and [l1] has length [i]. It raises [Failure _] when [i] is negative or greater than the length of [l]. *) val firstn : int -> 'a list -> 'a list (** [firstn n l] Returns the [n] first elements of [l]. It raises [Failure _] if [n] negative or too large. This is the first part of [chop]. *) val skipn : int -> 'a list -> 'a list (** [skipn n l] drops the [n] first elements of [l]. It raises [Failure _] if [n] is less than 0 or larger than the length of [l]. This is the second part of [chop]. *) val skipn_at_least : int -> 'a list -> 'a list (** Same as [skipn] but returns [] if [n] is larger than the length of the list. *) val drop_prefix : 'a eq -> 'a list -> 'a list -> 'a list (** [drop_prefix eq l1 l] returns [l2] if [l=l1++l2] else return [l]. *) val insert : 'a eq -> 'a -> 'a list -> 'a list (** Insert at the (first) position so that if the list is ordered wrt to the total order given as argument, the order is preserved *) val share_tails : 'a list -> 'a list -> 'a list * 'a list * 'a list (** [share_tails l1 l2] returns [(l1',l2',l)] such that [l1] is [l1'\@l] and [l2] is [l2'\@l] and [l] is maximal amongst all such decompositions *) (** {6 Association lists} *) val map_assoc : ('a -> 'b) -> ('c * 'a) list -> ('c * 'b) list (** Applies a function on the codomain of an association list *) val assoc_f : 'a eq -> 'a -> ('a * 'b) list -> 'b (** Like [List.assoc] but using the equality given as argument *) val remove_assoc_f : 'a eq -> 'a -> ('a * 'b) list -> ('a * 'b) list (** Remove first matching element; unchanged if no such element *) val mem_assoc_f : 'a eq -> 'a -> ('a * 'b) list -> bool (** Like [List.mem_assoc] but using the equality given as argument *) val factorize_left : 'a eq -> ('a * 'b) list -> ('a * 'b list) list (** Create a list of associations from a list of pairs *) (** {6 Operations on lists of tuples} *) val split : ('a * 'b) list -> 'a list * 'b list (** Like OCaml's [List.split] but tail-recursive. *) val combine : 'a list -> 'b list -> ('a * 'b) list (** Like OCaml's [List.combine] but tail-recursive. *) val split3 : ('a * 'b * 'c) list -> 'a list * 'b list * 'c list (** Like [split] but for triples *) val split4 : ('a * 'b * 'c * 'd) list -> 'a list * 'b list * 'c list * 'd list (** Like [split] but for quads *) val combine3 : 'a list -> 'b list -> 'c list -> ('a * 'b * 'c) list (** Like [combine] but for triples *) (** {6 Operations on lists seen as sets, preserving uniqueness of elements} *) val add_set : 'a eq -> 'a -> 'a list -> 'a list (** [add_set x l] adds [x] in [l] if it is not already there, or returns [l] otherwise. *) val eq_set : 'a eq -> 'a list eq (** Test equality up to permutation. It respects multiple occurrences and thus works also on multisets. *) val subset : 'a list eq (** Tell if a list is a subset of another up to permutation. It expects each element to occur only once. *) val merge_set : 'a cmp -> 'a list -> 'a list -> 'a list (** Merge two sorted lists and preserves the uniqueness property. 
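For instance (illustrative), [merge_set compare [1;3;5] [2;3;6]] is [[1;2;3;5;6]].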
*) val intersect : 'a eq -> 'a list -> 'a list -> 'a list (** Return the intersection of two lists, assuming and preserving uniqueness of elements *) val union : 'a eq -> 'a list -> 'a list -> 'a list (** Return the union of two lists, assuming and preserving uniqueness of elements *) val unionq : 'a list -> 'a list -> 'a list (** [union] specialized to physical equality *) val subtract : 'a eq -> 'a list -> 'a list -> 'a list (** Remove from the first list all elements from the second list. *) val subtractq : 'a list -> 'a list -> 'a list (** [subtract] specialized to physical equality *) (** {6 Uniqueness and duplication} *) val distinct : 'a list -> bool (** Return [true] if all elements of the list are distinct. *) val distinct_f : 'a cmp -> 'a list -> bool (** Like [distinct] but using the equality given as argument *) val duplicates : 'a eq -> 'a list -> 'a list (** Return the list of unique elements which appear at least twice. Elements are kept in the order of their first appearance. *) val uniquize_key : ('a -> 'b) -> 'a list -> 'a list (** Return the list of elements without duplicates using the function to associate a comparison key to each element. This is the list unchanged if there was none. *) val uniquize : 'a list -> 'a list (** Return the list of elements without duplicates. This is the list unchanged if there was none. *) val sort_uniquize : 'a cmp -> 'a list -> 'a list (** Return a sorted version of a list without duplicates according to some comparison function. *) val min : 'a cmp -> 'a list -> 'a (** Return minimum element according to some comparison function. @raise Not_found on an empty list. *) (** {6 Cartesian product} *) val cartesian : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list (** A generic binary cartesian product: for any operator (**), [cartesian (**) [x1;x2] [y1;y2] = [x1**y1; x1**y2; x2**y1; x2**y1]], and so on if there are more elements in the lists. *) val cartesians : ('a -> 'b -> 'b) -> 'b -> 'a list list -> 'b list (** [cartesians op init l] is an n-ary cartesian product: it builds the list of all [op a1 .. (op an init) ..] for [a1], ..., [an] in the product of the elements of the lists *) val combinations : 'a list list -> 'a list list (** [combinations l] returns the list of [n_1] * ... * [n_p] tuples [[a11;...;ap1];...;[a1n_1;...;apn_pd]] whenever [l] is a list [[a11;..;a1n_1];...;[ap1;apn_p]]; otherwise said, it is [cartesians (::) [] l] *) val cartesians_filter : ('a -> 'b -> 'b option) -> 'b -> 'a list list -> 'b list (** Like [cartesians op init l] but keep only the tuples for which [op] returns [Some _] on all the elements of the tuple. 
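For instance (illustrative), [cartesians_filter (fun x l -> if x mod 2 = 0 then Some (x :: l) else None) [] [[1;2];[3;4]]] is [[2;4]].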
*) module Smart : sig val map : ('a -> 'a) -> 'a list -> 'a list (** [Smart.map f [a1...an] = List.map f [a1...an]] but if for all i [f ai == ai], then [Smart.map f l == l] *) end module type MonoS = sig type elt val equal : elt list -> elt list -> bool val mem : elt -> elt list -> bool val assoc : elt -> (elt * 'a) list -> 'a val mem_assoc : elt -> (elt * 'a) list -> bool val remove_assoc : elt -> (elt * 'a) list -> (elt * 'a) list val mem_assoc_sym : elt -> ('a * elt) list -> bool end end include ExtS coq-8.15.0/clib/cMap.ml000066400000000000000000000244241417001151100145070ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int end module type MonadS = sig type +'a t val return : 'a -> 'a t val (>>=) : 'a t -> ('a -> 'b t) -> 'b t end module type S = Map.S module type ExtS = sig include CSig.MapS module Set : CSig.SetS with type elt = key val get : key -> 'a t -> 'a val set : key -> 'a -> 'a t -> 'a t val modify : key -> (key -> 'a -> 'a) -> 'a t -> 'a t val domain : 'a t -> Set.t val bind : (key -> 'a) -> Set.t -> 'a t val fold_left : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b val fold_right : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b val height : 'a t -> int val filter_range : (key -> int) -> 'a t -> 'a t val update: key -> ('a option -> 'a option) -> 'a t -> 'a t module Smart : sig val map : ('a -> 'a) -> 'a t -> 'a t val mapi : (key -> 'a -> 'a) -> 'a t -> 'a t end module Unsafe : sig val map : (key -> 'a -> key * 'b) -> 'a t -> 'b t end module Monad(M : MonadS) : sig val fold : (key -> 'a -> 'b -> 'b M.t) -> 'a t -> 'b -> 'b M.t val fold_left : (key -> 'a -> 'b -> 'b M.t) -> 'a t -> 'b -> 'b M.t val fold_right : (key -> 'a -> 'b -> 'b M.t) -> 'a t -> 'b -> 'b M.t end end module MapExt (M : Map.OrderedType) : sig type 'a map = 'a Map.Make(M).t val set : M.t -> 'a -> 'a map -> 'a map val get : M.t -> 'a map -> 'a val modify : M.t -> (M.t -> 'a -> 'a) -> 'a map -> 'a map val domain : 'a map -> Set.Make(M).t val bind : (M.t -> 'a) -> Set.Make(M).t -> 'a map val fold_left : (M.t -> 'a -> 'b -> 'b) -> 'a map -> 'b -> 'b val fold_right : (M.t -> 'a -> 'b -> 'b) -> 'a map -> 'b -> 'b val height : 'a map -> int val filter_range : (M.t -> int) -> 'a map -> 'a map val update: M.t -> ('a option -> 'a option) -> 'a map -> 'a map module Smart : sig val map : ('a -> 'a) -> 'a map -> 'a map val mapi : (M.t -> 'a -> 'a) -> 'a map -> 'a map end module Unsafe : sig val map : (M.t -> 'a -> M.t * 'b) -> 'a map -> 'b map end module Monad(MS : MonadS) : sig val fold : (M.t -> 'a -> 'b -> 'b MS.t) -> 'a map -> 'b -> 'b MS.t val fold_left : (M.t -> 'a -> 'b -> 'b MS.t) -> 'a map -> 'b -> 'b MS.t val fold_right : (M.t -> 'a -> 'b -> 'b MS.t) -> 'a map -> 'b -> 'b MS.t end end = struct (** This unsafe module is a way to access to the actual implementations of OCaml sets and maps without reimplementing them ourselves. It is quite dubious that these implementations will ever be changed... Nonetheless, if this happens, we can still implement a less clever version of [domain]. 
*) module F = Map.Make(M) type 'a map = 'a F.t module S = Set.Make(M) type set = S.t type 'a _map = | MEmpty | MNode of {l:'a map; v:F.key; d:'a; r:'a map; h:int} type _set = | SEmpty | SNode of set * M.t * set * int let map_prj : 'a map -> 'a _map = Obj.magic let map_inj : 'a _map -> 'a map = Obj.magic let set_prj : set -> _set = Obj.magic let set_inj : _set -> set = Obj.magic let rec set k v (s : 'a map) : 'a map = match map_prj s with | MEmpty -> raise Not_found | MNode {l; v=k'; d=v'; r; h} -> let c = M.compare k k' in if c < 0 then let l' = set k v l in if l == l' then s else map_inj (MNode {l=l'; v=k'; d=v'; r; h}) else if c = 0 then if v' == v then s else map_inj (MNode {l; v=k'; d=v; r; h}) else let r' = set k v r in if r == r' then s else map_inj (MNode {l; v=k'; d=v'; r=r'; h}) let rec get k (s:'a map) : 'a = match map_prj s with | MEmpty -> assert false | MNode {l; v=k'; d=v; r; h} -> let c = M.compare k k' in if c < 0 then get k l else if c = 0 then v else get k r let rec modify k f (s : 'a map) : 'a map = match map_prj s with | MEmpty -> raise Not_found | MNode {l; v; d; r; h} -> let c = M.compare k v in if c < 0 then let l' = modify k f l in if l == l' then s else map_inj (MNode {l=l'; v; d; r; h}) else if c = 0 then let d' = f v d in if d' == d then s else map_inj (MNode {l; v; d=d'; r; h}) else let r' = modify k f r in if r == r' then s else map_inj (MNode {l; v; d; r=r'; h}) let rec domain (s : 'a map) : set = match map_prj s with | MEmpty -> set_inj SEmpty | MNode {l; v; r; h; _} -> set_inj (SNode (domain l, v, domain r, h)) (** This function is essentially identity, but OCaml current stdlib does not take advantage of the similarity of the two structures, so we introduce this unsafe loophole. *) let rec bind f (s : set) : 'a map = match set_prj s with | SEmpty -> map_inj MEmpty | SNode (l, k, r, h) -> map_inj (MNode { l=bind f l; v=k; d=f k; r=bind f r; h}) (** Dual operation of [domain]. 
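For instance, [bind f (domain m)] builds a map with exactly the keys of [m], each key [k] being bound to [f k]; this is an informal illustration of the specification given in [cMap.mli], not an additional requirement.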
*) let rec fold_left f (s : 'a map) accu = match map_prj s with | MEmpty -> accu | MNode {l; v=k; d=v; r; h} -> let accu = f k v (fold_left f l accu) in fold_left f r accu let rec fold_right f (s : 'a map) accu = match map_prj s with | MEmpty -> accu | MNode {l; v=k; d=v; r; h} -> let accu = f k v (fold_right f r accu) in fold_right f l accu let height s = match map_prj s with | MEmpty -> 0 | MNode {h;_} -> h (* Filter based on a range *) let filter_range in_range m = let rec aux m = function | MEmpty -> m | MNode {l; v; d; r; _} -> let vr = in_range v in (* the range is below the current value *) if vr < 0 then aux m (map_prj l) (* the range is above the current value *) else if vr > 0 then aux m (map_prj r) (* The current value is in the range *) else let m = aux m (map_prj l) in let m = aux m (map_prj r) in F.add v d m in aux F.empty (map_prj m) (* Imported from OCaml upstream until we can bump the version *) let create l x d r = let hl = height l and hr = height r in map_inj @@ MNode{l; v=x; d; r; h=(if hl >= hr then hl + 1 else hr + 1)} let bal l x d r = let hl = match map_prj l with MEmpty -> 0 | MNode {h} -> h in let hr = match map_prj r with MEmpty -> 0 | MNode {h} -> h in if hl > hr + 2 then begin match map_prj l with | MEmpty -> invalid_arg "Map.bal" | MNode{l=ll; v=lv; d=ld; r=lr} -> if height ll >= height lr then create ll lv ld (create lr x d r) else begin match map_prj lr with | MEmpty -> invalid_arg "Map.bal" | MNode{l=lrl; v=lrv; d=lrd; r=lrr}-> create (create ll lv ld lrl) lrv lrd (create lrr x d r) end end else if hr > hl + 2 then begin match map_prj r with | MEmpty -> invalid_arg "Map.bal" | MNode{l=rl; v=rv; d=rd; r=rr} -> if height rr >= height rl then create (create l x d rl) rv rd rr else begin match map_prj rl with | MEmpty -> invalid_arg "Map.bal" | MNode{l=rll; v=rlv; d=rld; r=rlr} -> create (create l x d rll) rlv rld (create rlr rv rd rr) end end else map_inj @@ MNode{l; v=x; d; r; h=(if hl >= hr then hl + 1 else hr + 1)} let rec remove_min_binding m = match map_prj m with | MEmpty -> invalid_arg "Map.remove_min_elt" | MNode {l;v;d;r;_} -> match map_prj l with | MEmpty -> r | _ -> bal (remove_min_binding l) v d r let merge t1 t2 = match (map_prj t1, map_prj t2) with (MEmpty, t) -> map_inj t | (t, MEmpty) -> map_inj t | (_, _) -> let (x, d) = F.min_binding t2 in bal t1 x d (remove_min_binding t2) let rec update x f m = match map_prj m with | MEmpty -> begin match f None with | None -> map_inj MEmpty | Some data -> map_inj @@ MNode{l=map_inj MEmpty; v=x; d=data; r=map_inj MEmpty; h=1} end | MNode {l; v; d; r; h} as m -> let c = M.compare x v in if c = 0 then begin match f (Some d) with | None -> merge l r | Some data -> if d == data then map_inj m else map_inj @@ MNode{l; v=x; d=data; r; h} end else if c < 0 then let ll = update x f l in if l == ll then map_inj m else bal ll v d r else let rr = update x f r in if r == rr then map_inj m else bal l v d rr (* End of Imported OCaml *) module Smart = struct let rec map f (s : 'a map) = match map_prj s with | MEmpty -> map_inj MEmpty | MNode {l; v=k; d=v; r; h} -> let l' = map f l in let r' = map f r in let v' = f v in if l == l' && r == r' && v == v' then s else map_inj (MNode {l=l'; v=k; d=v'; r=r'; h}) let rec mapi f (s : 'a map) = match map_prj s with | MEmpty -> map_inj MEmpty | MNode {l; v=k; d=v; r; h} -> let l' = mapi f l in let r' = mapi f r in let v' = f k v in if l == l' && r == r' && v == v' then s else map_inj (MNode {l=l'; v=k; d=v'; r=r'; h}) end module Unsafe = struct let rec map f (s : 'a map) : 'b map = 
match map_prj s with | MEmpty -> map_inj MEmpty | MNode {l; v=k; d=v; r; h} -> let (k, v) = f k v in map_inj (MNode {l=map f l; v=k; d=v; r=map f r; h}) end module Monad(M : MonadS) = struct open M let rec fold_left f s accu = match map_prj s with | MEmpty -> return accu | MNode {l; v=k; d=v; r; h} -> fold_left f l accu >>= fun accu -> f k v accu >>= fun accu -> fold_left f r accu let rec fold_right f s accu = match map_prj s with | MEmpty -> return accu | MNode {l; v=k; d=v; r; h} -> fold_right f r accu >>= fun accu -> f k v accu >>= fun accu -> fold_right f l accu let fold = fold_left end end module Make(M : Map.OrderedType) = struct include Map.Make(M) include MapExt(M) end coq-8.15.0/clib/cMap.mli000066400000000000000000000077221417001151100146620ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int end module type MonadS = sig type +'a t val return : 'a -> 'a t val (>>=) : 'a t -> ('a -> 'b t) -> 'b t end module type S = Map.S module type ExtS = sig include CSig.MapS (** The underlying Map library *) module Set : CSig.SetS with type elt = key (** Sets used by the domain function *) val get : key -> 'a t -> 'a (** Same as {!find} but fails an assertion instead of raising [Not_found] *) val set : key -> 'a -> 'a t -> 'a t (** Same as [add], but expects the key to be present, and thus faster. @raise Not_found when the key is unbound in the map. *) val modify : key -> (key -> 'a -> 'a) -> 'a t -> 'a t (** Apply the given function to the binding of the given key. @raise Not_found when the key is unbound in the map. *) val domain : 'a t -> Set.t (** Recover the set of keys defined in the map. *) val bind : (key -> 'a) -> Set.t -> 'a t (** [bind f s] transform the set [x1; ...; xn] into [x1 := f x1; ...; xn := f xn]. *) val fold_left : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b (** Alias for {!fold}, to easily track where we depend on fold order. *) val fold_right : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b (** Folding keys in decreasing order. *) val height : 'a t -> int (** An indication of the logarithmic size of a map *) val filter_range : (key -> int) -> 'a t -> 'a t (** [find_range in_range m] Given a comparison function [in_range x], that tests if [x] is below, above, or inside a given range [filter_range] returns the submap of [m] whose keys are in range. Note that [in_range] has to define a continouous range. *) val update: key -> ('a option -> 'a option) -> 'a t -> 'a t (** [update x f m] returns a map containing the same bindings as [m], except for the binding of [x]. Depending on the value of [y] where [y] is [f (find_opt x m)], the binding of [x] is added, removed or updated. If [y] is [None], the binding is removed if it exists; otherwise, if [y] is [Some z] then [x] is associated to [z] in the resulting map. If [x] was already bound in [m] to a value that is physically equal to [z], [m] is returned unchanged (the result of the function is then physically equal to [m]). *) module Smart : sig val map : ('a -> 'a) -> 'a t -> 'a t (** As [map] but tries to preserve sharing. *) val mapi : (key -> 'a -> 'a) -> 'a t -> 'a t (** As [mapi] but tries to preserve sharing. *) end module Unsafe : sig val map : (key -> 'a -> key * 'b) -> 'a t -> 'b t (** As the usual [map], but also allows modifying the key of a binding. 
It is required that the mapping function [f] preserves key equality, i.e.: for all (k : key) (x : 'a), compare (fst (f k x)) k = 0. *) end module Monad(M : MonadS) : sig val fold : (key -> 'a -> 'b -> 'b M.t) -> 'a t -> 'b -> 'b M.t val fold_left : (key -> 'a -> 'b -> 'b M.t) -> 'a t -> 'b -> 'b M.t val fold_right : (key -> 'a -> 'b -> 'b M.t) -> 'a t -> 'b -> 'b M.t end (** Fold operators parameterized by any monad. *) end module Make(M : Map.OrderedType) : ExtS with type key = M.t and type 'a t = 'a Map.Make(M).t and module Set := Set.Make(M) coq-8.15.0/clib/cObj.ml000066400000000000000000000141731417001151100145040ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* = Obj.no_scan_tag then if Obj.tag t = Obj.string_tag then (c := !c + Obj.size t; b := !b + 1; m := max d !m) else if Obj.tag t = Obj.double_tag then (s := !s + 2; b := !b + 1; m := max d !m) else if Obj.tag t = Obj.double_array_tag then (s := !s + 2 * Obj.size t; b := !b + 1; m := max d !m) else (b := !b + 1; m := max d !m) else let n = Obj.size t in s := !s + n; b := !b + 1; block_stats (d + 1) (n - 1) t and block_stats d i t = if i >= 0 then (obj_stats d (Obj.field t i); block_stats d (i-1) t) let obj_stats a = c := 0; s:= 0; b:= 0; m:= 0; obj_stats 0 (Obj.repr a); (!c, !s + !b, !m) (** {6 Physical sizes} *) (*s Pointers already visited are stored in a hash-table, where comparisons are done using physical equality. *) module H = Hashtbl.Make( struct type t = Obj.t let equal = (==) let hash = Hashtbl.hash end) let node_table = (H.create 257 : unit H.t) let in_table o = try H.find node_table o; true with Not_found -> false let add_in_table o = H.add node_table o () let reset_table () = H.clear node_table (*s Objects are traversed recursively, as soon as their tags are less than [no_scan_tag]. [count] records the numbers of words already visited. *) let size_of_double = Obj.size (Obj.repr 1.0) let count = ref 0 let rec traverse t = if not (in_table t) && Obj.is_block t then begin add_in_table t; let n = Obj.size t in let tag = Obj.tag t in if tag < Obj.no_scan_tag then begin count := !count + 1 + n; for i = 0 to n - 1 do traverse (Obj.field t i) done end else if tag = Obj.string_tag then count := !count + 1 + n else if tag = Obj.double_tag then count := !count + size_of_double else if tag = Obj.double_array_tag then count := !count + 1 + size_of_double * n else incr count end (*s Sizes of objects in words and in bytes. The size in bytes is computed system-independently according to [Sys.word_size]. *) let size o = reset_table (); count := 0; traverse (Obj.repr o); !count let size_b o = (size o) * (Sys.word_size / 8) let size_kb o = (size o) / (8192 / Sys.word_size) (** {6 Physical sizes with sharing} *) (** This time, all the size of objects are computed with respect to a larger object containing them all, and we only count the new blocks not already seen earlier in the left-to-right visit of the englobing object. The very same object could have a zero size or not, depending of the occurrence we're considering in the englobing object. For speaking of occurrences, we use an [int list] for a path of field indexes from the outmost block to the one we're looking. In the list, the leftmost integer is the field index in the deepest block. 
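For instance, with this convention the path [[1; 0]] denotes field [1] of the block stored in field [0] of the global object (see [shared_size_of_pos] below).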
*) (** We now store in the hashtable the size (with sharing), and also the position of the first occurrence of the object *) let node_sizes = (H.create 257 : (int*int list) H.t) let get_size o = H.find node_sizes o let add_size o n pos = H.replace node_sizes o (n,pos) let reset_sizes () = H.clear node_sizes let global_object = ref (Obj.repr 0) (** [sum n f] is [f 0 + f 1 + ... + f (n-1)], evaluated from left to right *) let sum n f = let rec loop k acc = if k >= n then acc else loop (k+1) (acc + f k) in loop 0 0 (** Recursive visit of the main object, filling the hashtable *) let rec compute_size o pos = if not (Obj.is_block o) then 0 else try let _ = get_size o in 0 (* already seen *) with Not_found -> let n = Obj.size o in add_size o (-1) pos (* temp size, for cyclic values *); let tag = Obj.tag o in let size = if tag < Obj.no_scan_tag then 1 + n + sum n (fun i -> compute_size (Obj.field o i) (i::pos)) else if tag = Obj.string_tag then 1 + n else if tag = Obj.double_tag then size_of_double else if tag = Obj.double_array_tag then size_of_double * n else 1 in add_size o size pos; size (** Provides the global object in which we'll search shared sizes *) let register_shared_size t = let o = Obj.repr t in reset_sizes (); global_object := o; ignore (compute_size o []) (** Shared size of an object with respect to the global object given by the last [register_shared_size] *) let shared_size pos o = if not (Obj.is_block o) then 0 else let size,pos' = try get_size o with Not_found -> failwith "shared_size: unregistered structure ?" in match pos with | Some p when p <> pos' -> 0 | _ -> size let shared_size_of_obj t = shared_size None (Obj.repr t) (** Shared size of the object at some positiion in the global object given by the last [register_shared_size] *) let shared_size_of_pos pos = let rec obj_of_pos o = function | [] -> o | n::pos' -> let o' = obj_of_pos o pos' in assert (Obj.is_block o' && n < Obj.size o'); Obj.field o' n in shared_size (Some pos) (obj_of_pos !global_object pos) (*s Total size of the allocated ocaml heap. *) let heap_size () = let stat = Gc.stat () and control = Gc.get () in let max_words_total = stat.Gc.heap_words + control.Gc.minor_heap_size in (max_words_total * (Sys.word_size / 8)) let heap_size_kb () = (heap_size () + 1023) / 1024 coq-8.15.0/clib/cObj.mli000066400000000000000000000044351417001151100146550ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* int (** Physical size of an object in words. *) val size_b : 'a -> int (** Same as [size] in bytes. *) val size_kb : 'a -> int (** Same as [size] in kilobytes. *) (** {6 Physical size of an ocaml value with sharing.} *) (** This time, all the size of objects are computed with respect to a larger object containing them all, and we only count the new blocks not already seen earlier in the left-to-right visit of the englobing object. *) (** Provides the global object in which we'll search shared sizes *) val register_shared_size : 'a -> unit (** Shared size (in word) of an object with respect to the global object given by the last [register_shared_size]. *) val shared_size_of_obj : 'a -> int (** Same, with an object indicated by its occurrence in the global object. The very same object could have a zero size or not, depending of the occurrence we're considering in the englobing object. 
For speaking of occurrences, we use an [int list] for a path of field indexes (leftmost = deepest block, rightmost = top block of the global object). *) val shared_size_of_pos : int list -> int (** {6 Logical size of an OCaml value.} *) val obj_stats : 'a -> int * int * int (** Return the (logical) value size, the string size, and the maximum depth of the object. This loops on cyclic structures. *) (** {6 Total size of the allocated ocaml heap. } *) val heap_size : unit -> int (** Heap size, in words. *) val heap_size_kb : unit -> int (** Heap size, in kilobytes. *) coq-8.15.0/clib/cSet.ml000066400000000000000000000036041417001151100145220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int end module type S = Set.S module Make(M : OrderedType)= Set.Make(M) module type HashedType = sig type t val hash : t -> int end module Hashcons(M : OrderedType)(H : HashedType with type t = M.t) = struct module Set = Make(M) type set = Set.t type _set = | SEmpty | SNode of set * M.t * set * int let set_prj : set -> _set = Obj.magic let set_inj : _set -> set = Obj.magic let rec spine s accu = match set_prj s with | SEmpty -> accu | SNode (l, v, r, _) -> spine l ((v, r) :: accu) let rec umap f s = match set_prj s with | SEmpty -> set_inj SEmpty | SNode (l, v, r, h) -> let l' = umap f l in let r' = umap f r in let v' = f v in set_inj (SNode (l', v', r', h)) let rec eqeq s1 s2 = match s1, s2 with | [], [] -> true | (v1, r1) :: s1, (v2, r2) :: s2 -> v1 == v2 && eqeq (spine r1 s1) (spine r2 s2) | _ -> false module Hashed = struct open Hashset.Combine type t = set type u = M.t -> M.t let eq s1 s2 = s1 == s2 || eqeq (spine s1 []) (spine s2 []) let hash s = Set.fold (fun v accu -> combine (H.hash v) accu) s 0 let hashcons = umap end include Hashcons.Make(Hashed) end coq-8.15.0/clib/cSet.mli000066400000000000000000000022351417001151100146720ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int end module type S = Set.S module Make(M : OrderedType) : S with type elt = M.t and type t = Set.Make(M).t module type HashedType = sig type t val hash : t -> int end module Hashcons (M : OrderedType) (H : HashedType with type t = M.t) : Hashcons.S with type t = Set.Make(M).t and type u = M.t -> M.t (** Create hash-consing for sets. The hashing function provided must be compatible with the comparison function. 
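A minimal instantiation sketch, with an arbitrary element type and a hash taken from the standard library:
    {[
      module IntOrd = struct type t = int let compare = Stdlib.compare end
      module IntHash = struct type t = int let hash = Hashtbl.hash end
      module HIntSet = Hashcons(IntOrd)(IntHash)
    ]}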
*) coq-8.15.0/clib/cSig.mli000066400000000000000000000064271417001151100146700ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* bool val mem: elt -> t -> bool val add: elt -> t -> t val singleton: elt -> t val remove: elt -> t -> t val union: t -> t -> t val inter: t -> t -> t val diff: t -> t -> t val compare: t -> t -> int val equal: t -> t -> bool val subset: t -> t -> bool val iter: (elt -> unit) -> t -> unit val fold: (elt -> 'a -> 'a) -> t -> 'a -> 'a val for_all: (elt -> bool) -> t -> bool val exists: (elt -> bool) -> t -> bool val filter: (elt -> bool) -> t -> t val partition: (elt -> bool) -> t -> t * t val cardinal: t -> int val elements: t -> elt list val min_elt: t -> elt val max_elt: t -> elt val choose: t -> elt val split: elt -> t -> t * bool * t end (** Redeclaration of OCaml set signature, to preserve compatibility. See OCaml documentation for more information. *) module type MapS = sig type key type (+'a) t val empty: 'a t val is_empty: 'a t -> bool val mem: key -> 'a t -> bool val add: key -> 'a -> 'a t -> 'a t (* when Coq requires OCaml 4.06 or later, can add: val update : key -> ('a option -> 'a option) -> 'a t -> 'a t allowing Coq to use OCaml's "update" *) val singleton: key -> 'a -> 'a t val remove: key -> 'a t -> 'a t val merge: (key -> 'a option -> 'b option -> 'c option) -> 'a t -> 'b t -> 'c t val union: (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t val compare: ('a -> 'a -> int) -> 'a t -> 'a t -> int val equal: ('a -> 'a -> bool) -> 'a t -> 'a t -> bool val iter: (key -> 'a -> unit) -> 'a t -> unit val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b val for_all: (key -> 'a -> bool) -> 'a t -> bool val exists: (key -> 'a -> bool) -> 'a t -> bool val filter: (key -> 'a -> bool) -> 'a t -> 'a t val partition: (key -> 'a -> bool) -> 'a t -> 'a t * 'a t val cardinal: 'a t -> int val bindings: 'a t -> (key * 'a) list val min_binding: 'a t -> (key * 'a) val max_binding: 'a t -> (key * 'a) val choose: 'a t -> (key * 'a) val choose_opt: 'a t -> (key * 'a) option val split: key -> 'a t -> 'a t * 'a option * 'a t val find: key -> 'a t -> 'a val find_opt : key -> 'a t -> 'a option val map: ('a -> 'b) -> 'a t -> 'b t val mapi: (key -> 'a -> 'b) -> 'a t -> 'b t end coq-8.15.0/clib/cString.ml000066400000000000000000000104721417001151100152360ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* int val is_empty : string -> bool val explode : string -> string list val implode : string list -> string val drop_simple_quotes : string -> string val string_index_from : string -> int -> string -> int val string_contains : where:string -> what:string -> bool val plural : int -> string -> string val conjugate_verb_to_be : int -> string val ordinal : int -> string val is_sub : string -> string -> int -> bool val is_prefix : string -> string -> bool val is_suffix : string -> string -> bool module Set : Set.S with type elt = t module Map : CMap.ExtS with type key = t and module Set := Set module Pred : Predicate.S with type elt = t module List : CList.MonoS with type elt = t val hcons : string -> string end include String let rec hash len s i accu = if i = len then accu else let c = Char.code (String.unsafe_get s i) in hash len s (succ i) (accu * 19 + c) 
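(* The auxiliary [hash] above folds the character codes left to right with a
   simple base-19 polynomial accumulation; the wrapper below seeds it with 0
   and runs it over the whole string. *)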
let hash s = let len = String.length s in hash len s 0 0 let explode s = let rec explode_rec n = if n >= String.length s then [] else String.make 1 (String.get s n) :: explode_rec (succ n) in explode_rec 0 let implode sl = String.concat "" sl let is_empty s = String.length s = 0 let drop_simple_quotes s = let n = String.length s in if n > 2 && s.[0] = '\'' && s.[n-1] = '\'' then String.sub s 1 (n-2) else s (* substring searching... *) (* gdzie = where, co = what *) (* gdzie=gdzie(string) gl=gdzie(length) gi=gdzie(index) *) let rec raw_is_sub gdzie gl gi co cl ci = (ci>=cl) || ((String.unsafe_get gdzie gi = String.unsafe_get co ci) && (raw_is_sub gdzie gl (gi+1) co cl (ci+1))) let rec raw_str_index i gdzie l c co cl = (* First adapt to ocaml 3.11 new semantics of index_from *) if (i+cl > l) then raise Not_found; (* Then proceed as in ocaml < 3.11 *) let i' = String.index_from gdzie i c in if (i'+cl <= l) && (raw_is_sub gdzie l i' co cl 0) then i' else raw_str_index (i'+1) gdzie l c co cl let string_index_from gdzie i co = if co="" then i else raw_str_index i gdzie (String.length gdzie) (String.unsafe_get co 0) co (String.length co) let string_contains ~where ~what = try let _ = string_index_from where 0 what in true with Not_found -> false let is_sub p s off = let lp = String.length p in let ls = String.length s in if ls < off + lp then false else let rec aux i = if lp <= i then true else let cp = String.unsafe_get p i in let cs = String.unsafe_get s (off + i) in if cp = cs then aux (succ i) else false in aux 0 let is_prefix p s = is_sub p s 0 let is_suffix p s = is_sub p s (String.length s - String.length p) let plural n s = if n<>1 then s^"s" else s let conjugate_verb_to_be n = if n<>1 then "are" else "is" let ordinal n = let s = if (n / 10) mod 10 = 1 then "th" else match n mod 10 with | 1 -> "st" | 2 -> "nd" | 3 -> "rd" | _ -> "th" in string_of_int n ^ s (* string parsing *) module Self = struct type t = string let compare = compare end module Set = Set.Make(Self) module Map = CMap.Make(Self) module Pred = Predicate.Make(Self) module List = struct type elt = string let mem id l = List.exists (fun s -> equal id s) l let assoc id l = CList.assoc_f equal id l let remove_assoc id l = CList.remove_assoc_f equal id l let mem_assoc id l = List.exists (fun (a,_) -> equal id a) l let mem_assoc_sym id l = List.exists (fun (_,b) -> equal id b) l let equal l l' = CList.equal equal l l' end let hcons = Hashcons.simple_hcons Hashcons.Hstring.generate Hashcons.Hstring.hcons () coq-8.15.0/clib/cString.mli000066400000000000000000000052371417001151100154120ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* int (** Hashing on strings. Should be compatible with generic one. *) val is_empty : string -> bool (** Test whether a string is empty. *) val explode : string -> string list (** [explode "x1...xn"] returns [["x1"; ...; "xn"]] *) val implode : string list -> string (** [implode [s1; ...; sn]] returns [s1 ^ ... ^ sn] *) val drop_simple_quotes : string -> string (** Remove the eventual first surrounding simple quotes of a string. 
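For instance, [drop_simple_quotes "'foo'"] is ["foo"], while a string that is not surrounded by simple quotes is returned unchanged.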
*) val string_index_from : string -> int -> string -> int (** As [index_from], but takes a string instead of a char as pattern argument *) val string_contains : where:string -> what:string -> bool (** As [contains], but takes a string instead of a char as pattern argument *) val plural : int -> string -> string (** [plural n s] adds a optional 's' to the [s] when [2 <= n]. *) val conjugate_verb_to_be : int -> string (** [conjugate_verb_to_be] returns "is" when [n=1] and "are" otherwise *) val ordinal : int -> string (** Generate the ordinal number in English. *) val is_sub : string -> string -> int -> bool (** [is_sub p s off] tests whether [s] contains [p] at offset [off]. *) val is_prefix : string -> string -> bool (** [is_prefix p s] tests whether [p] is a prefix of [s]. *) val is_suffix : string -> string -> bool (** [is_suffix suf s] tests whether [suf] is a suffix of [s]. *) (** {6 Generic operations} **) module Set : Set.S with type elt = t (** Finite sets on [string] *) module Map : CMap.ExtS with type key = t and module Set := Set (** Finite maps on [string] *) module Pred : Predicate.S with type elt = t module List : CList.MonoS with type elt = t (** Association lists with [string] as keys *) val hcons : string -> string (** Hashconsing on [string] *) end include ExtS coq-8.15.0/clib/cThread.ml000066400000000000000000000106661417001151100152040ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* loop () in loop () let thread_friendly_read ic s ~off ~len = try let fd = Unix.descr_of_in_channel ic in thread_friendly_read_fd fd s ~off ~len with Unix.Unix_error _ -> 0 let really_read_fd fd s off len = let i = ref 0 in while !i < len do let off = off + !i in let len = len - !i in let r = thread_friendly_read_fd fd s ~off ~len in if r = 0 then raise End_of_file; i := !i + r done let really_read_fd_2_oc fd oc len = let i = ref 0 in let size = 4096 in let s = Bytes.create size in while !i < len do let len = len - !i in let r = thread_friendly_read_fd fd s ~off:0 ~len:(min len size) in if r = 0 then raise End_of_file; i := !i + r; output oc s 0 r; done let thread_friendly_really_read ic s ~off ~len = try let fd = Unix.descr_of_in_channel ic in really_read_fd fd s off len with Unix.Unix_error _ -> raise End_of_file let thread_friendly_really_read_line ic = try let fd = Unix.descr_of_in_channel ic in let b = Buffer.create 1024 in let s = Bytes.make 1 '\000' in let endl = Bytes.of_string "\n" in (* Bytes.equal is in 4.03.0 *) while Bytes.compare s endl <> 0 do let n = thread_friendly_read_fd fd s ~off:0 ~len:1 in if n = 0 then raise End_of_file; if Bytes.compare s endl <> 0 then Buffer.add_bytes b s; done; Buffer.contents b with Unix.Unix_error _ -> raise End_of_file let thread_friendly_input_value ic = try let fd = Unix.descr_of_in_channel ic in let header = Bytes.create Marshal.header_size in really_read_fd fd header 0 Marshal.header_size; let body_size = Marshal.data_size header 0 in let desired_size = body_size + Marshal.header_size in if desired_size <= Sys.max_string_length then begin let msg = Bytes.create desired_size in Bytes.blit header 0 msg 0 Marshal.header_size; really_read_fd fd msg Marshal.header_size body_size; Marshal.from_bytes msg 0 end else begin (* Workaround for 32 bit systems and data > 16M *) let name, oc = Filename.open_temp_file ~mode:[Open_binary] "coq" "marshal" in try output oc header 0 
Marshal.header_size; really_read_fd_2_oc fd oc body_size; close_out oc; let ic = open_in_bin name in let data = Marshal.from_channel ic in close_in ic; Sys.remove name; data with e -> Sys.remove name; raise e end with Unix.Unix_error _ | Sys_error _ -> raise End_of_file (* On the ocaml runtime used in some opam-for-windows version the * [Thread.sigmask] API raises Invalid_argument "not implemented", * hence we protect the call and turn the exception into a no-op *) let mask_sigalrm f x = begin try ignore(Thread.sigmask Unix.SIG_BLOCK [Sys.sigalrm]) with Invalid_argument _ -> () end; f x let create f x = Thread.create (mask_sigalrm f) x (* Atomic mutex lock taken from https://gitlab.com/gadmm/memprof-limits/-/blob/master/src/thread_map.ml#L23-34 Critical sections : - Mutex.lock does not poll on leaving the blocking section since 4.12. - Never inline, to avoid theoretically-possible reorderings with flambda. (workaround to the lack of masking) *) (* We inline the call to Mutex.unlock to avoid polling in bytecode mode *) external unlock: Mutex.t -> unit = "caml_mutex_unlock" let[@inline never] with_lock m ~scope = let () = Mutex.lock m (* BEGIN ATOMIC *) in match (* END ATOMIC *) scope () with | (* BEGIN ATOMIC *) x -> unlock m ; (* END ATOMIC *) x | (* BEGIN ATOMIC *) exception e -> unlock m ; (* END ATOMIC *) raise e coq-8.15.0/clib/cThread.mli000066400000000000000000000033721417001151100153510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* thread_ic val thread_friendly_input_value : thread_ic -> 'a val thread_friendly_read : thread_ic -> Bytes.t -> off:int -> len:int -> int val thread_friendly_really_read : thread_ic -> Bytes.t -> off:int -> len:int -> unit val thread_friendly_really_read_line : thread_ic -> string (* Wrapper around Thread.create that blocks signals such as Sys.sigalrm (used * for Timeout *) val create : ('a -> 'b) -> 'a -> Thread.t (* Atomic mutex lock taken from https://gitlab.com/gadmm/memprof-limits/-/blob/master/src/thread_map.ml#L23-34 *) val with_lock : Mutex.t -> scope:(unit -> 'a) -> 'a coq-8.15.0/clib/cUnix.ml000066400000000000000000000132111417001151100147050ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* (* We give up to find a canonical name and just simplify it... *) current ^ dirsep ^ strip_path p let make_suffix name suffix = if Filename.check_suffix name suffix then name else (name ^ suffix) let correct_path f dir = if Filename.is_relative f then Filename.concat dir f else f let file_readable_p name = try Unix.access name [Unix.R_OK];true with Unix.Unix_error (_, _, _) -> false (* As for [Unix.close_process], a [Unix.waipid] that ignores all [EINTR] *) let rec waitpid_non_intr pid = try snd (Unix.waitpid [] pid) with Unix.Unix_error (Unix.EINTR, _, _) -> waitpid_non_intr pid (** [run_command com] launches command [com] (via /bin/sh), and returns the contents of stdout and stderr. If given, [~hook] is called on each elements read on stdout or stderr. 
*) let run_command ?(hook=(fun _ ->())) c = let result = Buffer.create 127 in let cin,cout,cerr = Unix.open_process_full c (Unix.environment ()) in let buff = Bytes.make 127 ' ' in let buffe = Bytes.make 127 ' ' in let n = ref 0 in let ne = ref 0 in while n:= input cin buff 0 127 ; ne := input cerr buffe 0 127 ; !n+ !ne <> 0 do let r = Bytes.sub buff 0 !n in (hook r; Buffer.add_bytes result r); let r = Bytes.sub buffe 0 !ne in (hook r; Buffer.add_bytes result r); done; (Unix.close_process_full (cin,cout,cerr), Buffer.contents result) (** [sys_command] launches program [prog] with arguments [args]. It behaves like [Sys.command], except that we rely on [Unix.create_process], it's hardly more complex and avoids dealing with shells. In particular, no need to quote arguments (against whitespace or other funny chars in paths), hence no need to care about the different quoting conventions of /bin/sh and cmd.exe. *) let sys_command prog args = let argv = Array.of_list (prog::args) in let pid = Unix.create_process prog argv Unix.stdin Unix.stdout Unix.stderr in waitpid_non_intr pid (* checks if two file names refer to the same (existing) file by comparing their device and inode. It seems that under Windows, inode is always 0, so we cannot accurately check if *) (* Optimised for partial application (in case many candidates must be compared to f1). *) let same_file f1 = try let s1 = Unix.stat f1 in (fun f2 -> try let s2 = Unix.stat f2 in s1.Unix.st_dev = s2.Unix.st_dev && if Sys.os_type = "Win32" then f1 = f2 else s1.Unix.st_ino = s2.Unix.st_ino with Unix.Unix_error _ -> false) with Unix.Unix_error _ -> (fun _ -> false) (* Copied from ocaml filename.ml *) let prng = lazy(Random.State.make_self_init ()) let temp_file_name temp_dir prefix suffix = let rnd = (Random.State.bits (Lazy.force prng)) land 0xFFFFFF in Filename.concat temp_dir (Printf.sprintf "%s%06x%s" prefix rnd suffix) let mktemp_dir ?(temp_dir=Filename.get_temp_dir_name()) prefix suffix = let rec try_name counter = let name = temp_file_name temp_dir prefix suffix in match Unix.mkdir name 0o700 with | () -> name | exception (Sys_error _ as e) -> if counter >= 1000 then raise e else try_name (counter + 1) in try_name 0 coq-8.15.0/clib/cUnix.mli000066400000000000000000000053601417001151100150640ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* physical_path val string_of_physical_path : physical_path -> string (** Escape what has to be escaped (e.g. surround with quotes if with spaces) *) val escaped_string_of_physical_path : physical_path -> string val canonical_path_name : string -> string (** Remove all initial "./" in a path *) val remove_path_dot : string -> string (** If a path [p] starts with the current directory $PWD then [strip_path p] returns the sub-path relative to $PWD. Any leading "./" are also removed from the result. *) val strip_path : string -> string (** correct_path f dir = dir/f if f is relative *) val correct_path : string -> string -> string val path_to_list : string -> string list (** [make_suffix file suf] catenate [file] with [suf] when [file] does not already end with [suf]. *) val make_suffix : string -> string -> string val file_readable_p : string -> bool (** {6 Executing commands } *) (** [run_command com] launches command [com], and returns the contents of stdout and stderr. If given, [~hook] is called on each elements read on stdout or stderr. 
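A typical call could look like (an illustrative sketch, the command itself is arbitrary):
    {[
      let status, log = run_command ~hook:print_bytes "echo hello" in
      match status with
      | Unix.WEXITED 0 -> print_string log
      | _ -> prerr_endline "command failed"
    ]}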
*) val run_command : ?hook:(bytes->unit) -> string -> Unix.process_status * string (** [sys_command] launches program [prog] with arguments [args]. It behaves like [Sys.command], except that we rely on [Unix.create_process], it's hardly more complex and avoids dealing with shells. In particular, no need to quote arguments (against whitespace or other funny chars in paths), hence no need to care about the different quoting conventions of /bin/sh and cmd.exe. *) val sys_command : string -> string list -> Unix.process_status (** A version of [Unix.waitpid] immune to EINTR exceptions *) val waitpid_non_intr : int -> Unix.process_status (** Check if two file names refer to the same (existing) file *) val same_file : string -> string -> bool (** Like [Stdlib.Filename.temp_file] but producing a directory. *) val mktemp_dir : ?temp_dir:string -> string -> string -> string coq-8.15.0/clib/diff2.ml000066400000000000000000000110731417001151100146150ustar00rootroot00000000000000(* copied from https://github.com/leque/ocaml-diff.git and renamed from "diff.ml" *) (* * Copyright (C) 2016 OOHASHI Daichi * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*) type 'a common = [ `Common of int * int * 'a ] type 'a edit = [ `Added of int * 'a | `Removed of int * 'a | 'a common ] module type SeqType = sig type t type elem val get : t -> int -> elem val length : t -> int end module type S = sig type t type elem val lcs : ?equal:(elem -> elem -> bool) -> t -> t -> elem common list val diff : ?equal:(elem -> elem -> bool) -> t -> t -> elem edit list val fold_left : ?equal:(elem -> elem -> bool) -> f:('a -> elem edit -> 'a) -> init:'a -> t -> t -> 'a val iter : ?equal:(elem -> elem -> bool) -> f:(elem edit -> unit) -> t -> t -> unit end module Make(M : SeqType) : (S with type t = M.t and type elem = M.elem) = struct type t = M.t type elem = M.elem let lcs ?(equal = (=)) a b = let n = M.length a in let m = M.length b in let mn = m + n in let sz = 2 * mn + 1 in let vd = Array.make sz 0 in let vl = Array.make sz 0 in let vr = Array.make sz [] in let get v i = Array.get v (i + mn) in let set v i x = Array.set v (i + mn) x in let finish () = let rec loop i maxl r = if i > mn then List.rev r else if get vl i > maxl then loop (i + 1) (get vl i) (get vr i) else loop (i + 1) maxl r in loop (- mn) 0 [] in if mn = 0 then [] else (* For d <- 0 to mn Do *) let rec dloop d = assert (d <= mn); (* For k <- -d to d in steps of 2 Do *) let rec kloop k = if k > d then dloop @@ d + 1 else let x, l, r = if k = -d || (k <> d && get vd (k - 1) < get vd (k + 1)) then get vd (k + 1), get vl (k + 1), get vr (k + 1) else get vd (k - 1) + 1, get vl (k - 1), get vr (k - 1) in let x, y, l, r = let rec xyloop x y l r = if x < n && y < m && equal (M.get a x) (M.get b y) then xyloop (x + 1) (y + 1) (l + 1) (`Common(x, y, M.get a x) :: r) else x, y, l, r in xyloop x (x - k) l r in set vd k x; set vl k l; set vr k r; if x >= n && y >= m then (* Stop *) finish () else kloop @@ k + 2 in kloop @@ -d in dloop 0 let fold_left ?(equal = (=)) ~f ~init a b = let ff x y = f y x in let fold_map f g x from to_ init = let rec loop i init = if i >= to_ then init else loop (i + 1) (f (g i @@ M.get x i) init) in loop from init in let added i x = `Added (i, x) in let removed i x = `Removed (i, x) in let rec loop cs apos bpos init = match cs with | [] -> init |> fold_map ff removed a apos (M.length a) |> fold_map ff added b bpos (M.length b) | `Common (aoff, boff, _) as e :: rest -> init |> fold_map ff removed a apos aoff |> fold_map ff added b bpos boff |> ff e |> loop rest (aoff + 1) (boff + 1) in loop (lcs ~equal a b) 0 0 init let diff ?(equal = (=)) a b = fold_left ~equal ~f:(fun xs x -> x::xs) ~init:[] a b let iter ?(equal = (=)) ~f a b = fold_left a b ~equal ~f:(fun () x -> f x) ~init:() end coq-8.15.0/clib/diff2.mli000066400000000000000000000057611417001151100147750ustar00rootroot00000000000000(* copied from https://github.com/leque/ocaml-diff.git and renamed from "diff.mli" *) (** An implementation of Eugene Myers' O(ND) Difference Algorithm\[1\]. This implementation is a port of util.lcs module of {{:http://practical-scheme.net/gauche} Gauche Scheme interpreter}. - \[1\] Eugene Myers, An O(ND) Difference Algorithm and Its Variations, Algorithmica Vol. 1 No. 2, pp. 251-266, 1986. *) type 'a common = [ `Common of int * int * 'a ] (** an element of lcs of seq1 and seq2 *) type 'a edit = [ `Removed of int * 'a | `Added of int * 'a | 'a common ] (** an element of diff of seq1 and seq2. *) module type SeqType = sig type t (** The type of the sequence. *) type elem (** The type of the elements of the sequence. *) val get : t -> int -> elem (** [get t n] returns [n]-th element of the sequence [t]. 
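For instance, a character sequence backed by [string] would take [String.get] here (and [String.length] for {!length} below).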
*) val length : t -> int (** [length t] returns the length of the sequence [t]. *) end (** Input signature of {!Diff.Make}. *) module type S = sig type t (** The type of input sequence. *) type elem (** The type of the elements of result / input sequence. *) val lcs : ?equal:(elem -> elem -> bool) -> t -> t -> elem common list (** [lcs ~equal seq1 seq2] computes the LCS (longest common sequence) of [seq1] and [seq2]. Elements of [seq1] and [seq2] are compared with [equal]. [equal] defaults to [Pervasives.(=)]. Elements of lcs are [`Common (pos1, pos2, e)] where [e] is an element, [pos1] is a position in [seq1], and [pos2] is a position in [seq2]. *) val diff : ?equal:(elem -> elem -> bool) -> t -> t -> elem edit list (** [diff ~equal seq1 seq2] computes the diff of [seq1] and [seq2]. Elements of [seq1] and [seq2] are compared with [equal]. Elements only in [seq1] are represented as [`Removed (pos, e)] where [e] is an element, and [pos] is a position in [seq1]; those only in [seq2] are represented as [`Added (pos, e)] where [e] is an element, and [pos] is a position in [seq2]; those common in [seq1] and [seq2] are represented as [`Common (pos1, pos2, e)] where [e] is an element, [pos1] is a position in [seq1], and [pos2] is a position in [seq2]. *) val fold_left : ?equal:(elem -> elem -> bool) -> f:('a -> elem edit -> 'a) -> init:'a -> t -> t -> 'a (** [fold_left ~equal ~f ~init seq1 seq2] is same as [diff ~equal seq1 seq2 |> ListLabels.fold_left ~f ~init], but does not create an intermediate list. *) val iter : ?equal:(elem -> elem -> bool) -> f:(elem edit -> unit) -> t -> t -> unit (** [iter ~equal ~f seq1 seq2] is same as [diff ~equal seq1 seq2 |> ListLabels.iter ~f], but does not create an intermediate list. *) end (** Output signature of {!Diff.Make}. *) module Make : functor (M : SeqType) -> (S with type t = M.t and type elem = M.elem) (** Functor building an implementation of the diff structure given a sequence type. *) coq-8.15.0/clib/dune000066400000000000000000000003001417001151100141360ustar00rootroot00000000000000(library (name clib) (synopsis "Coq's Utility Library [general purpose]") (public_name coq-core.clib) (wrapped false) (modules_without_implementation cSig) (libraries str unix threads)) coq-8.15.0/clib/dyn.ml000066400000000000000000000116131417001151100144150ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a value -> t -> t val remove : 'a key -> t -> t val find : 'a key -> t -> 'a value val mem : 'a key -> t -> bool type map = { map : 'a. 'a key -> 'a value -> 'a value } val map : map -> t -> t type any = Any : 'a key * 'a value -> any val iter : (any -> unit) -> t -> unit val fold : (any -> 'r -> 'r) -> t -> 'r -> 'r end module type PreS = sig type 'a tag type t = Dyn : 'a tag * 'a -> t val create : string -> 'a tag val anonymous : int -> 'a tag val eq : 'a tag -> 'b tag -> ('a, 'b) CSig.eq option val repr : 'a tag -> string val dump : unit -> (int * string) list type any = Any : 'a tag -> any val name : string -> any option module Map(Value : ValueS) : MapS with type 'a key = 'a tag and type 'a value = 'a Value.t module HMap (V1 : ValueS)(V2 : ValueS) : sig type map = { map : 'a. 
'a tag -> 'a V1.t -> 'a V2.t } val map : map -> Map(V1).t -> Map(V2).t end end module type S = sig include PreS module Easy : sig val make_dyn_tag : string -> ('a -> t) * (t -> 'a) * 'a tag val make_dyn : string -> ('a -> t) * (t -> 'a) val inj : 'a -> 'a tag -> t val prj : t -> 'a tag -> 'a option end end module Make () = struct module Self : PreS = struct (* Dynamics, programmed with DANGER !!! *) type 'a tag = int type t = Dyn : 'a tag * 'a -> t type any = Any : 'a tag -> any let dyntab = ref (Int.Map.empty : string Int.Map.t) (** Instead of working with tags as strings, which are costly, we use their hash. We ensure unicity of the hash in the [create] function. If ever a collision occurs, which is unlikely, it is sufficient to tweak the offending dynamic tag. *) let create (s : string) = let hash = Hashtbl.hash s in if Int.Map.mem hash !dyntab then begin let old = Int.Map.find hash !dyntab in Printf.eprintf "Dynamic tag collision: %s vs. %s\n%!" s old; assert false end; dyntab := Int.Map.add hash s !dyntab; hash let anonymous n = if Int.Map.mem n !dyntab then begin Printf.eprintf "Dynamic tag collision: %d\n%!" n; assert false end; dyntab := Int.Map.add n "" !dyntab; n let eq : 'a 'b. 'a tag -> 'b tag -> ('a, 'b) CSig.eq option = fun h1 h2 -> if Int.equal h1 h2 then Some (Obj.magic CSig.Refl) else None let repr s = try Int.Map.find s !dyntab with Not_found -> let () = Printf.eprintf "Unknown dynamic tag %i\n%!" s in assert false let name s = let hash = Hashtbl.hash s in if Int.Map.mem hash !dyntab then Some (Any hash) else None let dump () = Int.Map.bindings !dyntab module Map(Value: ValueS) = struct type t = Obj.t Value.t Int.Map.t type 'a key = 'a tag type 'a value = 'a Value.t let cast : 'a value -> 'b value = Obj.magic let empty = Int.Map.empty let add tag v m = Int.Map.add tag (cast v) m let remove tag m = Int.Map.remove tag m let find tag m = cast (Int.Map.find tag m) let mem = Int.Map.mem type map = { map : 'a. 'a tag -> 'a value -> 'a value } let map f m = Int.Map.mapi f.map m type any = Any : 'a tag * 'a value -> any let iter f m = Int.Map.iter (fun k v -> f (Any (k, v))) m let fold f m accu = Int.Map.fold (fun k v accu -> f (Any (k, v)) accu) m accu end module HMap (V1 : ValueS) (V2 : ValueS) = struct type map = { map : 'a. 'a tag -> 'a V1.t -> 'a V2.t } let map (f : map) (m : Map(V1).t) : Map(V2).t = Int.Map.mapi f.map m end end include Self module Easy = struct (* now tags are opaque, we can do the trick *) let make_dyn_tag (s : string) = (fun (type a) (tag : a tag) -> let infun : (a -> t) = fun x -> Dyn (tag, x) in let outfun : (t -> a) = fun (Dyn (t, x)) -> match eq tag t with | None -> assert false | Some CSig.Refl -> x in infun, outfun, tag) (create s) let make_dyn (s : string) = let inf, outf, _ = make_dyn_tag s in inf, outf let inj x tag = Dyn(tag,x) let prj : type a. t -> a tag -> a option = fun (Dyn(tag',x)) tag -> match eq tag tag' with | None -> None | Some CSig.Refl -> Some x end end coq-8.15.0/clib/dyn.mli000066400000000000000000000062361417001151100145730ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a value -> t -> t val remove : 'a key -> t -> t val find : 'a key -> t -> 'a value val mem : 'a key -> t -> bool type map = { map : 'a. 
'a key -> 'a value -> 'a value } val map : map -> t -> t type any = Any : 'a key * 'a value -> any val iter : (any -> unit) -> t -> unit val fold : (any -> 'r -> 'r) -> t -> 'r -> 'r end module type S = sig type 'a tag (** Type of dynamic tags *) type t = Dyn : 'a tag * 'a -> t (** Type of dynamic values *) val create : string -> 'a tag (** [create n] returns a tag describing a type called [n]. [create] raises an exception if [n] is already registered. Type names are hashed, so [create] may raise even if no type with the exact same name was registered due to a collision. *) val anonymous : int -> 'a tag (** [anonymous i] returns a tag describing an [i]-th anonymous type. If [anonymous] is not used together with [create], [max_int] anonymous types are available. [anonymous] raises an exception if [i] is already registered. *) val eq : 'a tag -> 'b tag -> ('a, 'b) CSig.eq option (** [eq t1 t2] returns [Some witness] if [t1] is the same as [t2], [None] otherwise. *) val repr : 'a tag -> string (** [repr tag] returns the name of the type represented by [tag]. *) val dump : unit -> (int * string) list (** [dump ()] returns a list of (tag, name) pairs for every type tag registered in this [Dyn.Make] instance. *) type any = Any : 'a tag -> any (** Type of boxed dynamic tags *) val name : string -> any option (** [name n] returns [Some t] where t is a boxed tag previously registered with [create n], or [None] if there is no such tag. *) module Map(Value : ValueS) : MapS with type 'a key = 'a tag and type 'a value = 'a Value.t (** Map from type tags to values parameterized by the tag type *) module HMap (V1 : ValueS)(V2 : ValueS) : sig type map = { map : 'a. 'a tag -> 'a V1.t -> 'a V2.t } val map : map -> Map(V1).t -> Map(V2).t end module Easy : sig (* To create a dynamic type on the fly *) val make_dyn_tag : string -> ('a -> t) * (t -> 'a) * 'a tag val make_dyn : string -> ('a -> t) * (t -> 'a) (* For types declared with the [create] function above *) val inj : 'a -> 'a tag -> t val prj : t -> 'a tag -> 'a option end end module Make () : S coq-8.15.0/clib/exninfo.ml000066400000000000000000000074031417001151100152730ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a t (** Create a new piece of information. *) val null : info (** No information *) val add : info -> 'a t -> 'a -> info (** Add information to an exception. *) val get : info -> 'a t -> 'a option (** Get information worn by an exception. Returns [None] if undefined. *) val info : exn -> info (** Retrieve the information of the last exception raised. *) type backtrace val get_backtrace : info -> backtrace option (** [get_backtrace info] does get the backtrace associated to info *) val backtrace_to_string : backtrace -> string (** [backtrace_to_string info] does get the backtrace associated to info *) val record_backtrace : bool -> unit val capture : exn -> iexn (** Add the current backtrace information to the given exception. The intended use case is of the form: {[ try foo with | Bar -> bar | exn -> let exn = Exninfo.capture err in baz ]} where [baz] should re-raise using [iraise] below. WARNING: any intermediate code between the [with] and the handler may modify the backtrace. Yes, that includes [when] clauses. 
Ideally, what you should do is something like: {[ try foo with exn -> let exn = Exninfo.capture exn in match err with | Bar -> bar | err -> baz ]} I admit that's a bit heavy, but there is not much to do... *) val iraise : iexn -> 'a (** Raise the given enriched exception. *) val reify : unit -> info coq-8.15.0/clib/hMap.ml000066400000000000000000000263551417001151100145210ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int val hash : t -> int end module SetMake(M : HashedType) = struct (** Hash Sets use hashes to prevent doing too many comparison tests. They associate to each hash the set of keys having that hash. Invariants: 1. There is no empty set in the intmap. 2. All values in the same set have the same hash, which is the int to which it is associated in the intmap. *) module Set = Set.Make(M) type elt = M.t type t = Set.t Int.Map.t let empty = Int.Map.empty let is_empty = Int.Map.is_empty let mem x s = let h = M.hash x in try let m = Int.Map.find h s in Set.mem x m with Not_found -> false let add x s = let h = M.hash x in try let m = Int.Map.find h s in let m = Set.add x m in Int.Map.set h m s with Not_found -> let m = Set.singleton x in Int.Map.add h m s let singleton x = let h = M.hash x in let m = Set.singleton x in Int.Map.singleton h m let remove x s = let h = M.hash x in try let m = Int.Map.find h s in let m = Set.remove x m in if Set.is_empty m then Int.Map.remove h s else Int.Map.set h m s with Not_found -> s let height s = Int.Map.height s let is_smaller s1 s2 = height s1 <= height s2 + 3 (** Assumes s1 << s2 *) let fast_union s1 s2 = let fold h s accu = try Int.Map.modify h (fun _ s' -> Set.fold Set.add s s') accu with Not_found -> Int.Map.add h s accu in Int.Map.fold fold s1 s2 let union s1 s2 = if is_smaller s1 s2 then fast_union s1 s2 else if is_smaller s2 s1 then fast_union s2 s1 else let fu _ m1 m2 = match m1, m2 with | None, None -> None | (Some _ as m), None | None, (Some _ as m) -> m | Some m1, Some m2 -> Some (Set.union m1 m2) in Int.Map.merge fu s1 s2 (** Assumes s1 << s2 *) let fast_inter s1 s2 = let fold h s accu = try let s' = Int.Map.find h s2 in let si = Set.filter (fun e -> Set.mem e s') s in if Set.is_empty si then accu else Int.Map.add h si accu with Not_found -> accu in Int.Map.fold fold s1 Int.Map.empty let inter s1 s2 = if is_smaller s1 s2 then fast_inter s1 s2 else if is_smaller s2 s1 then fast_inter s2 s1 else let fu _ m1 m2 = match m1, m2 with | None, None -> None | Some _, None | None, Some _ -> None | Some m1, Some m2 -> let m = Set.inter m1 m2 in if Set.is_empty m then None else Some m in Int.Map.merge fu s1 s2 (** Assumes s1 << s2 *) let fast_diff_l s1 s2 = let fold h s accu = try let s' = Int.Map.find h s2 in let si = Set.filter (fun e -> not (Set.mem e s')) s in if Set.is_empty si then accu else Int.Map.add h si accu with Not_found -> Int.Map.add h s accu in Int.Map.fold fold s1 Int.Map.empty (** Assumes s2 << s1 *) let fast_diff_r s1 s2 = let fold h s accu = try let s' = Int.Map.find h accu in let si = Set.filter (fun e -> not (Set.mem e s)) s' in if Set.is_empty si then Int.Map.remove h accu else Int.Map.set h si accu with Not_found -> accu in Int.Map.fold fold s2 s1 let diff s1 s2 = if is_smaller s1 s2 then fast_diff_l s1 s2 else if is_smaller s2 s2 then fast_diff_r s1 s2 else let fu _ m1 m2 = match m1, m2 with | None, None -> None | (Some _ as m), None -> m | None, Some _ 
-> None | Some m1, Some m2 -> let m = Set.diff m1 m2 in if Set.is_empty m then None else Some m in Int.Map.merge fu s1 s2 let compare s1 s2 = Int.Map.compare Set.compare s1 s2 let equal s1 s2 = Int.Map.equal Set.equal s1 s2 let subset s1 s2 = let check h m1 = let m2 = try Int.Map.find h s2 with Not_found -> Set.empty in Set.subset m1 m2 in Int.Map.for_all check s1 let iter f s = let fi _ m = Set.iter f m in Int.Map.iter fi s let fold f s accu = let ff _ m accu = Set.fold f m accu in Int.Map.fold ff s accu let for_all f s = let ff _ m = Set.for_all f m in Int.Map.for_all ff s let exists f s = let fe _ m = Set.exists f m in Int.Map.exists fe s let filter f s = let ff m = Set.filter f m in let s = Int.Map.map ff s in Int.Map.filter (fun _ m -> not (Set.is_empty m)) s let partition f s = let fold h m (sl, sr) = let (ml, mr) = Set.partition f m in let sl = if Set.is_empty ml then sl else Int.Map.add h ml sl in let sr = if Set.is_empty mr then sr else Int.Map.add h mr sr in (sl, sr) in Int.Map.fold fold s (Int.Map.empty, Int.Map.empty) let cardinal s = let fold _ m accu = accu + Set.cardinal m in Int.Map.fold fold s 0 let elements s = let fold _ m accu = Set.fold (fun x accu -> x :: accu) m accu in Int.Map.fold fold s [] let min_elt _ = assert false (** Cannot be implemented efficiently *) let max_elt _ = assert false (** Cannot be implemented efficiently *) let choose s = let (_, m) = Int.Map.choose s in Set.choose m let split s x = assert false (** Cannot be implemented efficiently *) end module Make(M : HashedType) = struct (** This module is essentially the same as SetMake, except that we have maps instead of sets in the intmap. Invariants are the same. *) module Set = SetMake(M) module Map = CMap.Make(M) type key = M.t type 'a t = 'a Map.t Int.Map.t let empty = Int.Map.empty let is_empty = Int.Map.is_empty let mem k s = let h = M.hash k in try let m = Int.Map.find h s in Map.mem k m with Not_found -> false let add k x s = let h = M.hash k in try let m = Int.Map.find h s in let m = Map.add k x m in Int.Map.set h m s with Not_found -> let m = Map.singleton k x in Int.Map.add h m s (* when Coq requires OCaml 4.06 or later, the module type CSig.MapS may include the signature of OCaml's "update", requiring an implementation here, which could be just: let update k f s = assert false (* not implemented *) *) let singleton k x = let h = M.hash k in Int.Map.singleton h (Map.singleton k x) let remove k s = let h = M.hash k in try let m = Int.Map.find h s in let m = Map.remove k m in if Map.is_empty m then Int.Map.remove h s else Int.Map.set h m s with Not_found -> s let merge f s1 s2 = let fm h m1 m2 = match m1, m2 with | None, None -> None | Some m, None -> let m = Map.merge f m Map.empty in if Map.is_empty m then None else Some m | None, Some m -> let m = Map.merge f Map.empty m in if Map.is_empty m then None else Some m | Some m1, Some m2 -> let m = Map.merge f m1 m2 in if Map.is_empty m then None else Some m in Int.Map.merge fm s1 s2 let union f s1 s2 = let fm h m1 m2 = let m = Map.union f m1 m2 in if Map.is_empty m then None else Some m in Int.Map.union fm s1 s2 let compare f s1 s2 = let fc m1 m2 = Map.compare f m1 m2 in Int.Map.compare fc s1 s2 let equal f s1 s2 = let fe m1 m2 = Map.equal f m1 m2 in Int.Map.equal fe s1 s2 let iter f s = let fi _ m = Map.iter f m in Int.Map.iter fi s let fold f s accu = let ff _ m accu = Map.fold f m accu in Int.Map.fold ff s accu let for_all f s = let ff _ m = Map.for_all f m in Int.Map.for_all ff s let exists f s = let fe _ m = Map.exists f m in Int.Map.exists 
fe s let filter f s = let ff m = Map.filter f m in let s = Int.Map.map ff s in Int.Map.filter (fun _ m -> not (Map.is_empty m)) s let partition f s = let fold h m (sl, sr) = let (ml, mr) = Map.partition f m in let sl = if Map.is_empty ml then sl else Int.Map.add h ml sl in let sr = if Map.is_empty mr then sr else Int.Map.add h mr sr in (sl, sr) in Int.Map.fold fold s (Int.Map.empty, Int.Map.empty) let cardinal s = let fold _ m accu = accu + Map.cardinal m in Int.Map.fold fold s 0 let bindings s = let fold _ m accu = Map.fold (fun k x accu -> (k, x) :: accu) m accu in Int.Map.fold fold s [] let min_binding _ = assert false (** Cannot be implemented efficiently *) let max_binding _ = assert false (** Cannot be implemented efficiently *) let fold_left _ _ _ = assert false (** Cannot be implemented efficiently *) let fold_right _ _ _ = assert false (** Cannot be implemented efficiently *) let choose s = let (_, m) = Int.Map.choose s in Map.choose m let choose_opt s = try Some (choose s) with Not_found -> None let find k s = let h = M.hash k in let m = Int.Map.find h s in Map.find k m let find_opt k s = let h = M.hash k in match Int.Map.find_opt h s with | None -> None | Some m -> Map.find_opt k m let get k s = let h = M.hash k in let m = Int.Map.get h s in Map.get k m let split k s = assert false (** Cannot be implemented efficiently *) let map f s = let fs m = Map.map f m in Int.Map.map fs s let mapi f s = let fs m = Map.mapi f m in Int.Map.map fs s let modify k f s = let h = M.hash k in let m = Int.Map.find h s in let m = Map.modify k f m in Int.Map.set h m s let bind f s = let fb m = Map.bind f m in Int.Map.map fb s let domain s = Int.Map.map Map.domain s let set k x s = let h = M.hash k in let m = Int.Map.find h s in let m = Map.set k x m in Int.Map.set h m s module Smart = struct let map f s = let fs m = Map.Smart.map f m in Int.Map.Smart.map fs s let mapi f s = let fs m = Map.Smart.mapi f m in Int.Map.Smart.map fs s end let height s = Int.Map.height s (* Not as efficient as the original version *) let filter_range f s = filter (fun x _ -> f x = 0) s let update k f m = let aux = function | None -> (match f None with | None -> None | Some v -> Some (Map.singleton k v)) | Some m -> let m = Map.update k f m in if Map.is_empty m then None else Some m in Int.Map.update (M.hash k) aux m module Unsafe = struct let map f s = let fs m = Map.Unsafe.map f m in Int.Map.map fs s end module Monad(M : CMap.MonadS) = struct module IntM = Int.Map.Monad(M) module ExtM = Map.Monad(M) let fold f s accu = let ff _ m accu = ExtM.fold f m accu in IntM.fold ff s accu let fold_left _ _ _ = assert false let fold_right _ _ _ = assert false end end coq-8.15.0/clib/hMap.mli000066400000000000000000000025701417001151100146630ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int (** Total ordering *) val hash : t -> int (** Hashing function compatible with [compare], i.e. [compare x y = 0] implies [hash x = hash y]. *) end (** Hash maps are maps that take advantage of having a hash on keys. This is essentially a hash table, except that it uses purely functional maps instead of arrays. CAVEAT: order-related functions like [fold] or [iter] do not respect the provided order anymore! It's your duty to do something sensible to prevent this if you need it. In particular, [min_binding] and [max_binding] are now made meaningless. 
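(* Illustrative sketch, not part of the original sources: a minimal use of
   [HMap.Make] as described above.  [StringHMap] and [_demo_hmap] are names
   invented for the example; only operations actually implemented in hMap.ml
   above ([empty], [add], [find], [mem], [fold]) are used, and the clib
   modules are assumed to be on the load path. *)
module StringHMap = HMap.Make(struct
  type t = string
  let compare = String.compare
  (* any hash compatible with [compare]; [Hashtbl.hash] is fine for strings *)
  let hash = Hashtbl.hash
end)

let _demo_hmap () =
  let m = StringHMap.add "b" 2 (StringHMap.add "a" 1 StringHMap.empty) in
  assert (StringHMap.find "a" m = 1 && StringHMap.mem "b" m);
  (* as the caveat above says, [fold]/[iter] do not follow the key order *)
  StringHMap.fold (fun _ v acc -> v + acc) m 0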
*) module Make(M : HashedType) : CMap.ExtS with type key = M.t coq-8.15.0/clib/hashcons.ml000066400000000000000000000106561417001151100154370ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t1)*(t2->t2)*...). * [hashcons u x] is a function that hash-cons the sub-structures of x using * the hash-consing functions u provides. * [eq] is a comparison function. It is allowed to use physical equality * on the sub-terms hash-consed by the hashcons function. * [hash] is the hash function given to the Hashtbl.Make function * * Note that this module type coerces to the argument of Hashtbl.Make. *) module type HashconsedType = sig type t type u val hashcons : u -> t -> t val eq : t -> t -> bool val hash : t -> int end (** The output is a function [generate] such that [generate args] creates a hash-table of the hash-consed objects, together with [hcons], a function taking a table and an object, and hashcons it. For simplicity of use, we use the wrapper functions defined below. *) module type S = sig type t type u type table val generate : u -> table val hcons : table -> t -> t val stats : table -> Hashset.statistics end module Make (X : HashconsedType) : (S with type t = X.t and type u = X.u) = struct type t = X.t type u = X.u (* We create the type of hashtables for t, with our comparison fun. * An invariant is that the table never contains two entries equals * w.r.t (=), although the equality on keys is X.eq. This is * granted since we hcons the subterms before looking up in the table. *) module Htbl = Hashset.Make(X) type table = (Htbl.t * u) let generate u = let tab = Htbl.create 97 in (tab, u) let hcons (tab, u) x = let y = X.hashcons u x in Htbl.repr (X.hash y) y tab let stats (tab, _) = Htbl.stats tab end (* A few useful wrappers: * takes as argument the function [generate] above and build a function of type * u -> t -> t that creates a fresh table each time it is applied to the * sub-hcons functions. *) (* For non-recursive types it is quite easy. *) let simple_hcons h f u = let table = h u in fun x -> f table x (* For a recursive type T, we write the module of sig Comp with u equals * to (T -> T) * u0 * The first component will be used to hash-cons the recursive subterms * The second one to hashcons the other sub-structures. * We just have to take the fixpoint of h *) let recursive_hcons h f u = let loop = ref (fun _ -> assert false) in let self x = !loop x in let table = h (self, u) in let hrec x = f table x in let () = loop := hrec in hrec (* Basic hashcons modules for string and obj. Integers do not need be hashconsed. 
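(* Illustrative sketch, not part of the original sources: using [Make] and
   [simple_hcons] on a non-recursive type.  [HPoint], [hcons_point] and
   [_demo_hashcons] are names invented for the example; [Hashset.Combine] is
   the hash-combining helper defined elsewhere in clib. *)
module HPoint = Hashcons.Make(struct
  type t = int * int        (* the values to be shared *)
  type u = unit             (* no sub-structures to hashcons *)
  let hashcons () p = p
  let eq (x1, y1) (x2, y2) = Int.equal x1 x2 && Int.equal y1 y2
  let hash (x, y) = Hashset.Combine.combine x y
end)

(* [simple_hcons] allocates a fresh table and returns the hashconsing closure. *)
let hcons_point : int * int -> int * int =
  Hashcons.simple_hcons HPoint.generate HPoint.hcons ()

let _demo_hashcons () =
  let p1 = hcons_point (1, 2) and p2 = hcons_point (1, 2) in
  assert (p1 == p2)   (* structurally equal values now share one representative *)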
*) module type HashedType = sig type t val hash : t -> int end (* list *) module Hlist (D:HashedType) = Make( struct type t = D.t list type u = (t -> t) * (D.t -> D.t) let hashcons (hrec,hdata) = function | x :: l -> hdata x :: hrec l | l -> l let eq l1 l2 = l1 == l2 || match l1, l2 with | [], [] -> true | x1::l1, x2::l2 -> x1==x2 && l1==l2 | _ -> false let rec hash accu = function | [] -> accu | x :: l -> let accu = Hashset.Combine.combine (D.hash x) accu in hash accu l let hash l = hash 0 l end) (* string *) module Hstring = Make( struct type t = string type u = unit let hashcons () s =(* incr accesstr;*) s let eq = String.equal (** Copy from CString *) let rec hash len s i accu = if i = len then accu else let c = Char.code (String.unsafe_get s i) in hash len s (succ i) (accu * 19 + c) let hash s = let len = String.length s in hash len s 0 0 end) coq-8.15.0/clib/hashcons.mli000066400000000000000000000071741417001151100156110ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> t (** The actual hashconsing function, using its fist argument to recursively hashcons substructures. It should be compatible with [eq], that is [eq x (hashcons f x) = true]. *) val eq : t -> t -> bool (** A comparison function. It is allowed to use physical equality on the sub-terms hashconsed by the [hashcons] function, but it should be insensible to shallow copy of the compared object. *) val hash : t -> int (** A hash function passed to the underlying hashtable structure. [hash] should be compatible with [eq], i.e. if [eq x y = true] then [hash x = hash y]. *) end module type S = sig type t (** Type of objects to hashcons. *) type u (** Type of hashcons functions for the sub-structures contained in [t]. *) type table (** Type of hashconsing tables *) val generate : u -> table (** This create a hashtable of the hashconsed objects. *) val hcons : table -> t -> t (** Perform the hashconsing of the given object within the table. *) val stats : table -> Hashset.statistics (** Recover statistics of the hashconsing table. *) end module Make (X : HashconsedType) : (S with type t = X.t and type u = X.u) (** Create a new hashconsing, given canonicalization functions. *) (** {6 Wrappers} *) (** These are intended to be used together with instances of the [Make] functor. *) val simple_hcons : ('u -> 'tab) -> ('tab -> 't -> 't) -> 'u -> 't -> 't (** [simple_hcons f sub obj] creates a new table each time it is applied to any sub-hash function [sub]. *) val recursive_hcons : (('t -> 't) * 'u -> 'tab) -> ('tab -> 't -> 't) -> ('u -> 't -> 't) (** As [simple_hcons] but intended to be used with well-founded data structures. *) (** {6 Hashconsing of usual structures} *) module type HashedType = sig type t val hash : t -> int end module Hstring : (S with type t = string and type u = unit) (** Hashconsing of strings. *) module Hlist (D:HashedType) : (S with type t = D.t list and type u = (D.t list -> D.t list)*(D.t->D.t)) (** Hashconsing of lists. 
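(* Illustrative sketch, not part of the original sources: combining [Hstring]
   and [Hlist] through the wrappers above to hashcons lists of strings.
   [HStrList], [hcons_string], [hcons_string_list] and [_demo_hlist] are names
   invented for the example. *)
module HStrList = Hashcons.Hlist(struct
  type t = string
  let hash = Hashtbl.hash
end)

(* hashconsing of the atoms (strings)... *)
let hcons_string : string -> string =
  Hashcons.simple_hcons Hashcons.Hstring.generate Hashcons.Hstring.hcons ()

(* ...is threaded through the recursive hashconsing of the lists *)
let hcons_string_list : string list -> string list =
  Hashcons.recursive_hcons HStrList.generate HStrList.hcons hcons_string

let _demo_hlist () =
  assert (hcons_string_list ["a"; "b"] == hcons_string_list ["a"; "b"])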
*) coq-8.15.0/clib/hashset.ml000066400000000000000000000171261417001151100152670ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> bool end type statistics = { num_bindings: int; num_buckets: int; max_bucket_length: int; bucket_histogram: int array } module type S = sig type elt type t val create : int -> t val clear : t -> unit val repr : int -> elt -> t -> elt val stats : t -> statistics end module Make (E : EqType) = struct type elt = E.t let emptybucket = Weak.create 0 type t = { mutable table : elt Weak.t array; mutable hashes : int array array; mutable limit : int; (* bucket size limit *) mutable oversize : int; (* number of oversize buckets *) mutable rover : int; (* for internal bookkeeping *) } let get_index t h = (h land max_int) mod (Array.length t) let limit = 7 let over_limit = 2 let create sz = let sz = if sz < 7 then 7 else sz in let sz = if sz > Sys.max_array_length then Sys.max_array_length else sz in { table = Array.make sz emptybucket; hashes = Array.make sz [| |]; limit = limit; oversize = 0; rover = 0; } let clear t = for i = 0 to Array.length t.table - 1 do t.table.(i) <- emptybucket; t.hashes.(i) <- [| |]; done; t.limit <- limit; t.oversize <- 0 let iter_weak f t = let rec iter_bucket i j b = if i >= Weak.length b then () else match Weak.check b i with | true -> f b t.hashes.(j) i; iter_bucket (i+1) j b | false -> iter_bucket (i+1) j b in for i = 0 to pred (Array.length t.table) do iter_bucket 0 i (Array.unsafe_get t.table i) done let rec count_bucket i b accu = if i >= Weak.length b then accu else count_bucket (i+1) b (accu + (if Weak.check b i then 1 else 0)) let min x y = if x - y < 0 then x else y let next_sz n = min (3 * n / 2 + 3) Sys.max_array_length let prev_sz n = ((n - 3) * 2 + 2) / 3 let test_shrink_bucket t = let bucket = t.table.(t.rover) in let hbucket = t.hashes.(t.rover) in let len = Weak.length bucket in let prev_len = prev_sz len in let live = count_bucket 0 bucket 0 in if live <= prev_len then begin let rec loop i j = if j >= prev_len then begin if Weak.check bucket i then loop (i + 1) j else if Weak.check bucket j then begin Weak.blit bucket j bucket i 1; hbucket.(i) <- hbucket.(j); loop (i + 1) (j - 1); end else loop i (j - 1); end; in loop 0 (Weak.length bucket - 1); if prev_len = 0 then begin t.table.(t.rover) <- emptybucket; t.hashes.(t.rover) <- [| |]; end else begin let newbucket = Weak.create prev_len in Weak.blit bucket 0 newbucket 0 prev_len; t.table.(t.rover) <- newbucket; t.hashes.(t.rover) <- Array.sub hbucket 0 prev_len end; if len > t.limit && prev_len <= t.limit then t.oversize <- t.oversize - 1; end; t.rover <- (t.rover + 1) mod (Array.length t.table) let rec resize t = let oldlen = Array.length t.table in let newlen = next_sz oldlen in if newlen > oldlen then begin let newt = create newlen in let add_weak ob oh oi = let setter nb ni _ = Weak.blit ob oi nb ni 1 in let h = oh.(oi) in add_aux newt setter None h (get_index newt.table h); in iter_weak add_weak t; t.table <- newt.table; t.hashes <- newt.hashes; t.limit <- newt.limit; t.oversize <- newt.oversize; t.rover <- t.rover mod Array.length newt.table; end else begin t.limit <- max_int; (* maximum size already reached *) t.oversize <- 0; end and add_aux t setter d h index = let bucket = t.table.(index) in let hashes = t.hashes.(index) in let sz = Weak.length bucket in let rec loop i = if i >= sz then begin let 
newsz = min (3 * sz / 2 + 3) (Sys.max_array_length - 1) in if newsz <= sz then failwith "Weak.Make: hash bucket cannot grow more"; let newbucket = Weak.create newsz in let newhashes = Array.make newsz 0 in Weak.blit bucket 0 newbucket 0 sz; Array.blit hashes 0 newhashes 0 sz; setter newbucket sz d; newhashes.(sz) <- h; t.table.(index) <- newbucket; t.hashes.(index) <- newhashes; if sz <= t.limit && newsz > t.limit then begin t.oversize <- t.oversize + 1; for _i = 0 to over_limit do test_shrink_bucket t done; end; if t.oversize > Array.length t.table / over_limit then resize t end else if Weak.check bucket i then begin loop (i + 1) end else begin setter bucket i d; hashes.(i) <- h end in loop 0 let repr h d t = let table = t.table in let index = get_index table h in let bucket = table.(index) in let hashes = t.hashes.(index) in let sz = Weak.length bucket in let pos = ref 0 in let ans = ref None in while !pos < sz && !ans == None do let i = !pos in if Int.equal h hashes.(i) then begin match Weak.get bucket i with | Some v as res when E.eq v d -> ans := res | _ -> incr pos end else incr pos done; if !pos >= sz then let () = add_aux t Weak.set (Some d) h index in d else match !ans with | None -> assert false | Some v -> v let stats t = let fold accu bucket = max (count_bucket 0 bucket 0) accu in let max_length = Array.fold_left fold 0 t.table in let histogram = Array.make (max_length + 1) 0 in let iter bucket = let len = count_bucket 0 bucket 0 in histogram.(len) <- succ histogram.(len) in let () = Array.iter iter t.table in let fold (num, len, i) k = (num + k * i, len + k, succ i) in let (num, len, _) = Array.fold_left fold (0, 0, 0) histogram in { num_bindings = num; num_buckets = len; max_bucket_length = Array.length histogram; bucket_histogram = histogram; } end module Combine = struct (* These are helper functions to combine the hash keys in a similar way as [Hashtbl.hash] does. The constants [alpha] and [beta] must be prime numbers. There were chosen empirically. Notice that the problem of hashing trees is hard and there are plenty of study on this topic. Therefore, there must be room for improvement here. *) let alpha = 65599 let beta = 7 let combine x y = x * alpha + y let combine3 x y z = combine x (combine y z) let combine4 x y z t = combine x (combine3 y z t) let combine5 x y z t u = combine x (combine4 y z t u) let combinesmall x y = beta * x + y end coq-8.15.0/clib/hashset.mli000066400000000000000000000042141417001151100154320ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> bool end type statistics = { num_bindings: int; num_buckets: int; max_bucket_length: int; bucket_histogram: int array } module type S = sig type elt (** Type of hashsets elements. *) type t (** Type of hashsets. *) val create : int -> t (** [create n] creates a fresh hashset with initial size [n]. *) val clear : t -> unit (** Clear the contents of a hashset. *) val repr : int -> elt -> t -> elt (** [repr key constr set] uses [key] to look for [constr] in the hashet [set]. If [constr] is in [set], returns the specific representation that is stored in [set]. Otherwise, [constr] is stored in [set] and will be used as the canonical representation of this value in the future. *) val stats : t -> statistics (** Recover statistics on the table. 
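(* Illustrative sketch, not part of the original sources: interning strings
   through a weak hash set built with [Make] (implemented above).
   [StringHashset], [intern] and [_demo_hashset] are names invented for the
   example. *)
module StringHashset = Hashset.Make(struct
  type t = string
  let eq = String.equal
end)

let _demo_hashset () =
  let table = StringHashset.create 97 in
  let intern s = StringHashset.repr (Hashtbl.hash s) s table in
  (* two distinct, freshly allocated copies of "foo"... *)
  let s1 = intern (String.concat "" ["fo"; "o"]) in
  let s2 = intern (String.concat "" ["f"; "oo"]) in
  assert (s1 == s2)   (* ...are collapsed onto a single representative *)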
*) end module Make (E : EqType) : S with type elt = E.t module Combine : sig val combine : int -> int -> int val combinesmall : int -> int -> int val combine3 : int -> int -> int -> int val combine4 : int -> int -> int -> int -> int val combine5 : int -> int -> int -> int -> int -> int end coq-8.15.0/clib/heap.ml000066400000000000000000000071551417001151100145460ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int end module type S =sig (* Type of functional heaps *) type t (* Type of elements *) type elt (* The empty heap *) val empty : t (* [add x h] returns a new heap containing the elements of [h], plus [x]; complexity $O(log(n))$ *) val add : elt -> t -> t (* [maximum h] returns the maximum element of [h]; raises [EmptyHeap] when [h] is empty; complexity $O(1)$ *) val maximum : t -> elt (* [remove h] returns a new heap containing the elements of [h], except the maximum of [h]; raises [EmptyHeap] when [h] is empty; complexity $O(log(n))$ *) val remove : t -> t (* usual iterators and combinators; elements are presented in arbitrary order *) val iter : (elt -> unit) -> t -> unit val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a end exception EmptyHeap (*s Functional implementation *) module Functional(X : Ordered) = struct (* Heaps are encoded as Braun trees, that are binary trees where size r <= size l <= size r + 1 for each node Node (l, x, r) *) type t = | Leaf | Node of t * X.t * t type elt = X.t let empty = Leaf let rec add x = function | Leaf -> Node (Leaf, x, Leaf) | Node (l, y, r) -> if X.compare x y >= 0 then Node (add y r, x, l) else Node (add x r, y, l) let rec extract = function | Leaf -> assert false | Node (Leaf, y, r) -> assert (r = Leaf); y, Leaf | Node (l, y, r) -> let x, l = extract l in x, Node (r, y, l) let is_above x = function | Leaf -> true | Node (_, y, _) -> X.compare x y >= 0 let rec replace_min x = function | Node (l, _, r) when is_above x l && is_above x r -> Node (l, x, r) | Node ((Node (_, lx, _) as l), _, r) when is_above lx r -> (* lx <= x, rx necessarily *) Node (replace_min x l, lx, r) | Node (l, _, (Node (_, rx, _) as r)) -> (* rx <= x, lx necessarily *) Node (l, rx, replace_min x r) | Leaf | Node (Leaf, _, _) | Node (_, _, Leaf) -> assert false (* merges two Braun trees [l] and [r], with the assumption that [size r <= size l <= size r + 1] *) let rec merge l r = match l, r with | _, Leaf -> l | Node (ll, lx, lr), Node (_, ly, _) -> if X.compare lx ly >= 0 then Node (r, lx, merge ll lr) else let x, l = extract l in Node (replace_min x r, ly, l) | Leaf, _ -> assert false (* contradicts the assumption *) let maximum = function | Leaf -> raise EmptyHeap | Node (_, x, _) -> x let remove = function | Leaf -> raise EmptyHeap | Node (l, _, r) -> merge l r let rec iter f = function | Leaf -> () | Node (l, x, r) -> iter f l; f x; iter f r let rec fold f h x0 = match h with | Leaf -> x0 | Node (l, x, r) -> fold f l (fold f r (f x x0)) end coq-8.15.0/clib/heap.mli000066400000000000000000000032531417001151100147120ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int end module type S =sig (** Type of functional heaps *) type t (** Type of elements *) type elt (** The empty heap *) val empty : t (** [add x h] returns a new heap 
containing the elements of [h], plus [x]; complexity {% $ %}O(log(n)){% $ %} *) val add : elt -> t -> t (** [maximum h] returns the maximum element of [h]; raises [EmptyHeap] when [h] is empty; complexity {% $ %}O(1){% $ %} *) val maximum : t -> elt (** [remove h] returns a new heap containing the elements of [h], except the maximum of [h]; raises [EmptyHeap] when [h] is empty; complexity {% $ %}O(log(n)){% $ %} *) val remove : t -> t (** usual iterators and combinators; elements are presented in arbitrary order *) val iter : (elt -> unit) -> t -> unit val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a end exception EmptyHeap (** {6 Functional implementation. } *) module Functional(X: Ordered) : S with type elt=X.t coq-8.15.0/clib/iStream.ml000066400000000000000000000042411417001151100152260ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Nil | Cons (x,s) -> app_node (peek (f x)) (concat_map f s) and concat_map f l = lazy (concat_map_node f (peek l)) coq-8.15.0/clib/iStream.mli000066400000000000000000000044371417001151100154060ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a t -> 'a t (** Append an element in front of a stream. *) val thunk : (unit -> 'a node) -> 'a t (** Internalize the laziness of a stream. *) (** {6 Destructors} *) val is_empty : 'a t -> bool (** Whethere a stream is empty. *) val peek : 'a t -> 'a node (** Return the head and the tail of a stream, if any. *) (** {6 Standard operations} All stream-returning functions are lazy. The other ones are eager. *) val app : 'a t -> 'a t -> 'a t (** Append two streams. Not tail-rec. *) val map : ('a -> 'b) -> 'a t -> 'b t (** Mapping of streams. Not tail-rec. *) val iter : ('a -> unit) -> 'a t -> unit (** Iteration over streams. *) val fold : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a (** Fold over streams. *) val concat : 'a t t -> 'a t (** Appends recursively a stream of streams. *) val map_filter : ('a -> 'b option) -> 'a t -> 'b t (** Mixing [map] and [filter]. Not tail-rec. *) val concat_map : ('a -> 'b t) -> 'a t -> 'b t (** [concat_map f l] is the same as [concat (map f l)]. *) (** {6 Conversions} *) val of_list : 'a list -> 'a t (** Convert a list into a stream. *) val to_list : 'a t -> 'a list (** Convert a stream into a list. *) (** {6 Other}*) val force : 'a t -> 'a t (** Forces the whole stream. 
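(* Illustrative sketch, not part of the original sources: a functional
   max-priority queue over ints using [Heap.Functional] (the Braun-tree
   implementation above).  [IntHeap] and [_demo_heap] are names invented for
   the example; [Int.compare] is clib's integer comparison. *)
module IntHeap = Heap.Functional(struct
  type t = int
  let compare = Int.compare
end)

let _demo_heap () =
  let h = List.fold_left (fun h x -> IntHeap.add x h) IntHeap.empty [3; 1; 4; 1; 5] in
  assert (IntHeap.maximum h = 5);
  let h' = IntHeap.remove h in
  (* the heaps are persistent: [h] still has its maximum *)
  assert (IntHeap.maximum h' = 4 && IntHeap.maximum h = 5)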
*) coq-8.15.0/clib/int.ml000066400000000000000000000134251417001151100144200ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* int -> bool = "%eq" external compare : int -> int -> int = "caml_int_compare" let hash i = i land 0x3FFFFFFF module Self = struct type t = int let compare = compare end module Set = Set.Make(Self) module Map = struct include CMap.Make(Self) type 'a map = 'a CMap.Make(Self).t type 'a _map = | MEmpty | MNode of 'a map * int * 'a * 'a map * int let map_prj : 'a map -> 'a _map = Obj.magic let rec find i s = match map_prj s with | MEmpty -> raise Not_found | MNode (l, k, v, r, h) -> if i < k then find i l else if i = k then v else find i r let rec get i s = match map_prj s with | MEmpty -> assert false | MNode (l, k, v, r, h) -> if i < k then get i l else if i = k then v else get i r let rec find_opt i s = match map_prj s with | MEmpty -> None | MNode (l, k, v, r, h) -> if i < k then find_opt i l else if i = k then Some v else find_opt i r end module List = struct let mem = List.memq let assoc = List.assq let mem_assoc = List.mem_assq let remove_assoc = List.remove_assq end let min (i : int) j = if i < j then i else j (** Utility function *) let rec next from upto = if from < upto then next (2 * from + 1) upto else from module PArray = struct type 'a t = 'a data ref and 'a data = | Root of 'a option array | DSet of int * 'a option * 'a t let empty n = ref (Root (Array.make n None)) let rec rerootk t k = match !t with | Root _ -> k () | DSet (i, v, t') -> let next () = match !t' with | Root a as n -> let v' = Array.unsafe_get a i in let () = Array.unsafe_set a i v in let () = t := n in let () = t' := DSet (i, v', t) in k () | DSet _ -> assert false in rerootk t' next let reroot t = rerootk t (fun () -> ()) let get t i = let () = assert (0 <= i) in match !t with | Root a -> if Array.length a <= i then None else Array.unsafe_get a i | DSet _ -> let () = reroot t in match !t with | Root a -> if Array.length a <= i then None else Array.unsafe_get a i | DSet _ -> assert false let set t i v = let () = assert (0 <= i) in let () = reroot t in match !t with | DSet _ -> assert false | Root a as n -> let len = Array.length a in if i < len then let old = Array.unsafe_get a i in if old == v then t else let () = Array.unsafe_set a i v in let res = ref n in let () = t := DSet (i, old, res) in res else match v with | None -> t (* Nothing to do! *) | Some _ -> (* we must resize *) let nlen = next len (succ i) in let nlen = min nlen Sys.max_array_length in let () = assert (i < nlen) in let a' = Array.make nlen None in let () = Array.blit a 0 a' 0 len in let () = Array.unsafe_set a' i v in let res = ref (Root a') in let () = t := DSet (i, None, res) in res end module PMap = struct type key = int (** Invariants: 1. an empty map is always [Empty]. 2. the set of the [Map] constructor remembers the present keys. 
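(* Illustrative sketch, not part of the original sources: the arrays above
   are persistent, so [set] returns a new version while older versions keep
   their previous contents (internally, via the rerooting trick).  Written
   with the qualified name [Int.PArray], as a client of the module would use
   it; [_demo_parray] is a name invented for the example. *)
let _demo_parray () =
  let a0 = Int.PArray.empty 16 in
  let a1 = Int.PArray.set a0 0 (Some "x") in
  assert (Int.PArray.get a1 0 = Some "x");
  assert (Int.PArray.get a0 0 = None)   (* the old version is unchanged *)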
*) type 'a t = Empty | Map of Set.t * 'a PArray.t let empty = Empty let is_empty = function | Empty -> true | Map _ -> false let singleton k x = let len = next 19 (k + 1) in let len = min Sys.max_array_length len in let v = PArray.empty len in let v = PArray.set v k (Some x) in let s = Set.singleton k in Map (s, v) let add k x = function | Empty -> singleton k x | Map (s, v) -> let s = match PArray.get v k with | None -> Set.add k s | Some _ -> s in let v = PArray.set v k (Some x) in Map (s, v) let remove k = function | Empty -> Empty | Map (s, v) -> let s = Set.remove k s in if Set.is_empty s then Empty else let v = PArray.set v k None in Map (s, v) let mem k = function | Empty -> false | Map (_, v) -> match PArray.get v k with | None -> false | Some _ -> true let find k = function | Empty -> raise Not_found | Map (_, v) -> match PArray.get v k with | None -> raise Not_found | Some x -> x let iter f = function | Empty -> () | Map (s, v) -> let iter k = match PArray.get v k with | None -> () | Some x -> f k x in Set.iter iter s let fold f m accu = match m with | Empty -> accu | Map (s, v) -> let fold k accu = match PArray.get v k with | None -> accu | Some x -> f k x accu in Set.fold fold s accu let exists f m = match m with | Empty -> false | Map (s, v) -> let exists k = match PArray.get v k with | None -> false | Some x -> f k x in Set.exists exists s let for_all f m = match m with | Empty -> true | Map (s, v) -> let for_all k = match PArray.get v k with | None -> true | Some x -> f k x in Set.for_all for_all s let cast = function | Empty -> Map.empty | Map (s, v) -> let bind k = match PArray.get v k with | None -> assert false | Some x -> x in Map.bind bind s let domain = function | Empty -> Set.empty | Map (s, _) -> s end coq-8.15.0/clib/int.mli000066400000000000000000000065141417001151100145720ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> bool = "%eq" external compare : t -> t -> int = "caml_int_compare" val hash : t -> int module Set : Set.S with type elt = t module Map : CMap.ExtS with type key = t and module Set := Set module List : sig val mem : int -> int list -> bool val assoc : int -> (int * 'a) list -> 'a val mem_assoc : int -> (int * 'a) list -> bool val remove_assoc : int -> (int * 'a) list -> (int * 'a) list end module PArray : sig type 'a t (** Persistent, auto-resizable arrays. The [get] and [set] functions never fail whenever the index is between [0] and [Sys.max_array_length - 1]. *) val empty : int -> 'a t (** The empty array, with a given starting size. *) val get : 'a t -> int -> 'a option (** Get a value at the given index. Returns [None] if undefined. *) val set : 'a t -> int -> 'a option -> 'a t (** Set/unset a value at the given index. 
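(* Illustrative sketch, not part of the original sources: basic use of the
   [PMap] structure implemented in int.ml above (its signature follows just
   below), from a client's point of view.  [_demo_pmap] is a name invented
   for the example. *)
let _demo_pmap () =
  let m = Int.PMap.add 2 "two" (Int.PMap.add 0 "zero" Int.PMap.empty) in
  assert (Int.PMap.find 2 m = "two");
  assert (not (Int.PMap.mem 1 m));
  (* [cast] recovers an ordinary [Int.Map.t] when the full map API is needed *)
  assert (Int.Map.find 0 (Int.PMap.cast m) = "zero")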
*) end module PMap : sig type key = int type 'a t val empty : 'a t val is_empty : 'a t -> bool val mem : key -> 'a t -> bool val add : key -> 'a -> 'a t -> 'a t val singleton : key -> 'a -> 'a t val remove : key -> 'a t -> 'a t (* val merge : (key -> 'a option -> 'b option -> 'c option) -> 'a t -> 'b t -> 'c t *) (* val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int *) (* val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool *) val iter : (key -> 'a -> unit) -> 'a t -> unit val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b val for_all : (key -> 'a -> bool) -> 'a t -> bool val exists : (key -> 'a -> bool) -> 'a t -> bool (* val filter : (key -> 'a -> bool) -> 'a t -> 'a t *) (* val partition : (key -> 'a -> bool) -> 'a t -> 'a t * 'a t *) (* val cardinal : 'a t -> int *) (* val bindings : 'a t -> (key * 'a) list *) (* val min_binding : 'a t -> key * 'a *) (* val max_binding : 'a t -> key * 'a *) (* val choose : 'a t -> key * 'a *) (* val split : key -> 'a t -> 'a t * 'a option * 'a t *) val find : key -> 'a t -> 'a (* val map : ('a -> 'b) -> 'a t -> 'b t *) (* val mapi : (key -> 'a -> 'b) -> 'a t -> 'b t *) val domain : 'a t -> Set.t val cast : 'a t -> 'a Map.t end (** This is a (partial) implementation of a [Map] interface on integers, except that it internally uses persistent arrays. This ensures O(1) accesses in non-backtracking cases. It is thus better suited for zero-starting, contiguous keys, or otherwise a lot of space will be empty. To keep track of the present keys, a binary tree is also used, so that adding a key is still logarithmic. It is therefore essential that most of the operations are accesses and not add/removes. *) coq-8.15.0/clib/minisys.ml000066400000000000000000000064651417001151100153270ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* true | _ -> false (* Check directory can be opened *) let exists_dir dir = (* See BZ#5391 on windows failing on a trailing (back)slash *) let rec strip_trailing_slash dir = let len = String.length dir in if len > 0 && (dir.[len-1] = '/' || dir.[len-1] = '\\') then strip_trailing_slash (String.sub dir 0 (len-1)) else dir in let dir = if Sys.os_type = "Win32" then strip_trailing_slash dir else dir in try Sys.is_directory dir with Sys_error _ -> false let apply_subdir f path name = (* we avoid all files and subdirs starting by '.' (e.g. .svn) *) (* as well as skipped files like CVS, ... *) let base = try Filename.chop_extension name with Invalid_argument _ -> name in if ok_dirname base then let path = if path = "." 
then name else path//name in match try (Unix.stat path).Unix.st_kind with Unix.Unix_error _ -> Unix.S_BLK with | Unix.S_DIR when name = base -> f (FileDir (path,name)) | Unix.S_REG -> f (FileRegular name) | _ -> () let readdir dir = try Sys.readdir dir with any -> [||] let process_directory f path = Array.iter (apply_subdir f path) (readdir path) let process_subdirectories f path = let f = function FileDir (path,base) -> f path base | FileRegular _ -> () in process_directory f path coq-8.15.0/clib/monad.ml000066400000000000000000000117221417001151100147220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a t val (>>=) : 'a t -> ('a -> 'b t) -> 'b t val (>>) : unit t -> 'a t -> 'a t val map : ('a -> 'b) -> 'a t -> 'b t (** The monadic laws must hold: - [(x>>=f)>>=g] = [x>>=fun x' -> (f x'>>=g)] - [return a >>= f] = [f a] - [x>>=return] = [x] As well as the following identities: - [x >> y] = [x >>= fun () -> y] - [map f x] = [x >>= fun x' -> f x'] *) end module type ListS = sig type 'a t (** [List.map f l] maps [f] on the elements of [l] in left to right order. *) val map : ('a -> 'b t) -> 'a list -> 'b list t (** [List.map f l] maps [f] on the elements of [l] in right to left order. *) val map_right : ('a -> 'b t) -> 'a list -> 'b list t (** Like the regular [List.fold_right]. The monadic effects are threaded right to left. Note: many monads behave poorly with right-to-left order. For instance a failure monad would still have to traverse the whole list in order to fail and failure needs to be propagated through the rest of the list in binds which are now spurious. It is also the worst case for substitution monads (aka free monads), exposing the quadratic behaviour.*) val fold_right : ('a -> 'b -> 'b t) -> 'a list -> 'b -> 'b t (** Like the regular [List.fold_left]. The monadic effects are threaded left to right. It is tail-recursive if the [(>>=)] operator calls its second argument in a tail position. *) val fold_left : ('a -> 'b -> 'a t) -> 'a -> 'b list -> 'a t (** Like the regular [List.iter]. The monadic effects are threaded left to right. It is tail-recurisve if the [>>] operator calls its second argument in a tail position. *) val iter : ('a -> unit t) -> 'a list -> unit t (** Like the regular {!CList.map_filter}. The monadic effects are threaded left*) val map_filter : ('a -> 'b option t) -> 'a list -> 'b list t (** {6 Two-list iterators} *) (** [fold_left2 r f s l1 l2] behaves like {!fold_left} but acts simultaneously on two lists. Runs [r] (presumably an exception-raising computation) if both lists do not have the same length. *) val fold_left2 : 'a t -> ('a -> 'b -> 'c -> 'a t) -> 'a -> 'b list -> 'c list -> 'a t end module type S = sig include Def (** List combinators *) module List : ListS with type 'a t := 'a t end module Make (M:Def) : S with type +'a t = 'a M.t = struct include M module List = struct (* The combinators are loop-unrolled to spare a some monadic binds (it is a common optimisation to treat the last of a list of bind specially) and hopefully gain some efficiency using fewer jump. 
*) let rec map f = function | [] -> return [] | [a] -> M.map (fun a' -> [a']) (f a) | a::b::l -> f a >>= fun a' -> f b >>= fun b' -> M.map (fun l' -> a'::b'::l') (map f l) let rec map_right f = function | [] -> return [] | [a] -> M.map (fun a' -> [a']) (f a) | a::b::l -> map_right f l >>= fun l' -> f b >>= fun b' -> M.map (fun a' -> a'::b'::l') (f a) let rec fold_right f l x = match l with | [] -> return x | [a] -> f a x | a::b::l -> fold_right f l x >>= fun acc -> f b acc >>= fun acc -> f a acc let rec fold_left f x = function | [] -> return x | [a] -> f x a | a::b::l -> f x a >>= fun x' -> f x' b >>= fun x'' -> fold_left f x'' l let rec iter f = function | [] -> return () | [a] -> f a | a::b::l -> f a >> f b >> iter f l let rec map_filter f = function | [] -> return [] | a::l -> f a >>= function | None -> map_filter f l | Some b -> map_filter f l >>= fun filtered -> return (b::filtered) let rec fold_left2 r f x l1 l2 = match l1,l2 with | [] , [] -> return x | [a] , [b] -> f x a b | a1::a2::l1 , b1::b2::l2 -> f x a1 b1 >>= fun x' -> f x' a2 b2 >>= fun x'' -> fold_left2 r f x'' l1 l2 | _ , _ -> r end end coq-8.15.0/clib/monad.mli000066400000000000000000000064261417001151100151000ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a t val (>>=) : 'a t -> ('a -> 'b t) -> 'b t val (>>) : unit t -> 'a t -> 'a t val map : ('a -> 'b) -> 'a t -> 'b t (** The monadic laws must hold: - [(x>>=f)>>=g] = [x>>=fun x' -> (f x'>>=g)] - [return a >>= f] = [f a] - [x>>=return] = [x] As well as the following identities: - [x >> y] = [x >>= fun () -> y] - [map f x] = [x >>= fun x' -> f x'] *) end (** List combinators *) module type ListS = sig type 'a t (** [List.map f l] maps [f] on the elements of [l] in left to right order. *) val map : ('a -> 'b t) -> 'a list -> 'b list t (** [List.map f l] maps [f] on the elements of [l] in right to left order. *) val map_right : ('a -> 'b t) -> 'a list -> 'b list t (** Like the regular [List.fold_right]. The monadic effects are threaded right to left. Note: many monads behave poorly with right-to-left order. For instance a failure monad would still have to traverse the whole list in order to fail and failure needs to be propagated through the rest of the list in binds which are now spurious. It is also the worst case for substitution monads (aka free monads), exposing the quadratic behaviour.*) val fold_right : ('a -> 'b -> 'b t) -> 'a list -> 'b -> 'b t (** Like the regular [List.fold_left]. The monadic effects are threaded left to right. It is tail-recursive if the [(>>=)] operator calls its second argument in a tail position. *) val fold_left : ('a -> 'b -> 'a t) -> 'a -> 'b list -> 'a t (** Like the regular [List.iter]. The monadic effects are threaded left to right. It is tail-recurisve if the [>>] operator calls its second argument in a tail position. *) val iter : ('a -> unit t) -> 'a list -> unit t (** Like the regular {!CList.map_filter}. The monadic effects are threaded left to right. *) val map_filter : ('a -> 'b option t) -> 'a list -> 'b list t (** {6 Two-list iterators} *) (** [fold_left2 r f s l1 l2] behaves like {!fold_left} but acts simultaneously on two lists. Runs [r] (presumably an exception-raising computation) if both lists do not have the same length. 
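(* Illustrative sketch, not part of the original sources: instantiating
   [Make] (defined in monad.ml above) with the option monad and using the
   derived list combinators.  [OptionMonad], [halve] and [_demo_monad] are
   names invented for the example. *)
module OptionMonad = Monad.Make(struct
  type 'a t = 'a option
  let return x = Some x
  let (>>=) m f = match m with None -> None | Some x -> f x
  let (>>) m k = m >>= fun () -> k
  let map f m = match m with None -> None | Some x -> Some (f x)
end)

let _demo_monad () =
  (* [List.map] threads a possible failure through the whole list *)
  let halve x = if x mod 2 = 0 then Some (x / 2) else None in
  assert (OptionMonad.List.map halve [2; 4; 6] = Some [1; 2; 3]);
  assert (OptionMonad.List.map halve [2; 3] = None)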
*) val fold_left2 : 'a t -> ('a -> 'b -> 'c -> 'a t) -> 'a -> 'b list -> 'c list -> 'a t end module type S = sig include Def module List : ListS with type 'a t := 'a t end (** Expands the monadic definition to extra combinators. *) module Make (M:Def) : S with type +'a t = 'a M.t coq-8.15.0/clib/neList.ml000066400000000000000000000023711417001151100150620ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* None | y::tl -> Some (y,tl) let singleton x = x,[] let iter f (x,tl) = f x; List.iter f tl let map f (x,tl) = let x = f x in let tl = List.map f tl in x, tl let map2 f (x,tl) (x',tl') = let x = f x x' in let tl = List.map2 f tl tl' in x, tl let map_head f (x,tl) = f x, tl let push x = function | None -> x, [] | Some (y,tl) -> x, y::tl let to_list (x,tl) = x::tl let of_list = function | [] -> invalid_arg "NeList.of_list" | x::tl -> x,tl let repr x = x let of_repr x = x coq-8.15.0/clib/neList.mli000066400000000000000000000022231417001151100152270ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a val tail : 'a t -> 'a t option val singleton : 'a -> 'a t val iter : ('a -> unit) -> 'a t -> unit val map : ('a -> 'b) -> 'a t -> 'b t val map2 : ('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t val map_head : ('a -> 'a) -> 'a t -> 'a t val push : 'a -> 'a t option -> 'a t val to_list : 'a t -> 'a list (** May raise Invalid_argument *) val of_list : 'a list -> 'a t val repr : 'a t -> 'a * 'a list val of_repr : 'a * 'a list -> 'a t coq-8.15.0/clib/option.ml000066400000000000000000000131371417001151100151360ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* false | _ -> true let is_empty = function | None -> true | Some _ -> false (** Lifting equality onto option types. *) let equal f x y = match x, y with | None, None -> true | Some x, Some y -> f x y | _, _ -> false let compare f x y = match x, y with | None, None -> 0 | Some x, Some y -> f x y | None, Some _ -> -1 | Some _, None -> 1 let hash f = function | None -> 0 | Some x -> f x exception IsNone (** [get x] returns [y] where [x] is [Some y]. @raise IsNone if [x] equals [None]. *) let get = function | Some y -> y | _ -> raise IsNone (** [make x] returns [Some x]. *) let make x = Some x (** [bind x f] is [f y] if [x] is [Some y] and [None] otherwise *) let bind x f = match x with Some y -> f y | None -> None let filter f x = bind x (fun v -> if f v then x else None) (** [init b x] returns [Some x] if [b] is [true] and [None] otherwise. *) let init b x = if b then Some x else None (** [flatten x] is [Some y] if [x] is [Some (Some y)] and [None] otherwise. *) let flatten = function | Some (Some y) -> Some y | _ -> None (** [append x y] is the first element of the concatenation of [x] and [y] seen as lists. *) let append o1 o2 = match o1 with | Some _ -> o1 | None -> o2 (** {6 "Iterators"} ***) (** [iter f x] executes [f y] if [x] equals [Some y]. It does nothing otherwise. *) let iter f = function | Some y -> f y | _ -> () exception Heterogeneous (** [iter2 f x y] executes [f z w] if [x] equals [Some z] and [y] equals [Some w]. 
It does nothing if both [x] and [y] are [None]. And raises [Heterogeneous] otherwise. *) let iter2 f x y = match x,y with | Some z, Some w -> f z w | None,None -> () | _,_ -> raise Heterogeneous (** [map f x] is [None] if [x] is [None] and [Some (f y)] if [x] is [Some y]. *) let map f = function | Some y -> Some (f y) | _ -> None (** [fold_left f a x] is [f a y] if [x] is [Some y], and [a] otherwise. *) let fold_left f a = function | Some y -> f a y | _ -> a (** [fold_left2 f a x y] is [f z w] if [x] is [Some z] and [y] is [Some w]. It is [a] if both [x] and [y] are [None]. Otherwise it raises [Heterogeneous]. *) let fold_left2 f a x y = match x,y with | Some x, Some y -> f a x y | None, None -> a | _ -> raise Heterogeneous (** [fold_right f x a] is [f y a] if [x] is [Some y], and [a] otherwise. *) let fold_right f x a = match x with | Some y -> f y a | _ -> a (** [fold_left_map f a x] is [a, f y] if [x] is [Some y], and [a] otherwise. *) let fold_left_map f a x = match x with | Some y -> let a, z = f a y in a, Some z | _ -> a, None let fold_right_map f x a = match x with | Some y -> let z, a = f y a in Some z, a | _ -> None, a (** [cata f a x] is [a] if [x] is [None] and [f y] if [x] is [Some y]. *) let cata f a = function | Some c -> f c | None -> a (** {6 More Specific operations} ***) (** [default a x] is [y] if [x] is [Some y] and [a] otherwise. *) let default a = function | Some y -> y | _ -> a (** [lift f x] is the same as [map f x]. *) let lift = map (** [lift_right f a x] is [Some (f a y)] if [x] is [Some y], and [None] otherwise. *) let lift_right f a = function | Some y -> Some (f a y) | _ -> None (** [lift_left f x a] is [Some (f y a)] if [x] is [Some y], and [None] otherwise. *) let lift_left f x a = match x with | Some y -> Some (f y a) | _ -> None (** [lift2 f x y] is [Some (f z w)] if [x] equals [Some z] and [y] equals [Some w]. It is [None] otherwise. *) let lift2 f x y = match x,y with | Some z, Some w -> Some (f z w) | _,_ -> None (** {6 Smart operations} *) module Smart = struct (** [Smart.map f x] does the same as [map f x] except that it tries to share some memory. *) let map f = function | Some y as x -> let y' = f y in if y' == y then x else Some y' | _ -> None end (** {6 Operations with Lists} *) module List = struct (** [List.cons x l] equals [y::l] if [x] is [Some y] and [l] otherwise. *) let cons x l = match x with | Some y -> y::l | _ -> l (** [List.flatten l] is the list of all the [y]s such that [l] contains [Some y] (in the same order). *) let rec flatten = function | x::l -> cons x (flatten l) | [] -> [] let rec find f = function | [] -> None | h :: t -> match f h with | None -> find f t | x -> x let map f l = let rec aux f l = match l with | [] -> [] | x :: l -> match f x with | None -> raise Exit | Some y -> y :: aux f l in try Some (aux f l) with Exit -> None end coq-8.15.0/clib/option.mli000066400000000000000000000132771417001151100153140ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* bool (** Negation of [has_some] *) val is_empty : 'a option -> bool (** [equal f x y] lifts the equality predicate [f] to option types. That is, if both [x] and [y] are [None] then it returns [true], if they are both [Some _] then [f] is called. Otherwise it returns [false]. *) val equal : ('a -> 'a -> bool) -> 'a option -> 'a option -> bool (** Same as [equal], but with comparison. 
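(* Illustrative sketch, not part of the original sources: typical chaining
   of the combinators implemented in option.ml above (and declared in this
   interface).  [safe_div] and [_demo_option] are names invented for the
   example. *)
let _demo_option () =
  let safe_div x y = if y = 0 then None else Some (x / y) in
  assert (Option.map succ (safe_div 10 2) = Some 6);
  assert (Option.bind (safe_div 10 0) (fun q -> Some (q + 1)) = None);
  assert (Option.default 0 (safe_div 10 0) = 0);
  assert (Option.cata string_of_int "undefined" (safe_div 9 3) = "3")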
*) val compare : ('a -> 'a -> int) -> 'a option -> 'a option -> int (** Lift a hash to option types. *) val hash : ('a -> int) -> 'a option -> int (** [get x] returns [y] where [x] is [Some y]. @raise IsNone if [x] equals [None]. *) val get : 'a option -> 'a (** [make x] returns [Some x]. *) val make : 'a -> 'a option (** [bind x f] is [f y] if [x] is [Some y] and [None] otherwise *) val bind : 'a option -> ('a -> 'b option) -> 'b option (** [filter f x] is [x] if [x] [Some y] and [f y] is true, [None] otherwise *) val filter : ('a -> bool) -> 'a option -> 'a option (** [init b x] returns [Some x] if [b] is [true] and [None] otherwise. *) val init : bool -> 'a -> 'a option (** [flatten x] is [Some y] if [x] is [Some (Some y)] and [None] otherwise. *) val flatten : 'a option option -> 'a option (** [append x y] is the first element of the concatenation of [x] and [y] seen as lists. In other words, [append (Some a) y] is [Some a], [append None (Some b)] is [Some b], and [append None None] is [None]. *) val append : 'a option -> 'a option -> 'a option (** {6 "Iterators"} *) (** [iter f x] executes [f y] if [x] equals [Some y]. It does nothing otherwise. *) val iter : ('a -> unit) -> 'a option -> unit exception Heterogeneous (** [iter2 f x y] executes [f z w] if [x] equals [Some z] and [y] equals [Some w]. It does nothing if both [x] and [y] are [None]. @raise Heterogeneous otherwise. *) val iter2 : ('a -> 'b -> unit) -> 'a option -> 'b option -> unit (** [map f x] is [None] if [x] is [None] and [Some (f y)] if [x] is [Some y]. *) val map : ('a -> 'b) -> 'a option -> 'b option (** [fold_left f a x] is [f a y] if [x] is [Some y], and [a] otherwise. *) val fold_left : ('b -> 'a -> 'b) -> 'b -> 'a option -> 'b (** [fold_left2 f a x y] is [f z w] if [x] is [Some z] and [y] is [Some w]. It is [a] if both [x] and [y] are [None]. @raise Heterogeneous otherwise. *) val fold_left2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b option -> 'c option -> 'a (** [fold_right f x a] is [f y a] if [x] is [Some y], and [a] otherwise. *) val fold_right : ('a -> 'b -> 'b) -> 'a option -> 'b -> 'b (** [fold_left_map f a x] is [a, f y] if [x] is [Some y], and [a] otherwise. *) val fold_left_map : ('a -> 'b -> 'a * 'c) -> 'a -> 'b option -> 'a * 'c option (** Same as [fold_left_map] on the right *) val fold_right_map : ('b -> 'a -> 'c * 'a) -> 'b option -> 'a -> 'c option * 'a (** [cata f e x] is [e] if [x] is [None] and [f a] if [x] is [Some a] *) val cata : ('a -> 'b) -> 'b -> 'a option -> 'b (** {6 More Specific Operations} *) (** [default a x] is [y] if [x] is [Some y] and [a] otherwise. *) val default : 'a -> 'a option -> 'a (** [lift] is the same as {!map}. *) val lift : ('a -> 'b) -> 'a option -> 'b option (** [lift_right f a x] is [Some (f a y)] if [x] is [Some y], and [None] otherwise. *) val lift_right : ('a -> 'b -> 'c) -> 'a -> 'b option -> 'c option (** [lift_left f x a] is [Some (f y a)] if [x] is [Some y], and [None] otherwise. *) val lift_left : ('a -> 'b -> 'c) -> 'a option -> 'b -> 'c option (** [lift2 f x y] is [Some (f z w)] if [x] equals [Some z] and [y] equals [Some w]. It is [None] otherwise. *) val lift2 : ('a -> 'b -> 'c) -> 'a option -> 'b option -> 'c option (** {6 Smart operations} *) module Smart : sig (** [Smart.map f x] does the same as [map f x] except that it tries to share some memory. *) val map : ('a -> 'a) -> 'a option -> 'a option end (** {6 Operations with Lists} *) module List : sig (** [List.cons x l] equals [y::l] if [x] is [Some y] and [l] otherwise. 
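(* Illustrative sketch, not part of the original sources: the [List]
   submodule declared just below, as implemented in option.ml above.
   [parse] and [_demo_option_list] are names invented for the example. *)
let _demo_option_list () =
  let parse s = try Some (int_of_string s) with Failure _ -> None in
  assert (Option.List.flatten [Some 1; None; Some 2] = [1; 2]);
  assert (Option.List.map parse ["1"; "2"] = Some [1; 2]);
  assert (Option.List.map parse ["1"; "x"] = None);
  assert (Option.List.find parse ["a"; "7"; "8"] = Some 7)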
*) val cons : 'a option -> 'a list -> 'a list (** [List.flatten l] is the list of all the [y]s such that [l] contains [Some y] (in the same order). *) val flatten : 'a option list -> 'a list (** [List.find f l] is the first [f a] different from [None], scrolling through elements [a] of [l] in left-to-right order; it is [None] if no such element exists. *) val find : ('a -> 'b option) -> 'a list -> 'b option (** [List.map f [a1;...;an]] is the list [Some [b1;...;bn]] if for all i, there is a [bi] such that [f ai] is [Some bi]; it is [None] if, for at least one i, [f ai] is [None]. *) val map : ('a -> 'b option) -> 'a list -> 'b list option end coq-8.15.0/clib/orderedType.ml000066400000000000000000000022101417001151100161020ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int end module Pair (M:S) (N:S) = struct type t = M.t * N.t let compare (a,b) (a',b') = let i = M.compare a a' in if Int.equal i 0 then N.compare b b' else i end module UnorderedPair (M:S) = struct type t = M.t * M.t let reorder (a,b as p) = if M.compare a b <= 0 then p else (b,a) let compare p p' = let p = reorder p and p' = reorder p' in let module P = Pair(M)(M) in P.compare p p' end coq-8.15.0/clib/orderedType.mli000066400000000000000000000015201417001151100162560ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int end module Pair (M:S) (N:S) : S with type t = M.t * N.t module UnorderedPair (M:S) : S with type t = M.t * M.t coq-8.15.0/clib/predicate.ml000066400000000000000000000073661417001151100155750ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int end module type S = sig type elt type t val empty: t val full: t val is_empty: t -> bool val is_full: t -> bool val mem: elt -> t -> bool val singleton: elt -> t val add: elt -> t -> t val remove: elt -> t -> t val union: t -> t -> t val inter: t -> t -> t val diff: t -> t -> t val complement: t -> t val equal: t -> t -> bool val subset: t -> t -> bool val elements: t -> bool * elt list val is_finite : t -> bool end module Make(Ord: OrderedType) = struct module EltSet = Set.Make(Ord) type elt = Ord.t (* (false, s) represents a set which is equal to the set s (true, s) represents a set which is equal to the complement of set s *) type t = bool * EltSet.t let is_finite (b,_) = not b let elements (b,s) = (b, EltSet.elements s) let empty = (false,EltSet.empty) let full = (true,EltSet.empty) (* assumes the set is infinite *) let is_empty (b,s) = not b && EltSet.is_empty s let is_full (b,s) = b && EltSet.is_empty s let mem x (b,s) = if b then not (EltSet.mem x s) else EltSet.mem x s let singleton x = (false,EltSet.singleton x) let add x (b,s) = if b then (b,EltSet.remove x s) else (b,EltSet.add x s) let remove x (b,s) = if b then (b,EltSet.add x s) else (b,EltSet.remove x s) let complement (b,s) = (not b, s) let union s1 s2 = match (s1,s2) with ((false,p1),(false,p2)) -> (false,EltSet.union p1 p2) | ((true,n1),(true,n2)) -> (true,EltSet.inter n1 n2) | ((false,p1),(true,n2)) -> (true,EltSet.diff n2 p1) | ((true,n1),(false,p2)) -> (true,EltSet.diff n1 p2) let inter s1 s2 = 
complement (union (complement s1) (complement s2)) let diff s1 s2 = inter s1 (complement s2) (* assumes the set is infinite *) let subset s1 s2 = match (s1,s2) with ((false,p1),(false,p2)) -> EltSet.subset p1 p2 | ((true,n1),(true,n2)) -> EltSet.subset n2 n1 | ((false,p1),(true,n2)) -> EltSet.is_empty (EltSet.inter p1 n2) | ((true,_),(false,_)) -> false (* assumes the set is infinite *) let equal (b1,s1) (b2,s2) = b1=b2 && EltSet.equal s1 s2 end coq-8.15.0/clib/predicate.mli000066400000000000000000000052701417001151100157360ustar00rootroot00000000000000(** Infinite sets over a chosen [OrderedType]. All operations over sets are purely applicative (no side-effects). *) (** Input signature of the functor [Make]. *) module type OrderedType = sig type t (** The type of the elements in the set. The chosen [t] {b must be infinite}. *) val compare : t -> t -> int (** A total ordering function over the set elements. This is a two-argument function [f] such that: - [f e1 e2] is zero if the elements [e1] and [e2] are equal, - [f e1 e2] is strictly negative if [e1] is smaller than [e2], - and [f e1 e2] is strictly positive if [e1] is greater than [e2]. *) end module type S = sig type elt (** The type of the elements in the set. *) type t (** The type of sets. *) val empty: t (** The empty set. *) val full: t (** The set of all elements (of type [elm]). *) val is_empty: t -> bool (** Test whether a set is empty or not. *) val is_full: t -> bool (** Test whether a set contains the whole type or not. *) val mem: elt -> t -> bool (** [mem x s] tests whether [x] belongs to the set [s]. *) val singleton: elt -> t (** [singleton x] returns the one-element set containing only [x]. *) val add: elt -> t -> t (** [add x s] returns a set containing all elements of [s], plus [x]. If [x] was already in [s], then [s] is returned unchanged. *) val remove: elt -> t -> t (** [remove x s] returns a set containing all elements of [s], except [x]. If [x] was not in [s], then [s] is returned unchanged. *) val union: t -> t -> t (** Set union. *) val inter: t -> t -> t (** Set intersection. *) val diff: t -> t -> t (** Set difference. *) val complement: t -> t (** Set complement. *) val equal: t -> t -> bool (** [equal s1 s2] tests whether the sets [s1] and [s2] are equal, that is, contain equal elements. *) val subset: t -> t -> bool (** [subset s1 s2] tests whether the set [s1] is a subset of the set [s2]. *) val elements: t -> bool * elt list (** Gives a finite representation of the predicate: if the boolean is false, then the predicate is given in extension. if it is true, then the complement is given *) val is_finite : t -> bool (** [true] if the predicate can be given as a finite set (if [elt] is a finite type, we can have [is_finite x = false] yet [x] is finite, but we don't know how to list its elements) *) end (** The [Make] functor constructs an implementation for any [OrderedType]. 
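(* Illustrative sketch, not part of the original sources: co-finite sets.
   The complement of a finite set is represented exactly, as explained
   above.  [IntPred] and [_demo_predicate] are names invented for the
   example; [Int.compare] is clib's integer comparison. *)
module IntPred = Predicate.Make(struct
  type t = int
  let compare = Int.compare
end)

let _demo_predicate () =
  let not_0_1 = IntPred.complement (IntPred.add 0 (IntPred.singleton 1)) in
  assert (IntPred.mem 42 not_0_1 && not (IntPred.mem 0 not_0_1));
  assert (not (IntPred.is_finite not_0_1));
  (* [elements] reports the predicate through its (finite) complement *)
  assert (IntPred.elements not_0_1 = (true, [0; 1]))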
*) module Make (Ord : OrderedType) : (S with type elt = Ord.t) coq-8.15.0/clib/range.ml000066400000000000000000000051471417001151100147240ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* if Int.equal h1 h2 then Cons (1 + h1 + h2, Node (x, t1, t2), rem) else Cons (1, Leaf x, l) | _ -> Cons (1, Leaf x, l) let is_empty = function | Nil -> true | _ -> false let rec tree_get h t i = match t with | Leaf x -> if i = 0 then x else oob () | Node (x, t1, t2) -> if i = 0 then x else let h = h / 2 in if i <= h then tree_get h t1 (i - 1) else tree_get h t2 (i - h - 1) let rec get l i = match l with | Nil -> oob () | Cons (h, t, rem) -> if i < h then tree_get h t i else get rem (i - h) let length l = let rec length accu = function | Nil -> accu | Cons (h, _, l) -> length (h + accu) l in length 0 l let rec tree_map f = function | Leaf x -> Leaf (f x) | Node (x, t1, t2) -> Node (f x, tree_map f t1, tree_map f t2) let rec map f = function | Nil -> Nil | Cons (h, t, l) -> Cons (h, tree_map f t, map f l) let rec tree_fold_left f accu = function | Leaf x -> f accu x | Node (x, t1, t2) -> tree_fold_left f (tree_fold_left f (f accu x) t1) t2 let rec fold_left f accu = function | Nil -> accu | Cons (_, t, l) -> fold_left f (tree_fold_left f accu t) l let rec tree_fold_right f t accu = match t with | Leaf x -> f x accu | Node (x, t1, t2) -> f x (tree_fold_right f t1 (tree_fold_right f t2 accu)) let rec fold_right f l accu = match l with | Nil -> accu | Cons (_, t, l) -> tree_fold_right f t (fold_right f l accu) let hd = function | Nil -> failwith "hd" | Cons (_, Leaf x, _) -> x | Cons (_, Node (x, _, _), _) -> x let tl = function | Nil -> failwith "tl" | Cons (_, Leaf _, l) -> l | Cons (h, Node (_, t1, t2), l) -> let h = h / 2 in Cons (h, t1, Cons (h, t2, l)) let rec skipn n l = if n = 0 then l else if is_empty l then failwith "List.skipn" else skipn (pred n) (tl l) coq-8.15.0/clib/range.mli000066400000000000000000000024321417001151100150670ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a t -> 'a t (** {5 List operations} *) val is_empty : 'a t -> bool val length : 'a t -> int val map : ('a -> 'b) -> 'a t -> 'b t val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a val fold_right : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b val hd : 'a t -> 'a val tl : 'a t -> 'a t val skipn : int -> 'a t -> 'a t (** {5 Indexing operations} *) val get : 'a t -> int -> 'a coq-8.15.0/clib/segmenttree.ml000066400000000000000000000122241417001151100161440ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* () | x :: xs -> f i x; loop (i + 1) xs in loop 0 l let log2 x = log x /. log 2. let log2n x = int_of_float (ceil (log2 (float_of_int x))) (** We focus on integers but this module can be generalized. *) type elt = int (** A value of type [domain] is interpreted differently given its position in the tree. On internal nodes, a domain represents the set of integers which are _not_ in the set of keys handled by the tree. On leaves, a domain represents the st of integers which are in the set of keys. 
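    (Added example: with the interpretation of [Interval] given below, an
     internal node carrying [Interval (3, 7)] stands for the gap 4..6 between
     handled keys, whereas a leaf carrying [Interval (3, 7)] stands for the
     handled range 3..7.)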
*) type domain = | Interval of elt * elt (** On internal nodes, a domain [Interval (a, b)] represents the interval [a + 1; b - 1]. On leaves, it represents [a; b]. We always have [a] <= [b]. *) | Universe (** On internal node or root, a domain [Universe] represents all the integers. When the tree is not a trivial root, [Universe] has no interpretation on leaves. (The lookup function should never reach the leaves.) *) (** We use an array to store the almost complete tree. This array contains at least one element. *) type 'a t = (domain * 'a option) array (** The root is the first item of the array. *) (** Standard layout for left child. *) let left_child i = 2 * i + 1 (** Standard layout for right child. *) let right_child i = 2 * i + 2 (** Extract the annotation of a node, be it internal or a leaf. *) let value_of i t = match t.(i) with (_, Some x) -> x | _ -> raise Not_found (** Initialize the array to store [n] leaves. *) let create n init = Array.make (1 lsl (log2n n + 1) - 1) init (** Make a complete interval tree from a list of disjoint segments. Precondition : the segments must be sorted. *) let make segments = let nsegments = List.length segments in let tree = create nsegments (Universe, None) in let leaves_offset = (1 lsl (log2n nsegments)) - 1 in (* The algorithm proceeds in two steps using an intermediate tree to store minimum and maximum of each subtree as annotation of the node. *) (* We start from leaves: the last level of the tree is initialized with the given segments... *) list_iteri (fun i ((start, stop), value) -> let k = leaves_offset + i in let i = Interval (start, stop) in tree.(k) <- (i, Some i)) segments; (* ... the remaining leaves are initialized with neutral information. *) for k = leaves_offset + nsegments to Array.length tree -1 do tree.(k) <- (Universe, Some Universe) done; (* We traverse the tree bottom-up and compute the interval and annotation associated to each node from the annotations of its children. *) for k = leaves_offset - 1 downto 0 do let node, annotation = match value_of (left_child k) tree, value_of (right_child k) tree with | Interval (left_min, left_max), Interval (right_min, right_max) -> (Interval (left_max, right_min), Interval (left_min, right_max)) | Interval (min, max), Universe -> (Interval (max, max), Interval (min, max)) | Universe, Universe -> Universe, Universe | Universe, _ -> assert false in tree.(k) <- (node, Some annotation) done; (* Finally, annotation are replaced with the image related to each leaf. *) let final_tree = Array.mapi (fun i (segment, value) -> (segment, None)) tree in list_iteri (fun i ((start, stop), value) -> final_tree.(leaves_offset + i) <- (Interval (start, stop), Some value)) segments; final_tree (** [lookup k t] looks for an image for key [k] in the interval tree [t]. Raise [Not_found] if it fails. 
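    (Added illustration: for [t = make [((0, 4), "a"); ((10, 12), "b")]],
     [lookup 11 t] returns ["b"], while [lookup 7 t] raises [Not_found]
     since 7 lies in the gap between the two segments.)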
*) let lookup k t = let i = ref 0 in while (snd t.(!i) = None) do match fst t.(!i) with | Interval (start, stop) -> if k <= start then i := left_child !i else if k >= stop then i:= right_child !i else raise Not_found | Universe -> raise Not_found done; match fst t.(!i) with | Interval (start, stop) -> if k >= start && k <= stop then match snd t.(!i) with | Some v -> v | None -> assert false else raise Not_found | Universe -> assert false coq-8.15.0/clib/segmenttree.mli000066400000000000000000000025431417001151100163200ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a t (** [lookup k t] looks for an image for key [k] in the interval tree [t]. Raise [Not_found] if it fails. *) val lookup : int -> 'a t -> 'a coq-8.15.0/clib/store.ml000066400000000000000000000032731417001151100147620ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a field val empty : t val set : t -> 'a field -> 'a -> t val get : t -> 'a field -> 'a option val remove : t -> 'a field -> t val merge : t -> t -> t end module Make() : S = struct module Dyn = Dyn.Make() module Map = Dyn.Map(struct type 'a t = 'a end) type t = Map.t type 'a field = 'a Dyn.tag let next = ref 0 let field () = let f = Dyn.anonymous !next in incr next; f let empty = Map.empty let set s f v = Map.add f v s let get s f = try Some (Map.find f s) with Not_found -> None let remove s f = Map.remove f s let merge s1 s2 = Map.fold (fun (Map.Any (f, v)) s -> Map.add f v s) s1 s2 end coq-8.15.0/clib/store.mli000066400000000000000000000025631417001151100151340ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a field (** Create a new field *) val empty : t (** Empty store *) val set : t -> 'a field -> 'a -> t (** Set a field *) val get : t -> 'a field -> 'a option (** Get the value of a field, if any *) val remove : t -> 'a field -> t (** Unset the value of the field *) val merge : t -> t -> t (** [merge s1 s2] adds all the fields of [s1] into [s2]. *) end module Make() : S (** Create a new store type. 
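    A minimal usage sketch (added illustration; the type annotation on the
    field is only there for readability):
    {[
      module St = Store.Make ()
      let count : int St.field = St.field ()
      let s = St.set St.empty count 41
      let some41 = St.get s count          (* Some 41 *)
      let none = St.get St.empty count     (* None *)
    ]}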
*) coq-8.15.0/clib/terminal.ml000066400000000000000000000176461417001151100154520ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* o2 | Some _ -> match o2 with | None -> o1 | Some _ -> o2 let default = { fg_color = None; bg_color = None; bold = None; italic = None; underline = None; negative = None; prefix = None; suffix = None; } let reset = "\027[0m" let reset_style = { fg_color = Some `DEFAULT; bg_color = Some `DEFAULT; bold = Some false; italic = Some false; underline = Some false; negative = Some false; prefix = None; suffix = None; } let make ?fg_color ?bg_color ?bold ?italic ?underline ?negative ?style ?prefix ?suffix () = let st = match style with | None -> default | Some st -> st in { fg_color = set st.fg_color fg_color; bg_color = set st.bg_color bg_color; bold = set st.bold bold; italic = set st.italic italic; underline = set st.underline underline; negative = set st.negative negative; prefix = set st.prefix prefix; suffix = set st.suffix suffix; } let merge s1 s2 = { fg_color = set s1.fg_color s2.fg_color; bg_color = set s1.bg_color s2.bg_color; bold = set s1.bold s2.bold; italic = set s1.italic s2.italic; underline = set s1.underline s2.underline; negative = set s1.negative s2.negative; prefix = set s1.prefix s2.prefix; suffix = set s1.suffix s2.suffix; } let diff s1 s2 = let diff_op o1 o2 reset_val = match o1 with | None -> o2 | Some _ -> match o2 with | None -> reset_val | Some _ -> if o1 = o2 then None else o2 in { fg_color = diff_op s1.fg_color s2.fg_color reset_style.fg_color; bg_color = diff_op s1.bg_color s2.bg_color reset_style.bg_color; bold = diff_op s1.bold s2.bold reset_style.bold; italic = diff_op s1.italic s2.italic reset_style.italic; underline = diff_op s1.underline s2.underline reset_style.underline; negative = diff_op s1.negative s2.negative reset_style.negative; prefix = diff_op s1.prefix s2.prefix reset_style.prefix; suffix = diff_op s1.suffix s2.suffix reset_style.suffix; } let base_color = function | `DEFAULT -> 9 | `BLACK -> 0 | `RED -> 1 | `GREEN -> 2 | `YELLOW -> 3 | `BLUE -> 4 | `MAGENTA -> 5 | `CYAN -> 6 | `WHITE -> 7 | `LIGHT_BLACK -> 0 | `LIGHT_RED -> 1 | `LIGHT_GREEN -> 2 | `LIGHT_YELLOW -> 3 | `LIGHT_BLUE -> 4 | `LIGHT_MAGENTA -> 5 | `LIGHT_CYAN -> 6 | `LIGHT_WHITE -> 7 | _ -> invalid_arg "base_color" let extended_color off = function | `INDEX i -> [off + 8; 5; i] | `RGB (r, g, b) -> [off + 8; 2; r; g; b] | _ -> invalid_arg "extended_color" let is_light = function | `LIGHT_BLACK | `LIGHT_RED | `LIGHT_GREEN | `LIGHT_YELLOW | `LIGHT_BLUE | `LIGHT_MAGENTA | `LIGHT_CYAN | `LIGHT_WHITE -> true | _ -> false let is_extended = function | `INDEX _ | `RGB _ -> true | _ -> false let repr st = let fg = match st.fg_color with | None -> [] | Some c -> if is_light c then [90 + base_color c] else if is_extended c then extended_color 30 c else [30 + base_color c] in let bg = match st.bg_color with | None -> [] | Some c -> if is_light c then [100 + base_color c] else if is_extended c then extended_color 40 c else [40 + base_color c] in let bold = match st.bold with | None -> [] | Some true -> [1] | Some false -> [22] in let italic = match st.italic with | None -> [] | Some true -> [3] | Some false -> [23] in let underline = match st.underline with | None -> [] | Some true -> [4] | Some false -> [24] in let negative = match st.negative with | None -> [] | Some true -> [7] | Some false -> [27] in fg @ bg 
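  (* Added note: each of the lists concatenated here holds the numeric SGR
     parameters of one attribute; [eval] below joins them with ';' and wraps
     them in an escape sequence, so a red and bold foreground is rendered as
     "\027[31;1m". *)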
@ bold @ italic @ underline @ negative let eval st = let tags = repr st in let tags = List.map string_of_int tags in if List.length tags = 0 then "" else Printf.sprintf "\027[%sm" (String.concat ";" tags) let has_style t = Unix.isatty t && Sys.os_type = "Unix" let split c s = let len = String.length s in let rec split n = try let pos = String.index_from s n c in let dir = String.sub s n (pos-n) in dir :: split (succ pos) with | Not_found -> [String.sub s n (len-n)] in if len = 0 then [] else split 0 let check_char i = if i < 0 || i > 255 then invalid_arg "check_char" let parse_color off rem = match off with | 0 -> (`BLACK, rem) | 1 -> (`RED, rem) | 2 -> (`GREEN, rem) | 3 -> (`YELLOW, rem) | 4 -> (`BLUE, rem) | 5 -> (`MAGENTA, rem) | 6 -> (`CYAN, rem) | 7 -> (`WHITE, rem) | 9 -> (`DEFAULT, rem) | 8 -> begin match rem with | 5 :: i :: rem -> check_char i; (`INDEX i, rem) | 2 :: r :: g :: b :: rem -> check_char r; check_char g; check_char b; (`RGB (r, g, b), rem) | _ -> invalid_arg "parse_color" end | _ -> invalid_arg "parse_color" let set_light = function | `BLACK -> `LIGHT_BLACK | `RED -> `LIGHT_RED | `GREEN -> `LIGHT_GREEN | `YELLOW -> `LIGHT_YELLOW | `BLUE -> `LIGHT_BLUE | `MAGENTA -> `LIGHT_MAGENTA | `CYAN -> `LIGHT_CYAN | `WHITE -> `LIGHT_WHITE | _ -> invalid_arg "parse_color" let rec parse_style style = function | [] -> style | 0 :: rem -> let style = merge style reset_style in parse_style style rem | 1 :: rem -> let style = make ~style ~bold:true () in parse_style style rem | 3 :: rem -> let style = make ~style ~italic:true () in parse_style style rem | 4 :: rem -> let style = make ~style ~underline:true () in parse_style style rem | 7 :: rem -> let style = make ~style ~negative:true () in parse_style style rem | 22 :: rem -> let style = make ~style ~bold:false () in parse_style style rem | 23 :: rem -> let style = make ~style ~italic:false () in parse_style style rem | 24 :: rem -> let style = make ~style ~underline:false () in parse_style style rem | 27 :: rem -> let style = make ~style ~negative:false () in parse_style style rem | code :: rem when (30 <= code && code < 40) -> let color, rem = parse_color (code mod 10) rem in let style = make ~style ~fg_color:color () in parse_style style rem | code :: rem when (40 <= code && code < 50) -> let color, rem = parse_color (code mod 10) rem in let style = make ~style ~bg_color:color () in parse_style style rem | code :: rem when (90 <= code && code < 100) -> let color, rem = parse_color (code mod 10) rem in let style = make ~style ~fg_color:(set_light color) () in parse_style style rem | code :: rem when (100 <= code && code < 110) -> let color, rem = parse_color (code mod 10) rem in let style = make ~style ~bg_color:(set_light color) () in parse_style style rem | _ :: rem -> parse_style style rem (** Parse LS_COLORS-like strings *) let parse s = let defs = split ':' s in let fold accu s = match split '=' s with | [name; attrs] -> let attrs = split ';' attrs in let accu = try let attrs = List.map int_of_string attrs in let attrs = parse_style (make ()) attrs in (name, attrs) :: accu with _ -> accu in accu | _ -> accu in List.fold_left fold [] defs coq-8.15.0/clib/terminal.mli000066400000000000000000000044011417001151100156040ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* ?bg_color:color -> ?bold:bool -> ?italic:bool -> ?underline:bool -> ?negative:bool -> ?style:style -> 
?prefix:string -> ?suffix:string -> unit -> style (** Create a style from the given flags. It is derived from the optional [style] argument if given. *) val merge : style -> style -> style (** [merge s1 s2] returns [s1] with all defined values of [s2] overwritten. *) val diff : style -> style -> style (** [diff s1 s2] returns the differences between [s1] and [s2]. *) val repr : style -> int list (** Generate the ANSI code representing the given style. *) val eval : style -> string (** Generate an escape sequence from a style. *) val reset : string (** This escape sequence resets all attributes. *) val reset_style : style (** The default style *) val has_style : Unix.file_descr -> bool (** Whether an output file descriptor handles styles. Very heuristic, only checks it is a terminal. *) val parse : string -> (string * style) list (** Parse strings describing terminal styles in the LS_COLORS syntax. For robustness, ignore meaningless entries and drops undefined styles. *) coq-8.15.0/clib/trie.ml000066400000000000000000000047431417001151100145740ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* data val next : t -> label -> t val labels : t -> label list val add : label list -> data -> t -> t val remove : label list -> data -> t -> t val iter : (label list -> data -> unit) -> t -> unit end module type Grp = sig type t val nil : t val is_nil : t -> bool val add : t -> t -> t val sub : t -> t -> t end module Make (Y : Map.OrderedType) (X : Grp) = struct module T_codom = Map.Make(Y) type data = X.t type label = Y.t type t = Node of X.t * t T_codom.t let codom_for_all f m = let fold key v accu = f v && accu in T_codom.fold fold m true let empty = Node (X.nil, T_codom.empty) let next (Node (_,m)) lbl = T_codom.find lbl m let get (Node (hereset,_)) = hereset let labels (Node (_,m)) = (* FIXME: this is order-dependent. Try to find a more robust presentation? *) List.rev (T_codom.fold (fun x _ acc -> x::acc) m []) let is_empty_node (Node(a,b)) = (X.is_nil a) && (T_codom.is_empty b) let assure_arc m lbl = if T_codom.mem lbl m then m else T_codom.add lbl (Node (X.nil,T_codom.empty)) m let cleanse_arcs (Node (hereset,m)) = let m = if codom_for_all is_empty_node m then T_codom.empty else m in Node(hereset, m) let rec at_path f (Node (hereset,m)) = function | [] -> cleanse_arcs (Node(f hereset,m)) | h::t -> let m = assure_arc m h in cleanse_arcs (Node(hereset, T_codom.add h (at_path f (T_codom.find h m) t) m)) let add path v tm = at_path (fun hereset -> X.add v hereset) tm path let remove path v tm = at_path (fun hereset -> X.sub hereset v) tm path let iter f tlm = let rec apprec pfx (Node(hereset,m)) = let path = List.rev pfx in f path hereset; T_codom.iter (fun l tm -> apprec (l::pfx) tm) m in apprec [] tlm end coq-8.15.0/clib/trie.mli000066400000000000000000000040121417001151100147320ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* data (** Get the data at the current node. *) val next : t -> label -> t (** [next t lbl] returns the subtrie of [t] pointed by [lbl]. @raise Not_found if there is none. *) val labels : t -> label list (** Get the list of defined labels at the current node. *) val add : label list -> data -> t -> t (** [add t path v] adds [v] at path [path] in [t]. 
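    (Added note: per the signature above the trie comes last, so the actual
     call is [add path v t]; the same convention holds for [remove] below.)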
*) val remove : label list -> data -> t -> t (** [remove t path v] removes [v] from path [path] in [t]. *) val iter : (label list -> data -> unit) -> t -> unit (** Apply a function to all contents. *) end module type Grp = sig type t val nil : t val is_nil : t -> bool val add : t -> t -> t val sub : t -> t -> t end module Make (Label : Set.OrderedType) (Data : Grp) : S with type label = Label.t and type data = Data.t (** Generating functor, for a given type of labels and data. *) coq-8.15.0/clib/unicode.ml000066400000000000000000000341131417001151100152510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 1 lsl ((i land 7) * 3) (* 001 *) | IdentPart -> 2 lsl ((i land 7) * 3) (* 010 *) | Symbol -> 3 lsl ((i land 7) * 3) (* 011 *) | IdentSep -> 4 lsl ((i land 7) * 3) (* 100 *) | Unknown -> 0 lsl ((i land 7) * 3) (* 000 *) (* Helper to reset 3 bits in a word. *) let reset_mask i = lnot (7 lsl ((i land 7) * 3)) (* Initialize the lookup table from a list of segments, assigning a status to every character of each segment. The order of these assignments is relevant: it is possible to assign status [s] to a segment [(c1, c2)] and later assign [s'] to [c] even if [c] is between [c1] and [c2]. *) let mk_lookup_table_from_unicode_tables_for status tables = List.iter (List.iter (fun (c1, c2) -> for i = c1 to c2 do table.(i lsr 3) <- (table.(i lsr 3) land (reset_mask i)) lor (mask i status) done)) tables (* Look up into the table and interpret the found pattern. *) let lookup x = let v = (table.(x lsr 3) lsr ((x land 7) * 3)) land 7 in if v = 1 then Letter else if v = 2 then IdentPart else if v = 3 then Symbol else if v = 4 then IdentSep else Unknown (* [classify] discriminates between 5 different kinds of symbols based on the standard unicode classification (extracted from Camomile). *) let classify = let single c = [ (c, c) ] in (* General tables. *) mk_lookup_table_from_unicode_tables_for Symbol [ Unicodetable.sm; (* Symbol, maths. *) Unicodetable.sc; (* Symbol, currency. *) Unicodetable.so; (* Symbol, modifier. *) Unicodetable.pd; (* Punctuation, dash. *) Unicodetable.pc; (* Punctuation, connector. *) Unicodetable.pe; (* Punctuation, open. *) Unicodetable.ps; (* Punctution, close. *) Unicodetable.pi; (* Punctuation, initial quote. *) Unicodetable.pf; (* Punctuation, final quote. *) Unicodetable.po; (* Punctuation, other. *) ]; mk_lookup_table_from_unicode_tables_for Letter [ Unicodetable.lu; (* Letter, uppercase. *) Unicodetable.ll; (* Letter, lowercase. *) Unicodetable.lt; (* Letter, titlecase. *) Unicodetable.lo; (* Letter, others. *) Unicodetable.lm; (* Letter, modifier. *) ]; mk_lookup_table_from_unicode_tables_for IdentPart [ Unicodetable.nd; (* Number, decimal digits. *) Unicodetable.nl; (* Number, letter. *) Unicodetable.no; (* Number, other. *) ]; (* Workaround. Some characters seems to be missing in Camomile's category tables. We add them manually. *) mk_lookup_table_from_unicode_tables_for Letter [ [(0x01D00, 0x01D7F)]; (* Phonetic Extensions. *) [(0x01D80, 0x01DBF)]; (* Phonetic Extensions Suppl. *) [(0x01DC0, 0x01DFF)]; (* Combining Diacritical Marks Suppl.*) ]; (* Exceptions (from a previous version of this function). *) mk_lookup_table_from_unicode_tables_for Symbol [ [(0x000B2, 0x000B3)]; (* Superscript 2-3. *) single 0x000B9; (* Superscript 1. *) single 0x02070; (* Superscript 0. 
*) [(0x02074, 0x02079)]; (* Superscript 4-9. *) single 0x0002E; (* Dot. *) ]; mk_lookup_table_from_unicode_tables_for IdentSep [ single 0x005F; (* Underscore. *) single 0x00A0; (* Non breaking space. *) ]; mk_lookup_table_from_unicode_tables_for IdentPart [ single 0x0027; (* Single quote. *) ]; (* Lookup *) lookup exception End_of_input let utf8_of_unicode n = if n < 128 then String.make 1 (Char.chr n) else let (m,s) = if n < 2048 then (2,192) else if n < 65536 then (3,224) else (4,240) in String.init m (fun i -> let j = (n lsr ((m - 1 - i) * 6)) land 63 in Char.chr (j + if i = 0 then s else 128)) (* If [s] is some UTF-8 encoded string and [i] is a position of some UTF-8 character within [s] then [next_utf8 s i] returns [(j,n)] where: - [j] indicates the position of the next UTF-8 character - [n] represents the UTF-8 character at index [i] *) let next_utf8 s i = let err () = invalid_arg "utf8" in let l = String.length s - i in if l = 0 then raise End_of_input else let a = Char.code s.[i] in if a <= 0x7F then 1, a else if a land 0x40 = 0 || l = 1 then err () else let b = Char.code s.[i+1] in if b land 0xC0 <> 0x80 then err () else if a land 0x20 = 0 then 2, (a land 0x1F) lsl 6 + (b land 0x3F) else if l = 2 then err () else let c = Char.code s.[i+2] in if c land 0xC0 <> 0x80 then err () else if a land 0x10 = 0 then 3, (a land 0x0F) lsl 12 + (b land 0x3F) lsl 6 + (c land 0x3F) else if l = 3 then err () else let d = Char.code s.[i+3] in if d land 0xC0 <> 0x80 then err () else if a land 0x08 = 0 then 4, (a land 0x07) lsl 18 + (b land 0x3F) lsl 12 + (c land 0x3F) lsl 6 + (d land 0x3F) else err () let is_utf8 s = let rec check i = let (off, _) = next_utf8 s i in check (i + off) in try check 0 with End_of_input -> true | Invalid_argument _ -> false (* Escape string if it contains non-utf8 characters *) let escaped_non_utf8 s = let mk_escape x = Printf.sprintf "%%%X" x in let buff = Buffer.create (String.length s * 3) in let rec process_trailing_aux i j = if i = j then i else match String.unsafe_get s i with | '\128'..'\191' -> process_trailing_aux (i+1) j | _ -> i in let process_trailing i n = let j = if i+n-1 >= String.length s then i+1 else process_trailing_aux (i+1) (i+n) in (if j = i+n then Buffer.add_string buff (String.sub s i n) else let v = Array.init (j-i) (fun k -> mk_escape (Char.code s.[i+k])) in Buffer.add_string buff (String.concat "" (Array.to_list v))); j in let rec process i = if i >= String.length s then Buffer.contents buff else let c = String.unsafe_get s i in match c with | '\000'..'\127' -> Buffer.add_char buff c; process (i+1) | '\128'..'\191' | '\248'..'\255' -> Buffer.add_string buff (mk_escape (Char.code c)); process (i+1) | '\192'..'\223' -> process (process_trailing i 2) | '\224'..'\239' -> process (process_trailing i 3) | '\240'..'\247' -> process (process_trailing i 4) in process 0 let escaped_if_non_utf8 s = if is_utf8 s then s else escaped_non_utf8 s (* Check the well-formedness of an identifier *) let is_valid_ident_initial = function | Letter | IdentSep -> true | IdentPart | Symbol | Unknown -> false let initial_refutation j n s = if is_valid_ident_initial (classify n) then None else let c = String.sub s 0 j in Some (false, "Invalid character '"^c^"' at beginning of identifier \""^s^"\".") let is_valid_ident_trailing = function | Letter | IdentSep | IdentPart -> true | Symbol | Unknown -> false let trailing_refutation i j n s = if is_valid_ident_trailing (classify n) then None else let c = String.sub s i j in Some (false, "Invalid character '"^c^"' in identifier 
\""^s^"\".") let is_unknown = function | Unknown -> true | Letter | IdentSep | IdentPart | Symbol -> false let is_ident_part = function | IdentPart -> true | Letter | IdentSep | Symbol | Unknown -> false let is_ident_sep = function | IdentSep -> true | Letter | IdentPart | Symbol | Unknown -> false let ident_refutation s = if s = ".." then None else try let j, n = next_utf8 s 0 in match initial_refutation j n s with |None -> begin try let rec aux i = let j, n = next_utf8 s i in match trailing_refutation i j n s with |None -> aux (i + j) |x -> x in aux j with End_of_input -> None end |x -> x with | End_of_input -> Some (true,"The empty string is not an identifier.") | Invalid_argument _ -> Some (true,escaped_non_utf8 s^": invalid utf8 sequence.") let lowercase_unicode = let tree = Segmenttree.make Unicodetable.to_lower in fun unicode -> try match Segmenttree.lookup unicode tree with | `Abs c -> c | `Delta d -> unicode + d with Not_found -> unicode let lowercase_first_char s = assert (s <> ""); let j, n = next_utf8 s 0 in utf8_of_unicode (lowercase_unicode n) let split_at_first_letter s = let n, v = next_utf8 s 0 in if ((* optim *) n = 1 && s.[0] != '_') || not (is_ident_sep (classify v)) then None else begin let n = ref n in let p = ref 0 in while !n < String.length s && let n', v = next_utf8 s !n in p := n'; (* Test if not letter *) ((* optim *) n' = 1 && (s.[!n] = '_' || s.[!n] = '\'')) || let st = classify v in is_ident_sep st || is_ident_part st do n := !n + !p done; let s1 = String.sub s 0 !n in let s2 = String.sub s !n (String.length s - !n) in Some (s1,s2) end (** For extraction, we need to encode unicode character into ascii ones *) let is_basic_ascii s = let ok = ref true in String.iter (fun c -> if Char.code c >= 128 then ok := false) s; !ok let ascii_of_ident s = let len = String.length s in let has_UU i = i+2 < len && s.[i]='_' && s.[i+1]='U' && s.[i+2]='U' in let i = ref 0 in while !i < len && Char.code s.[!i] < 128 && not (has_UU !i) do incr i done; if !i = len then s else let out = Buffer.create (2*len) in Buffer.add_substring out s 0 !i; while !i < len do let j, n = next_utf8 s !i in if n >= 128 then (Printf.bprintf out "_UU%04x_" n; i := !i + j) else if has_UU !i then (Buffer.add_string out "_UUU"; i := !i + 3) else (Buffer.add_char out s.[!i]; incr i) done; Buffer.contents out (* Compute length of an UTF-8 encoded string Rem 1 : utf8_length <= String.length (equal if pure ascii) Rem 2 : if used for an iso8859_1 encoded string, the result is wrong in very rare cases. 
Such a wrong case corresponds to any sequence of a character in range 192..253 immediately followed by a character in range 128..191 (typical case in french is "déçu" which is counted 3 instead of 4); then no real harm to use always utf8_length even if using an iso8859_1 encoding *) (** FIXME: duplicate code with Pp *) let utf8_length s = let len = String.length s and cnt = ref 0 and nc = ref 0 and p = ref 0 in while !p < len do begin match s.[!p] with | '\000'..'\127' -> nc := 0 (* ascii char *) | '\128'..'\191' -> nc := 0 (* cannot start with a continuation byte *) | '\192'..'\223' -> nc := 1 (* expect 1 continuation byte *) | '\224'..'\239' -> nc := 2 (* expect 2 continuation bytes *) | '\240'..'\247' -> nc := 3 (* expect 3 continuation bytes *) | '\248'..'\255' -> nc := 0 (* invalid byte *) end ; incr p ; while !p < len && !nc > 0 do match s.[!p] with | '\128'..'\191' (* next continuation byte *) -> incr p ; decr nc | _ (* not a continuation byte *) -> nc := 0 done ; incr cnt done ; !cnt (* Variant of String.sub for UTF8 character positions *) let utf8_sub s start_u len_u = let len_b = String.length s and end_u = start_u + len_u and cnt = ref 0 and nc = ref 0 and p = ref 0 in let start_b = ref len_b in while !p < len_b && !cnt < end_u do if !cnt <= start_u then start_b := !p ; begin match s.[!p] with | '\000'..'\127' -> nc := 0 (* ascii char *) | '\128'..'\191' -> nc := 0 (* cannot start with a continuation byte *) | '\192'..'\223' -> nc := 1 (* expect 1 continuation byte *) | '\224'..'\239' -> nc := 2 (* expect 2 continuation bytes *) | '\240'..'\247' -> nc := 3 (* expect 3 continuation bytes *) | '\248'..'\255' -> nc := 0 (* invalid byte *) end ; incr p ; while !p < len_b && !nc > 0 do match s.[!p] with | '\128'..'\191' (* next continuation byte *) -> incr p ; decr nc | _ (* not a continuation byte *) -> nc := 0 done ; incr cnt done ; let end_b = !p in String.sub s !start_b (end_b - !start_b) coq-8.15.0/clib/unicode.mli000066400000000000000000000050361417001151100154240ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* status (** Return [None] if a given string can be used as a (Coq) identifier. Return [Some (b,s)] otherwise, where [s] is an explanation and [b] is severity. *) val ident_refutation : string -> (bool * string) option (** Tells if a valid initial character for an identifier *) val is_valid_ident_initial : status -> bool (** Tells if a valid non-initial character for an identifier *) val is_valid_ident_trailing : status -> bool (** Tells if a character is unclassified *) val is_unknown : status -> bool (** First char of a string, converted to lowercase @raise Assert_failure if the input string is empty. *) val lowercase_first_char : string -> string (** Split a string supposed to be an ident at the first letter; as an optimization, return None if the first character is a letter *) val split_at_first_letter : string -> (string * string) option (** Return [true] if all UTF-8 characters in the input string are just plain ASCII characters. Returns [false] otherwise. *) val is_basic_ascii : string -> bool (** [ascii_of_ident s] maps UTF-8 string to a string composed solely from ASCII characters. The non-ASCII characters are translated to ["_UUxxxx_"] where {i xxxx} is the Unicode index of the character in hexadecimal (from four to six hex digits). 
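    For instance (added illustration), the identifier ["über"] is mapped to
    ["_UU00fc_ber"]: [ü] is U+00FC, while the remaining characters are plain
    ASCII and kept as they are.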
To avoid potential name clashes, any preexisting substring ["_UU"] is turned into ["_UUU"]. *) val ascii_of_ident : string -> string (** Validate an UTF-8 string *) val is_utf8 : string -> bool (** Return the length of a valid UTF-8 string. *) val utf8_length : string -> int (** Variant of {!String.sub} for UTF-8 strings. *) val utf8_sub : string -> int -> int -> string (** Return a "%XX"-escaped string if it contains non UTF-8 characters. *) val escaped_if_non_utf8 : string -> string coq-8.15.0/clib/unicodetable.ml000066400000000000000000006254751417001151100163020ustar00rootroot00000000000000(** Unicode tables generated using UUCD. *) (* Letter, Uppercase *) let lu = [ (0x00041,0x0005A); (0x000C0,0x000D6); (0x000D8,0x000DE); (0x00100,0x00100); (0x00102,0x00102); (0x00104,0x00104); (0x00106,0x00106); (0x00108,0x00108); (0x0010A,0x0010A); (0x0010C,0x0010C); (0x0010E,0x0010E); (0x00110,0x00110); (0x00112,0x00112); (0x00114,0x00114); (0x00116,0x00116); (0x00118,0x00118); (0x0011A,0x0011A); (0x0011C,0x0011C); (0x0011E,0x0011E); (0x00120,0x00120); (0x00122,0x00122); (0x00124,0x00124); (0x00126,0x00126); (0x00128,0x00128); (0x0012A,0x0012A); (0x0012C,0x0012C); (0x0012E,0x0012E); (0x00130,0x00130); (0x00132,0x00132); (0x00134,0x00134); (0x00136,0x00136); (0x00139,0x00139); (0x0013B,0x0013B); (0x0013D,0x0013D); (0x0013F,0x0013F); (0x00141,0x00141); (0x00143,0x00143); (0x00145,0x00145); (0x00147,0x00147); (0x0014A,0x0014A); (0x0014C,0x0014C); (0x0014E,0x0014E); (0x00150,0x00150); (0x00152,0x00152); (0x00154,0x00154); (0x00156,0x00156); (0x00158,0x00158); (0x0015A,0x0015A); (0x0015C,0x0015C); (0x0015E,0x0015E); (0x00160,0x00160); (0x00162,0x00162); (0x00164,0x00164); (0x00166,0x00166); (0x00168,0x00168); (0x0016A,0x0016A); (0x0016C,0x0016C); (0x0016E,0x0016E); (0x00170,0x00170); (0x00172,0x00172); (0x00174,0x00174); (0x00176,0x00176); (0x00178,0x00179); (0x0017B,0x0017B); (0x0017D,0x0017D); (0x00181,0x00182); (0x00184,0x00184); (0x00186,0x00187); (0x00189,0x0018B); (0x0018E,0x00191); (0x00193,0x00194); (0x00196,0x00198); (0x0019C,0x0019D); (0x0019F,0x001A0); (0x001A2,0x001A2); (0x001A4,0x001A4); (0x001A6,0x001A7); (0x001A9,0x001A9); (0x001AC,0x001AC); (0x001AE,0x001AF); (0x001B1,0x001B3); (0x001B5,0x001B5); (0x001B7,0x001B8); (0x001BC,0x001BC); (0x001C4,0x001C4); (0x001C7,0x001C7); (0x001CA,0x001CA); (0x001CD,0x001CD); (0x001CF,0x001CF); (0x001D1,0x001D1); (0x001D3,0x001D3); (0x001D5,0x001D5); (0x001D7,0x001D7); (0x001D9,0x001D9); (0x001DB,0x001DB); (0x001DE,0x001DE); (0x001E0,0x001E0); (0x001E2,0x001E2); (0x001E4,0x001E4); (0x001E6,0x001E6); (0x001E8,0x001E8); (0x001EA,0x001EA); (0x001EC,0x001EC); (0x001EE,0x001EE); (0x001F1,0x001F1); (0x001F4,0x001F4); (0x001F6,0x001F8); (0x001FA,0x001FA); (0x001FC,0x001FC); (0x001FE,0x001FE); (0x00200,0x00200); (0x00202,0x00202); (0x00204,0x00204); (0x00206,0x00206); (0x00208,0x00208); (0x0020A,0x0020A); (0x0020C,0x0020C); (0x0020E,0x0020E); (0x00210,0x00210); (0x00212,0x00212); (0x00214,0x00214); (0x00216,0x00216); (0x00218,0x00218); (0x0021A,0x0021A); (0x0021C,0x0021C); (0x0021E,0x0021E); (0x00220,0x00220); (0x00222,0x00222); (0x00224,0x00224); (0x00226,0x00226); (0x00228,0x00228); (0x0022A,0x0022A); (0x0022C,0x0022C); (0x0022E,0x0022E); (0x00230,0x00230); (0x00232,0x00232); (0x0023A,0x0023B); (0x0023D,0x0023E); (0x00241,0x00241); (0x00243,0x00246); (0x00248,0x00248); (0x0024A,0x0024A); (0x0024C,0x0024C); (0x0024E,0x0024E); (0x00370,0x00370); (0x00372,0x00372); (0x00376,0x00376); (0x0037F,0x0037F); (0x00386,0x00386); (0x00388,0x0038A); (0x0038C,0x0038C); 
(0x0038E,0x0038F); (0x00391,0x003A1); (0x003A3,0x003AB); (0x003CF,0x003CF); (0x003D2,0x003D4); (0x003D8,0x003D8); (0x003DA,0x003DA); (0x003DC,0x003DC); (0x003DE,0x003DE); (0x003E0,0x003E0); (0x003E2,0x003E2); (0x003E4,0x003E4); (0x003E6,0x003E6); (0x003E8,0x003E8); (0x003EA,0x003EA); (0x003EC,0x003EC); (0x003EE,0x003EE); (0x003F4,0x003F4); (0x003F7,0x003F7); (0x003F9,0x003FA); (0x003FD,0x0042F); (0x00460,0x00460); (0x00462,0x00462); (0x00464,0x00464); (0x00466,0x00466); (0x00468,0x00468); (0x0046A,0x0046A); (0x0046C,0x0046C); (0x0046E,0x0046E); (0x00470,0x00470); (0x00472,0x00472); (0x00474,0x00474); (0x00476,0x00476); (0x00478,0x00478); (0x0047A,0x0047A); (0x0047C,0x0047C); (0x0047E,0x0047E); (0x00480,0x00480); (0x0048A,0x0048A); (0x0048C,0x0048C); (0x0048E,0x0048E); (0x00490,0x00490); (0x00492,0x00492); (0x00494,0x00494); (0x00496,0x00496); (0x00498,0x00498); (0x0049A,0x0049A); (0x0049C,0x0049C); (0x0049E,0x0049E); (0x004A0,0x004A0); (0x004A2,0x004A2); (0x004A4,0x004A4); (0x004A6,0x004A6); (0x004A8,0x004A8); (0x004AA,0x004AA); (0x004AC,0x004AC); (0x004AE,0x004AE); (0x004B0,0x004B0); (0x004B2,0x004B2); (0x004B4,0x004B4); (0x004B6,0x004B6); (0x004B8,0x004B8); (0x004BA,0x004BA); (0x004BC,0x004BC); (0x004BE,0x004BE); (0x004C0,0x004C1); (0x004C3,0x004C3); (0x004C5,0x004C5); (0x004C7,0x004C7); (0x004C9,0x004C9); (0x004CB,0x004CB); (0x004CD,0x004CD); (0x004D0,0x004D0); (0x004D2,0x004D2); (0x004D4,0x004D4); (0x004D6,0x004D6); (0x004D8,0x004D8); (0x004DA,0x004DA); (0x004DC,0x004DC); (0x004DE,0x004DE); (0x004E0,0x004E0); (0x004E2,0x004E2); (0x004E4,0x004E4); (0x004E6,0x004E6); (0x004E8,0x004E8); (0x004EA,0x004EA); (0x004EC,0x004EC); (0x004EE,0x004EE); (0x004F0,0x004F0); (0x004F2,0x004F2); (0x004F4,0x004F4); (0x004F6,0x004F6); (0x004F8,0x004F8); (0x004FA,0x004FA); (0x004FC,0x004FC); (0x004FE,0x004FE); (0x00500,0x00500); (0x00502,0x00502); (0x00504,0x00504); (0x00506,0x00506); (0x00508,0x00508); (0x0050A,0x0050A); (0x0050C,0x0050C); (0x0050E,0x0050E); (0x00510,0x00510); (0x00512,0x00512); (0x00514,0x00514); (0x00516,0x00516); (0x00518,0x00518); (0x0051A,0x0051A); (0x0051C,0x0051C); (0x0051E,0x0051E); (0x00520,0x00520); (0x00522,0x00522); (0x00524,0x00524); (0x00526,0x00526); (0x00528,0x00528); (0x0052A,0x0052A); (0x0052C,0x0052C); (0x0052E,0x0052E); (0x00531,0x00556); (0x010A0,0x010C5); (0x010C7,0x010C7); (0x010CD,0x010CD); (0x013A0,0x013F5); (0x01E00,0x01E00); (0x01E02,0x01E02); (0x01E04,0x01E04); (0x01E06,0x01E06); (0x01E08,0x01E08); (0x01E0A,0x01E0A); (0x01E0C,0x01E0C); (0x01E0E,0x01E0E); (0x01E10,0x01E10); (0x01E12,0x01E12); (0x01E14,0x01E14); (0x01E16,0x01E16); (0x01E18,0x01E18); (0x01E1A,0x01E1A); (0x01E1C,0x01E1C); (0x01E1E,0x01E1E); (0x01E20,0x01E20); (0x01E22,0x01E22); (0x01E24,0x01E24); (0x01E26,0x01E26); (0x01E28,0x01E28); (0x01E2A,0x01E2A); (0x01E2C,0x01E2C); (0x01E2E,0x01E2E); (0x01E30,0x01E30); (0x01E32,0x01E32); (0x01E34,0x01E34); (0x01E36,0x01E36); (0x01E38,0x01E38); (0x01E3A,0x01E3A); (0x01E3C,0x01E3C); (0x01E3E,0x01E3E); (0x01E40,0x01E40); (0x01E42,0x01E42); (0x01E44,0x01E44); (0x01E46,0x01E46); (0x01E48,0x01E48); (0x01E4A,0x01E4A); (0x01E4C,0x01E4C); (0x01E4E,0x01E4E); (0x01E50,0x01E50); (0x01E52,0x01E52); (0x01E54,0x01E54); (0x01E56,0x01E56); (0x01E58,0x01E58); (0x01E5A,0x01E5A); (0x01E5C,0x01E5C); (0x01E5E,0x01E5E); (0x01E60,0x01E60); (0x01E62,0x01E62); (0x01E64,0x01E64); (0x01E66,0x01E66); (0x01E68,0x01E68); (0x01E6A,0x01E6A); (0x01E6C,0x01E6C); (0x01E6E,0x01E6E); (0x01E70,0x01E70); (0x01E72,0x01E72); (0x01E74,0x01E74); (0x01E76,0x01E76); (0x01E78,0x01E78); (0x01E7A,0x01E7A); 
(0x01E7C,0x01E7C); (0x01E7E,0x01E7E); (0x01E80,0x01E80); (0x01E82,0x01E82); (0x01E84,0x01E84); (0x01E86,0x01E86); (0x01E88,0x01E88); (0x01E8A,0x01E8A); (0x01E8C,0x01E8C); (0x01E8E,0x01E8E); (0x01E90,0x01E90); (0x01E92,0x01E92); (0x01E94,0x01E94); (0x01E9E,0x01E9E); (0x01EA0,0x01EA0); (0x01EA2,0x01EA2); (0x01EA4,0x01EA4); (0x01EA6,0x01EA6); (0x01EA8,0x01EA8); (0x01EAA,0x01EAA); (0x01EAC,0x01EAC); (0x01EAE,0x01EAE); (0x01EB0,0x01EB0); (0x01EB2,0x01EB2); (0x01EB4,0x01EB4); (0x01EB6,0x01EB6); (0x01EB8,0x01EB8); (0x01EBA,0x01EBA); (0x01EBC,0x01EBC); (0x01EBE,0x01EBE); (0x01EC0,0x01EC0); (0x01EC2,0x01EC2); (0x01EC4,0x01EC4); (0x01EC6,0x01EC6); (0x01EC8,0x01EC8); (0x01ECA,0x01ECA); (0x01ECC,0x01ECC); (0x01ECE,0x01ECE); (0x01ED0,0x01ED0); (0x01ED2,0x01ED2); (0x01ED4,0x01ED4); (0x01ED6,0x01ED6); (0x01ED8,0x01ED8); (0x01EDA,0x01EDA); (0x01EDC,0x01EDC); (0x01EDE,0x01EDE); (0x01EE0,0x01EE0); (0x01EE2,0x01EE2); (0x01EE4,0x01EE4); (0x01EE6,0x01EE6); (0x01EE8,0x01EE8); (0x01EEA,0x01EEA); (0x01EEC,0x01EEC); (0x01EEE,0x01EEE); (0x01EF0,0x01EF0); (0x01EF2,0x01EF2); (0x01EF4,0x01EF4); (0x01EF6,0x01EF6); (0x01EF8,0x01EF8); (0x01EFA,0x01EFA); (0x01EFC,0x01EFC); (0x01EFE,0x01EFE); (0x01F08,0x01F0F); (0x01F18,0x01F1D); (0x01F28,0x01F2F); (0x01F38,0x01F3F); (0x01F48,0x01F4D); (0x01F59,0x01F59); (0x01F5B,0x01F5B); (0x01F5D,0x01F5D); (0x01F5F,0x01F5F); (0x01F68,0x01F6F); (0x01FB8,0x01FBB); (0x01FC8,0x01FCB); (0x01FD8,0x01FDB); (0x01FE8,0x01FEC); (0x01FF8,0x01FFB); (0x02102,0x02102); (0x02107,0x02107); (0x0210B,0x0210D); (0x02110,0x02112); (0x02115,0x02115); (0x02119,0x0211D); (0x02124,0x02124); (0x02126,0x02126); (0x02128,0x02128); (0x0212A,0x0212D); (0x02130,0x02133); (0x0213E,0x0213F); (0x02145,0x02145); (0x02183,0x02183); (0x02C00,0x02C2E); (0x02C60,0x02C60); (0x02C62,0x02C64); (0x02C67,0x02C67); (0x02C69,0x02C69); (0x02C6B,0x02C6B); (0x02C6D,0x02C70); (0x02C72,0x02C72); (0x02C75,0x02C75); (0x02C7E,0x02C80); (0x02C82,0x02C82); (0x02C84,0x02C84); (0x02C86,0x02C86); (0x02C88,0x02C88); (0x02C8A,0x02C8A); (0x02C8C,0x02C8C); (0x02C8E,0x02C8E); (0x02C90,0x02C90); (0x02C92,0x02C92); (0x02C94,0x02C94); (0x02C96,0x02C96); (0x02C98,0x02C98); (0x02C9A,0x02C9A); (0x02C9C,0x02C9C); (0x02C9E,0x02C9E); (0x02CA0,0x02CA0); (0x02CA2,0x02CA2); (0x02CA4,0x02CA4); (0x02CA6,0x02CA6); (0x02CA8,0x02CA8); (0x02CAA,0x02CAA); (0x02CAC,0x02CAC); (0x02CAE,0x02CAE); (0x02CB0,0x02CB0); (0x02CB2,0x02CB2); (0x02CB4,0x02CB4); (0x02CB6,0x02CB6); (0x02CB8,0x02CB8); (0x02CBA,0x02CBA); (0x02CBC,0x02CBC); (0x02CBE,0x02CBE); (0x02CC0,0x02CC0); (0x02CC2,0x02CC2); (0x02CC4,0x02CC4); (0x02CC6,0x02CC6); (0x02CC8,0x02CC8); (0x02CCA,0x02CCA); (0x02CCC,0x02CCC); (0x02CCE,0x02CCE); (0x02CD0,0x02CD0); (0x02CD2,0x02CD2); (0x02CD4,0x02CD4); (0x02CD6,0x02CD6); (0x02CD8,0x02CD8); (0x02CDA,0x02CDA); (0x02CDC,0x02CDC); (0x02CDE,0x02CDE); (0x02CE0,0x02CE0); (0x02CE2,0x02CE2); (0x02CEB,0x02CEB); (0x02CED,0x02CED); (0x02CF2,0x02CF2); (0x0A640,0x0A640); (0x0A642,0x0A642); (0x0A644,0x0A644); (0x0A646,0x0A646); (0x0A648,0x0A648); (0x0A64A,0x0A64A); (0x0A64C,0x0A64C); (0x0A64E,0x0A64E); (0x0A650,0x0A650); (0x0A652,0x0A652); (0x0A654,0x0A654); (0x0A656,0x0A656); (0x0A658,0x0A658); (0x0A65A,0x0A65A); (0x0A65C,0x0A65C); (0x0A65E,0x0A65E); (0x0A660,0x0A660); (0x0A662,0x0A662); (0x0A664,0x0A664); (0x0A666,0x0A666); (0x0A668,0x0A668); (0x0A66A,0x0A66A); (0x0A66C,0x0A66C); (0x0A680,0x0A680); (0x0A682,0x0A682); (0x0A684,0x0A684); (0x0A686,0x0A686); (0x0A688,0x0A688); (0x0A68A,0x0A68A); (0x0A68C,0x0A68C); (0x0A68E,0x0A68E); (0x0A690,0x0A690); (0x0A692,0x0A692); (0x0A694,0x0A694); 
(0x0A696,0x0A696); (0x0A698,0x0A698); (0x0A69A,0x0A69A); (0x0A722,0x0A722); (0x0A724,0x0A724); (0x0A726,0x0A726); (0x0A728,0x0A728); (0x0A72A,0x0A72A); (0x0A72C,0x0A72C); (0x0A72E,0x0A72E); (0x0A732,0x0A732); (0x0A734,0x0A734); (0x0A736,0x0A736); (0x0A738,0x0A738); (0x0A73A,0x0A73A); (0x0A73C,0x0A73C); (0x0A73E,0x0A73E); (0x0A740,0x0A740); (0x0A742,0x0A742); (0x0A744,0x0A744); (0x0A746,0x0A746); (0x0A748,0x0A748); (0x0A74A,0x0A74A); (0x0A74C,0x0A74C); (0x0A74E,0x0A74E); (0x0A750,0x0A750); (0x0A752,0x0A752); (0x0A754,0x0A754); (0x0A756,0x0A756); (0x0A758,0x0A758); (0x0A75A,0x0A75A); (0x0A75C,0x0A75C); (0x0A75E,0x0A75E); (0x0A760,0x0A760); (0x0A762,0x0A762); (0x0A764,0x0A764); (0x0A766,0x0A766); (0x0A768,0x0A768); (0x0A76A,0x0A76A); (0x0A76C,0x0A76C); (0x0A76E,0x0A76E); (0x0A779,0x0A779); (0x0A77B,0x0A77B); (0x0A77D,0x0A77E); (0x0A780,0x0A780); (0x0A782,0x0A782); (0x0A784,0x0A784); (0x0A786,0x0A786); (0x0A78B,0x0A78B); (0x0A78D,0x0A78D); (0x0A790,0x0A790); (0x0A792,0x0A792); (0x0A796,0x0A796); (0x0A798,0x0A798); (0x0A79A,0x0A79A); (0x0A79C,0x0A79C); (0x0A79E,0x0A79E); (0x0A7A0,0x0A7A0); (0x0A7A2,0x0A7A2); (0x0A7A4,0x0A7A4); (0x0A7A6,0x0A7A6); (0x0A7A8,0x0A7A8); (0x0A7AA,0x0A7AE); (0x0A7B0,0x0A7B4); (0x0A7B6,0x0A7B6); (0x0FF21,0x0FF3A); (0x10400,0x10427); (0x104B0,0x104D3); (0x10C80,0x10CB2); (0x118A0,0x118BF); (0x1D400,0x1D419); (0x1D434,0x1D44D); (0x1D468,0x1D481); (0x1D49C,0x1D49C); (0x1D49E,0x1D49F); (0x1D4A2,0x1D4A2); (0x1D4A5,0x1D4A6); (0x1D4A9,0x1D4AC); (0x1D4AE,0x1D4B5); (0x1D4D0,0x1D4E9); (0x1D504,0x1D505); (0x1D507,0x1D50A); (0x1D50D,0x1D514); (0x1D516,0x1D51C); (0x1D538,0x1D539); (0x1D53B,0x1D53E); (0x1D540,0x1D544); (0x1D546,0x1D546); (0x1D54A,0x1D550); (0x1D56C,0x1D585); (0x1D5A0,0x1D5B9); (0x1D5D4,0x1D5ED); (0x1D608,0x1D621); (0x1D63C,0x1D655); (0x1D670,0x1D689); (0x1D6A8,0x1D6C0); (0x1D6E2,0x1D6FA); (0x1D71C,0x1D734); (0x1D756,0x1D76E); (0x1D790,0x1D7A8); (0x1D7CA,0x1D7CA) ] (* Letter, Lowercase *) let ll = [ (0x00061,0x0007A); (0x000B5,0x000B5); (0x000DF,0x000F6); (0x000F8,0x000FF); (0x00101,0x00101); (0x00103,0x00103); (0x00105,0x00105); (0x00107,0x00107); (0x00109,0x00109); (0x0010B,0x0010B); (0x0010D,0x0010D); (0x0010F,0x0010F); (0x00111,0x00111); (0x00113,0x00113); (0x00115,0x00115); (0x00117,0x00117); (0x00119,0x00119); (0x0011B,0x0011B); (0x0011D,0x0011D); (0x0011F,0x0011F); (0x00121,0x00121); (0x00123,0x00123); (0x00125,0x00125); (0x00127,0x00127); (0x00129,0x00129); (0x0012B,0x0012B); (0x0012D,0x0012D); (0x0012F,0x0012F); (0x00131,0x00131); (0x00133,0x00133); (0x00135,0x00135); (0x00137,0x00138); (0x0013A,0x0013A); (0x0013C,0x0013C); (0x0013E,0x0013E); (0x00140,0x00140); (0x00142,0x00142); (0x00144,0x00144); (0x00146,0x00146); (0x00148,0x00149); (0x0014B,0x0014B); (0x0014D,0x0014D); (0x0014F,0x0014F); (0x00151,0x00151); (0x00153,0x00153); (0x00155,0x00155); (0x00157,0x00157); (0x00159,0x00159); (0x0015B,0x0015B); (0x0015D,0x0015D); (0x0015F,0x0015F); (0x00161,0x00161); (0x00163,0x00163); (0x00165,0x00165); (0x00167,0x00167); (0x00169,0x00169); (0x0016B,0x0016B); (0x0016D,0x0016D); (0x0016F,0x0016F); (0x00171,0x00171); (0x00173,0x00173); (0x00175,0x00175); (0x00177,0x00177); (0x0017A,0x0017A); (0x0017C,0x0017C); (0x0017E,0x00180); (0x00183,0x00183); (0x00185,0x00185); (0x00188,0x00188); (0x0018C,0x0018D); (0x00192,0x00192); (0x00195,0x00195); (0x00199,0x0019B); (0x0019E,0x0019E); (0x001A1,0x001A1); (0x001A3,0x001A3); (0x001A5,0x001A5); (0x001A8,0x001A8); (0x001AA,0x001AB); (0x001AD,0x001AD); (0x001B0,0x001B0); (0x001B4,0x001B4); (0x001B6,0x001B6); (0x001B9,0x001BA); 
(0x001BD,0x001BF); (0x001C6,0x001C6); (0x001C9,0x001C9); (0x001CC,0x001CC); (0x001CE,0x001CE); (0x001D0,0x001D0); (0x001D2,0x001D2); (0x001D4,0x001D4); (0x001D6,0x001D6); (0x001D8,0x001D8); (0x001DA,0x001DA); (0x001DC,0x001DD); (0x001DF,0x001DF); (0x001E1,0x001E1); (0x001E3,0x001E3); (0x001E5,0x001E5); (0x001E7,0x001E7); (0x001E9,0x001E9); (0x001EB,0x001EB); (0x001ED,0x001ED); (0x001EF,0x001F0); (0x001F3,0x001F3); (0x001F5,0x001F5); (0x001F9,0x001F9); (0x001FB,0x001FB); (0x001FD,0x001FD); (0x001FF,0x001FF); (0x00201,0x00201); (0x00203,0x00203); (0x00205,0x00205); (0x00207,0x00207); (0x00209,0x00209); (0x0020B,0x0020B); (0x0020D,0x0020D); (0x0020F,0x0020F); (0x00211,0x00211); (0x00213,0x00213); (0x00215,0x00215); (0x00217,0x00217); (0x00219,0x00219); (0x0021B,0x0021B); (0x0021D,0x0021D); (0x0021F,0x0021F); (0x00221,0x00221); (0x00223,0x00223); (0x00225,0x00225); (0x00227,0x00227); (0x00229,0x00229); (0x0022B,0x0022B); (0x0022D,0x0022D); (0x0022F,0x0022F); (0x00231,0x00231); (0x00233,0x00239); (0x0023C,0x0023C); (0x0023F,0x00240); (0x00242,0x00242); (0x00247,0x00247); (0x00249,0x00249); (0x0024B,0x0024B); (0x0024D,0x0024D); (0x0024F,0x00293); (0x00295,0x002AF); (0x00371,0x00371); (0x00373,0x00373); (0x00377,0x00377); (0x0037B,0x0037D); (0x00390,0x00390); (0x003AC,0x003CE); (0x003D0,0x003D1); (0x003D5,0x003D7); (0x003D9,0x003D9); (0x003DB,0x003DB); (0x003DD,0x003DD); (0x003DF,0x003DF); (0x003E1,0x003E1); (0x003E3,0x003E3); (0x003E5,0x003E5); (0x003E7,0x003E7); (0x003E9,0x003E9); (0x003EB,0x003EB); (0x003ED,0x003ED); (0x003EF,0x003F3); (0x003F5,0x003F5); (0x003F8,0x003F8); (0x003FB,0x003FC); (0x00430,0x0045F); (0x00461,0x00461); (0x00463,0x00463); (0x00465,0x00465); (0x00467,0x00467); (0x00469,0x00469); (0x0046B,0x0046B); (0x0046D,0x0046D); (0x0046F,0x0046F); (0x00471,0x00471); (0x00473,0x00473); (0x00475,0x00475); (0x00477,0x00477); (0x00479,0x00479); (0x0047B,0x0047B); (0x0047D,0x0047D); (0x0047F,0x0047F); (0x00481,0x00481); (0x0048B,0x0048B); (0x0048D,0x0048D); (0x0048F,0x0048F); (0x00491,0x00491); (0x00493,0x00493); (0x00495,0x00495); (0x00497,0x00497); (0x00499,0x00499); (0x0049B,0x0049B); (0x0049D,0x0049D); (0x0049F,0x0049F); (0x004A1,0x004A1); (0x004A3,0x004A3); (0x004A5,0x004A5); (0x004A7,0x004A7); (0x004A9,0x004A9); (0x004AB,0x004AB); (0x004AD,0x004AD); (0x004AF,0x004AF); (0x004B1,0x004B1); (0x004B3,0x004B3); (0x004B5,0x004B5); (0x004B7,0x004B7); (0x004B9,0x004B9); (0x004BB,0x004BB); (0x004BD,0x004BD); (0x004BF,0x004BF); (0x004C2,0x004C2); (0x004C4,0x004C4); (0x004C6,0x004C6); (0x004C8,0x004C8); (0x004CA,0x004CA); (0x004CC,0x004CC); (0x004CE,0x004CF); (0x004D1,0x004D1); (0x004D3,0x004D3); (0x004D5,0x004D5); (0x004D7,0x004D7); (0x004D9,0x004D9); (0x004DB,0x004DB); (0x004DD,0x004DD); (0x004DF,0x004DF); (0x004E1,0x004E1); (0x004E3,0x004E3); (0x004E5,0x004E5); (0x004E7,0x004E7); (0x004E9,0x004E9); (0x004EB,0x004EB); (0x004ED,0x004ED); (0x004EF,0x004EF); (0x004F1,0x004F1); (0x004F3,0x004F3); (0x004F5,0x004F5); (0x004F7,0x004F7); (0x004F9,0x004F9); (0x004FB,0x004FB); (0x004FD,0x004FD); (0x004FF,0x004FF); (0x00501,0x00501); (0x00503,0x00503); (0x00505,0x00505); (0x00507,0x00507); (0x00509,0x00509); (0x0050B,0x0050B); (0x0050D,0x0050D); (0x0050F,0x0050F); (0x00511,0x00511); (0x00513,0x00513); (0x00515,0x00515); (0x00517,0x00517); (0x00519,0x00519); (0x0051B,0x0051B); (0x0051D,0x0051D); (0x0051F,0x0051F); (0x00521,0x00521); (0x00523,0x00523); (0x00525,0x00525); (0x00527,0x00527); (0x00529,0x00529); (0x0052B,0x0052B); (0x0052D,0x0052D); (0x0052F,0x0052F); (0x00561,0x00587); (0x013F8,0x013FD); 
(0x01C80,0x01C88); (0x01D00,0x01D2B); (0x01D6B,0x01D77); (0x01D79,0x01D9A); (0x01E01,0x01E01); (0x01E03,0x01E03); (0x01E05,0x01E05); (0x01E07,0x01E07); (0x01E09,0x01E09); (0x01E0B,0x01E0B); (0x01E0D,0x01E0D); (0x01E0F,0x01E0F); (0x01E11,0x01E11); (0x01E13,0x01E13); (0x01E15,0x01E15); (0x01E17,0x01E17); (0x01E19,0x01E19); (0x01E1B,0x01E1B); (0x01E1D,0x01E1D); (0x01E1F,0x01E1F); (0x01E21,0x01E21); (0x01E23,0x01E23); (0x01E25,0x01E25); (0x01E27,0x01E27); (0x01E29,0x01E29); (0x01E2B,0x01E2B); (0x01E2D,0x01E2D); (0x01E2F,0x01E2F); (0x01E31,0x01E31); (0x01E33,0x01E33); (0x01E35,0x01E35); (0x01E37,0x01E37); (0x01E39,0x01E39); (0x01E3B,0x01E3B); (0x01E3D,0x01E3D); (0x01E3F,0x01E3F); (0x01E41,0x01E41); (0x01E43,0x01E43); (0x01E45,0x01E45); (0x01E47,0x01E47); (0x01E49,0x01E49); (0x01E4B,0x01E4B); (0x01E4D,0x01E4D); (0x01E4F,0x01E4F); (0x01E51,0x01E51); (0x01E53,0x01E53); (0x01E55,0x01E55); (0x01E57,0x01E57); (0x01E59,0x01E59); (0x01E5B,0x01E5B); (0x01E5D,0x01E5D); (0x01E5F,0x01E5F); (0x01E61,0x01E61); (0x01E63,0x01E63); (0x01E65,0x01E65); (0x01E67,0x01E67); (0x01E69,0x01E69); (0x01E6B,0x01E6B); (0x01E6D,0x01E6D); (0x01E6F,0x01E6F); (0x01E71,0x01E71); (0x01E73,0x01E73); (0x01E75,0x01E75); (0x01E77,0x01E77); (0x01E79,0x01E79); (0x01E7B,0x01E7B); (0x01E7D,0x01E7D); (0x01E7F,0x01E7F); (0x01E81,0x01E81); (0x01E83,0x01E83); (0x01E85,0x01E85); (0x01E87,0x01E87); (0x01E89,0x01E89); (0x01E8B,0x01E8B); (0x01E8D,0x01E8D); (0x01E8F,0x01E8F); (0x01E91,0x01E91); (0x01E93,0x01E93); (0x01E95,0x01E9D); (0x01E9F,0x01E9F); (0x01EA1,0x01EA1); (0x01EA3,0x01EA3); (0x01EA5,0x01EA5); (0x01EA7,0x01EA7); (0x01EA9,0x01EA9); (0x01EAB,0x01EAB); (0x01EAD,0x01EAD); (0x01EAF,0x01EAF); (0x01EB1,0x01EB1); (0x01EB3,0x01EB3); (0x01EB5,0x01EB5); (0x01EB7,0x01EB7); (0x01EB9,0x01EB9); (0x01EBB,0x01EBB); (0x01EBD,0x01EBD); (0x01EBF,0x01EBF); (0x01EC1,0x01EC1); (0x01EC3,0x01EC3); (0x01EC5,0x01EC5); (0x01EC7,0x01EC7); (0x01EC9,0x01EC9); (0x01ECB,0x01ECB); (0x01ECD,0x01ECD); (0x01ECF,0x01ECF); (0x01ED1,0x01ED1); (0x01ED3,0x01ED3); (0x01ED5,0x01ED5); (0x01ED7,0x01ED7); (0x01ED9,0x01ED9); (0x01EDB,0x01EDB); (0x01EDD,0x01EDD); (0x01EDF,0x01EDF); (0x01EE1,0x01EE1); (0x01EE3,0x01EE3); (0x01EE5,0x01EE5); (0x01EE7,0x01EE7); (0x01EE9,0x01EE9); (0x01EEB,0x01EEB); (0x01EED,0x01EED); (0x01EEF,0x01EEF); (0x01EF1,0x01EF1); (0x01EF3,0x01EF3); (0x01EF5,0x01EF5); (0x01EF7,0x01EF7); (0x01EF9,0x01EF9); (0x01EFB,0x01EFB); (0x01EFD,0x01EFD); (0x01EFF,0x01F07); (0x01F10,0x01F15); (0x01F20,0x01F27); (0x01F30,0x01F37); (0x01F40,0x01F45); (0x01F50,0x01F57); (0x01F60,0x01F67); (0x01F70,0x01F7D); (0x01F80,0x01F87); (0x01F90,0x01F97); (0x01FA0,0x01FA7); (0x01FB0,0x01FB4); (0x01FB6,0x01FB7); (0x01FBE,0x01FBE); (0x01FC2,0x01FC4); (0x01FC6,0x01FC7); (0x01FD0,0x01FD3); (0x01FD6,0x01FD7); (0x01FE0,0x01FE7); (0x01FF2,0x01FF4); (0x01FF6,0x01FF7); (0x0210A,0x0210A); (0x0210E,0x0210F); (0x02113,0x02113); (0x0212F,0x0212F); (0x02134,0x02134); (0x02139,0x02139); (0x0213C,0x0213D); (0x02146,0x02149); (0x0214E,0x0214E); (0x02184,0x02184); (0x02C30,0x02C5E); (0x02C61,0x02C61); (0x02C65,0x02C66); (0x02C68,0x02C68); (0x02C6A,0x02C6A); (0x02C6C,0x02C6C); (0x02C71,0x02C71); (0x02C73,0x02C74); (0x02C76,0x02C7B); (0x02C81,0x02C81); (0x02C83,0x02C83); (0x02C85,0x02C85); (0x02C87,0x02C87); (0x02C89,0x02C89); (0x02C8B,0x02C8B); (0x02C8D,0x02C8D); (0x02C8F,0x02C8F); (0x02C91,0x02C91); (0x02C93,0x02C93); (0x02C95,0x02C95); (0x02C97,0x02C97); (0x02C99,0x02C99); (0x02C9B,0x02C9B); (0x02C9D,0x02C9D); (0x02C9F,0x02C9F); (0x02CA1,0x02CA1); (0x02CA3,0x02CA3); (0x02CA5,0x02CA5); (0x02CA7,0x02CA7); 
(0x02CA9,0x02CA9); (0x02CAB,0x02CAB); (0x02CAD,0x02CAD); (0x02CAF,0x02CAF); (0x02CB1,0x02CB1); (0x02CB3,0x02CB3); (0x02CB5,0x02CB5); (0x02CB7,0x02CB7); (0x02CB9,0x02CB9); (0x02CBB,0x02CBB); (0x02CBD,0x02CBD); (0x02CBF,0x02CBF); (0x02CC1,0x02CC1); (0x02CC3,0x02CC3); (0x02CC5,0x02CC5); (0x02CC7,0x02CC7); (0x02CC9,0x02CC9); (0x02CCB,0x02CCB); (0x02CCD,0x02CCD); (0x02CCF,0x02CCF); (0x02CD1,0x02CD1); (0x02CD3,0x02CD3); (0x02CD5,0x02CD5); (0x02CD7,0x02CD7); (0x02CD9,0x02CD9); (0x02CDB,0x02CDB); (0x02CDD,0x02CDD); (0x02CDF,0x02CDF); (0x02CE1,0x02CE1); (0x02CE3,0x02CE4); (0x02CEC,0x02CEC); (0x02CEE,0x02CEE); (0x02CF3,0x02CF3); (0x02D00,0x02D25); (0x02D27,0x02D27); (0x02D2D,0x02D2D); (0x0A641,0x0A641); (0x0A643,0x0A643); (0x0A645,0x0A645); (0x0A647,0x0A647); (0x0A649,0x0A649); (0x0A64B,0x0A64B); (0x0A64D,0x0A64D); (0x0A64F,0x0A64F); (0x0A651,0x0A651); (0x0A653,0x0A653); (0x0A655,0x0A655); (0x0A657,0x0A657); (0x0A659,0x0A659); (0x0A65B,0x0A65B); (0x0A65D,0x0A65D); (0x0A65F,0x0A65F); (0x0A661,0x0A661); (0x0A663,0x0A663); (0x0A665,0x0A665); (0x0A667,0x0A667); (0x0A669,0x0A669); (0x0A66B,0x0A66B); (0x0A66D,0x0A66D); (0x0A681,0x0A681); (0x0A683,0x0A683); (0x0A685,0x0A685); (0x0A687,0x0A687); (0x0A689,0x0A689); (0x0A68B,0x0A68B); (0x0A68D,0x0A68D); (0x0A68F,0x0A68F); (0x0A691,0x0A691); (0x0A693,0x0A693); (0x0A695,0x0A695); (0x0A697,0x0A697); (0x0A699,0x0A699); (0x0A69B,0x0A69B); (0x0A723,0x0A723); (0x0A725,0x0A725); (0x0A727,0x0A727); (0x0A729,0x0A729); (0x0A72B,0x0A72B); (0x0A72D,0x0A72D); (0x0A72F,0x0A731); (0x0A733,0x0A733); (0x0A735,0x0A735); (0x0A737,0x0A737); (0x0A739,0x0A739); (0x0A73B,0x0A73B); (0x0A73D,0x0A73D); (0x0A73F,0x0A73F); (0x0A741,0x0A741); (0x0A743,0x0A743); (0x0A745,0x0A745); (0x0A747,0x0A747); (0x0A749,0x0A749); (0x0A74B,0x0A74B); (0x0A74D,0x0A74D); (0x0A74F,0x0A74F); (0x0A751,0x0A751); (0x0A753,0x0A753); (0x0A755,0x0A755); (0x0A757,0x0A757); (0x0A759,0x0A759); (0x0A75B,0x0A75B); (0x0A75D,0x0A75D); (0x0A75F,0x0A75F); (0x0A761,0x0A761); (0x0A763,0x0A763); (0x0A765,0x0A765); (0x0A767,0x0A767); (0x0A769,0x0A769); (0x0A76B,0x0A76B); (0x0A76D,0x0A76D); (0x0A76F,0x0A76F); (0x0A771,0x0A778); (0x0A77A,0x0A77A); (0x0A77C,0x0A77C); (0x0A77F,0x0A77F); (0x0A781,0x0A781); (0x0A783,0x0A783); (0x0A785,0x0A785); (0x0A787,0x0A787); (0x0A78C,0x0A78C); (0x0A78E,0x0A78E); (0x0A791,0x0A791); (0x0A793,0x0A795); (0x0A797,0x0A797); (0x0A799,0x0A799); (0x0A79B,0x0A79B); (0x0A79D,0x0A79D); (0x0A79F,0x0A79F); (0x0A7A1,0x0A7A1); (0x0A7A3,0x0A7A3); (0x0A7A5,0x0A7A5); (0x0A7A7,0x0A7A7); (0x0A7A9,0x0A7A9); (0x0A7B5,0x0A7B5); (0x0A7B7,0x0A7B7); (0x0A7FA,0x0A7FA); (0x0AB30,0x0AB5A); (0x0AB60,0x0AB65); (0x0AB70,0x0ABBF); (0x0FB00,0x0FB06); (0x0FB13,0x0FB17); (0x0FF41,0x0FF5A); (0x10428,0x1044F); (0x104D8,0x104FB); (0x10CC0,0x10CF2); (0x118C0,0x118DF); (0x1D41A,0x1D433); (0x1D44E,0x1D454); (0x1D456,0x1D467); (0x1D482,0x1D49B); (0x1D4B6,0x1D4B9); (0x1D4BB,0x1D4BB); (0x1D4BD,0x1D4C3); (0x1D4C5,0x1D4CF); (0x1D4EA,0x1D503); (0x1D51E,0x1D537); (0x1D552,0x1D56B); (0x1D586,0x1D59F); (0x1D5BA,0x1D5D3); (0x1D5EE,0x1D607); (0x1D622,0x1D63B); (0x1D656,0x1D66F); (0x1D68A,0x1D6A5); (0x1D6C2,0x1D6DA); (0x1D6DC,0x1D6E1); (0x1D6FC,0x1D714); (0x1D716,0x1D71B); (0x1D736,0x1D74E); (0x1D750,0x1D755); (0x1D770,0x1D788); (0x1D78A,0x1D78F); (0x1D7AA,0x1D7C2); (0x1D7C4,0x1D7C9); (0x1D7CB,0x1D7CB) ] (* Letter, Titlecase *) let lt = [ (0x001C5,0x001C5); (0x001C8,0x001C8); (0x001CB,0x001CB); (0x001F2,0x001F2); (0x01F88,0x01F8F); (0x01F98,0x01F9F); (0x01FA8,0x01FAF); (0x01FBC,0x01FBC); (0x01FCC,0x01FCC) ] (* Mark, Non-Spacing *) let mn = [ 
(0x00300,0x0036F); (0x00483,0x00487); (0x00591,0x005BD); (0x005BF,0x005BF); (0x005C1,0x005C2); (0x005C4,0x005C5); (0x005C7,0x005C7); (0x00610,0x0061A); (0x0064B,0x0065F); (0x00670,0x00670); (0x006D6,0x006DC); (0x006DF,0x006E4); (0x006E7,0x006E8); (0x006EA,0x006ED); (0x00711,0x00711); (0x00730,0x0074A); (0x007A6,0x007B0); (0x007EB,0x007F3); (0x00816,0x00819); (0x0081B,0x00823); (0x00825,0x00827); (0x00829,0x0082D); (0x00859,0x0085B); (0x008D4,0x008E1); (0x008E3,0x00902); (0x0093A,0x0093A); (0x0093C,0x0093C); (0x00941,0x00948); (0x0094D,0x0094D); (0x00951,0x00957); (0x00962,0x00963); (0x00981,0x00981); (0x009BC,0x009BC); (0x009C1,0x009C4); (0x009CD,0x009CD); (0x009E2,0x009E3); (0x00A01,0x00A02); (0x00A3C,0x00A3C); (0x00A41,0x00A42); (0x00A47,0x00A48); (0x00A4B,0x00A4D); (0x00A51,0x00A51); (0x00A70,0x00A71); (0x00A75,0x00A75); (0x00A81,0x00A82); (0x00ABC,0x00ABC); (0x00AC1,0x00AC5); (0x00AC7,0x00AC8); (0x00ACD,0x00ACD); (0x00AE2,0x00AE3); (0x00B01,0x00B01); (0x00B3C,0x00B3C); (0x00B3F,0x00B3F); (0x00B41,0x00B44); (0x00B4D,0x00B4D); (0x00B56,0x00B56); (0x00B62,0x00B63); (0x00B82,0x00B82); (0x00BC0,0x00BC0); (0x00BCD,0x00BCD); (0x00C00,0x00C00); (0x00C3E,0x00C40); (0x00C46,0x00C48); (0x00C4A,0x00C4D); (0x00C55,0x00C56); (0x00C62,0x00C63); (0x00C81,0x00C81); (0x00CBC,0x00CBC); (0x00CBF,0x00CBF); (0x00CC6,0x00CC6); (0x00CCC,0x00CCD); (0x00CE2,0x00CE3); (0x00D01,0x00D01); (0x00D41,0x00D44); (0x00D4D,0x00D4D); (0x00D62,0x00D63); (0x00DCA,0x00DCA); (0x00DD2,0x00DD4); (0x00DD6,0x00DD6); (0x00E31,0x00E31); (0x00E34,0x00E3A); (0x00E47,0x00E4E); (0x00EB1,0x00EB1); (0x00EB4,0x00EB9); (0x00EBB,0x00EBC); (0x00EC8,0x00ECD); (0x00F18,0x00F19); (0x00F35,0x00F35); (0x00F37,0x00F37); (0x00F39,0x00F39); (0x00F71,0x00F7E); (0x00F80,0x00F84); (0x00F86,0x00F87); (0x00F8D,0x00F97); (0x00F99,0x00FBC); (0x00FC6,0x00FC6); (0x0102D,0x01030); (0x01032,0x01037); (0x01039,0x0103A); (0x0103D,0x0103E); (0x01058,0x01059); (0x0105E,0x01060); (0x01071,0x01074); (0x01082,0x01082); (0x01085,0x01086); (0x0108D,0x0108D); (0x0109D,0x0109D); (0x0135D,0x0135F); (0x01712,0x01714); (0x01732,0x01734); (0x01752,0x01753); (0x01772,0x01773); (0x017B4,0x017B5); (0x017B7,0x017BD); (0x017C6,0x017C6); (0x017C9,0x017D3); (0x017DD,0x017DD); (0x0180B,0x0180D); (0x01885,0x01886); (0x018A9,0x018A9); (0x01920,0x01922); (0x01927,0x01928); (0x01932,0x01932); (0x01939,0x0193B); (0x01A17,0x01A18); (0x01A1B,0x01A1B); (0x01A56,0x01A56); (0x01A58,0x01A5E); (0x01A60,0x01A60); (0x01A62,0x01A62); (0x01A65,0x01A6C); (0x01A73,0x01A7C); (0x01A7F,0x01A7F); (0x01AB0,0x01ABD); (0x01B00,0x01B03); (0x01B34,0x01B34); (0x01B36,0x01B3A); (0x01B3C,0x01B3C); (0x01B42,0x01B42); (0x01B6B,0x01B73); (0x01B80,0x01B81); (0x01BA2,0x01BA5); (0x01BA8,0x01BA9); (0x01BAB,0x01BAD); (0x01BE6,0x01BE6); (0x01BE8,0x01BE9); (0x01BED,0x01BED); (0x01BEF,0x01BF1); (0x01C2C,0x01C33); (0x01C36,0x01C37); (0x01CD0,0x01CD2); (0x01CD4,0x01CE0); (0x01CE2,0x01CE8); (0x01CED,0x01CED); (0x01CF4,0x01CF4); (0x01CF8,0x01CF9); (0x01DC0,0x01DF5); (0x01DFB,0x01DFF); (0x020D0,0x020DC); (0x020E1,0x020E1); (0x020E5,0x020F0); (0x02CEF,0x02CF1); (0x02D7F,0x02D7F); (0x02DE0,0x02DFF); (0x0302A,0x0302D); (0x03099,0x0309A); (0x0A66F,0x0A66F); (0x0A674,0x0A67D); (0x0A69E,0x0A69F); (0x0A6F0,0x0A6F1); (0x0A802,0x0A802); (0x0A806,0x0A806); (0x0A80B,0x0A80B); (0x0A825,0x0A826); (0x0A8C4,0x0A8C5); (0x0A8E0,0x0A8F1); (0x0A926,0x0A92D); (0x0A947,0x0A951); (0x0A980,0x0A982); (0x0A9B3,0x0A9B3); (0x0A9B6,0x0A9B9); (0x0A9BC,0x0A9BC); (0x0A9E5,0x0A9E5); (0x0AA29,0x0AA2E); (0x0AA31,0x0AA32); (0x0AA35,0x0AA36); (0x0AA43,0x0AA43); 
(0x0AA4C,0x0AA4C); (0x0AA7C,0x0AA7C); (0x0AAB0,0x0AAB0); (0x0AAB2,0x0AAB4); (0x0AAB7,0x0AAB8); (0x0AABE,0x0AABF); (0x0AAC1,0x0AAC1); (0x0AAEC,0x0AAED); (0x0AAF6,0x0AAF6); (0x0ABE5,0x0ABE5); (0x0ABE8,0x0ABE8); (0x0ABED,0x0ABED); (0x0FB1E,0x0FB1E); (0x0FE00,0x0FE0F); (0x0FE20,0x0FE2F); (0x101FD,0x101FD); (0x102E0,0x102E0); (0x10376,0x1037A); (0x10A01,0x10A03); (0x10A05,0x10A06); (0x10A0C,0x10A0F); (0x10A38,0x10A3A); (0x10A3F,0x10A3F); (0x10AE5,0x10AE6); (0x11001,0x11001); (0x11038,0x11046); (0x1107F,0x11081); (0x110B3,0x110B6); (0x110B9,0x110BA); (0x11100,0x11102); (0x11127,0x1112B); (0x1112D,0x11134); (0x11173,0x11173); (0x11180,0x11181); (0x111B6,0x111BE); (0x111CA,0x111CC); (0x1122F,0x11231); (0x11234,0x11234); (0x11236,0x11237); (0x1123E,0x1123E); (0x112DF,0x112DF); (0x112E3,0x112EA); (0x11300,0x11301); (0x1133C,0x1133C); (0x11340,0x11340); (0x11366,0x1136C); (0x11370,0x11374); (0x11438,0x1143F); (0x11442,0x11444); (0x11446,0x11446); (0x114B3,0x114B8); (0x114BA,0x114BA); (0x114BF,0x114C0); (0x114C2,0x114C3); (0x115B2,0x115B5); (0x115BC,0x115BD); (0x115BF,0x115C0); (0x115DC,0x115DD); (0x11633,0x1163A); (0x1163D,0x1163D); (0x1163F,0x11640); (0x116AB,0x116AB); (0x116AD,0x116AD); (0x116B0,0x116B5); (0x116B7,0x116B7); (0x1171D,0x1171F); (0x11722,0x11725); (0x11727,0x1172B); (0x11C30,0x11C36); (0x11C38,0x11C3D); (0x11C3F,0x11C3F); (0x11C92,0x11CA7); (0x11CAA,0x11CB0); (0x11CB2,0x11CB3); (0x11CB5,0x11CB6); (0x16AF0,0x16AF4); (0x16B30,0x16B36); (0x16F8F,0x16F92); (0x1BC9D,0x1BC9E); (0x1D167,0x1D169); (0x1D17B,0x1D182); (0x1D185,0x1D18B); (0x1D1AA,0x1D1AD); (0x1D242,0x1D244); (0x1DA00,0x1DA36); (0x1DA3B,0x1DA6C); (0x1DA75,0x1DA75); (0x1DA84,0x1DA84); (0x1DA9B,0x1DA9F); (0x1DAA1,0x1DAAF); (0x1E000,0x1E006); (0x1E008,0x1E018); (0x1E01B,0x1E021); (0x1E023,0x1E024); (0x1E026,0x1E02A); (0x1E8D0,0x1E8D6); (0x1E944,0x1E94A) ] (* Mark, Spacing Combining *) let mc = [ (0x00903,0x00903); (0x0093B,0x0093B); (0x0093E,0x00940); (0x00949,0x0094C); (0x0094E,0x0094F); (0x00982,0x00983); (0x009BE,0x009C0); (0x009C7,0x009C8); (0x009CB,0x009CC); (0x009D7,0x009D7); (0x00A03,0x00A03); (0x00A3E,0x00A40); (0x00A83,0x00A83); (0x00ABE,0x00AC0); (0x00AC9,0x00AC9); (0x00ACB,0x00ACC); (0x00B02,0x00B03); (0x00B3E,0x00B3E); (0x00B40,0x00B40); (0x00B47,0x00B48); (0x00B4B,0x00B4C); (0x00B57,0x00B57); (0x00BBE,0x00BBF); (0x00BC1,0x00BC2); (0x00BC6,0x00BC8); (0x00BCA,0x00BCC); (0x00BD7,0x00BD7); (0x00C01,0x00C03); (0x00C41,0x00C44); (0x00C82,0x00C83); (0x00CBE,0x00CBE); (0x00CC0,0x00CC4); (0x00CC7,0x00CC8); (0x00CCA,0x00CCB); (0x00CD5,0x00CD6); (0x00D02,0x00D03); (0x00D3E,0x00D40); (0x00D46,0x00D48); (0x00D4A,0x00D4C); (0x00D57,0x00D57); (0x00D82,0x00D83); (0x00DCF,0x00DD1); (0x00DD8,0x00DDF); (0x00DF2,0x00DF3); (0x00F3E,0x00F3F); (0x00F7F,0x00F7F); (0x0102B,0x0102C); (0x01031,0x01031); (0x01038,0x01038); (0x0103B,0x0103C); (0x01056,0x01057); (0x01062,0x01064); (0x01067,0x0106D); (0x01083,0x01084); (0x01087,0x0108C); (0x0108F,0x0108F); (0x0109A,0x0109C); (0x017B6,0x017B6); (0x017BE,0x017C5); (0x017C7,0x017C8); (0x01923,0x01926); (0x01929,0x0192B); (0x01930,0x01931); (0x01933,0x01938); (0x01A19,0x01A1A); (0x01A55,0x01A55); (0x01A57,0x01A57); (0x01A61,0x01A61); (0x01A63,0x01A64); (0x01A6D,0x01A72); (0x01B04,0x01B04); (0x01B35,0x01B35); (0x01B3B,0x01B3B); (0x01B3D,0x01B41); (0x01B43,0x01B44); (0x01B82,0x01B82); (0x01BA1,0x01BA1); (0x01BA6,0x01BA7); (0x01BAA,0x01BAA); (0x01BE7,0x01BE7); (0x01BEA,0x01BEC); (0x01BEE,0x01BEE); (0x01BF2,0x01BF3); (0x01C24,0x01C2B); (0x01C34,0x01C35); (0x01CE1,0x01CE1); (0x01CF2,0x01CF3); 
(0x0302E,0x0302F); (0x0A823,0x0A824); (0x0A827,0x0A827); (0x0A880,0x0A881); (0x0A8B4,0x0A8C3); (0x0A952,0x0A953); (0x0A983,0x0A983); (0x0A9B4,0x0A9B5); (0x0A9BA,0x0A9BB); (0x0A9BD,0x0A9C0); (0x0AA2F,0x0AA30); (0x0AA33,0x0AA34); (0x0AA4D,0x0AA4D); (0x0AA7B,0x0AA7B); (0x0AA7D,0x0AA7D); (0x0AAEB,0x0AAEB); (0x0AAEE,0x0AAEF); (0x0AAF5,0x0AAF5); (0x0ABE3,0x0ABE4); (0x0ABE6,0x0ABE7); (0x0ABE9,0x0ABEA); (0x0ABEC,0x0ABEC); (0x11000,0x11000); (0x11002,0x11002); (0x11082,0x11082); (0x110B0,0x110B2); (0x110B7,0x110B8); (0x1112C,0x1112C); (0x11182,0x11182); (0x111B3,0x111B5); (0x111BF,0x111C0); (0x1122C,0x1122E); (0x11232,0x11233); (0x11235,0x11235); (0x112E0,0x112E2); (0x11302,0x11303); (0x1133E,0x1133F); (0x11341,0x11344); (0x11347,0x11348); (0x1134B,0x1134D); (0x11357,0x11357); (0x11362,0x11363); (0x11435,0x11437); (0x11440,0x11441); (0x11445,0x11445); (0x114B0,0x114B2); (0x114B9,0x114B9); (0x114BB,0x114BE); (0x114C1,0x114C1); (0x115AF,0x115B1); (0x115B8,0x115BB); (0x115BE,0x115BE); (0x11630,0x11632); (0x1163B,0x1163C); (0x1163E,0x1163E); (0x116AC,0x116AC); (0x116AE,0x116AF); (0x116B6,0x116B6); (0x11720,0x11721); (0x11726,0x11726); (0x11C2F,0x11C2F); (0x11C3E,0x11C3E); (0x11CA9,0x11CA9); (0x11CB1,0x11CB1); (0x11CB4,0x11CB4); (0x16F51,0x16F7E); (0x1D165,0x1D166) ] (* Mark, Enclosing *) let me = [ (0x00488,0x00489); (0x01ABE,0x01ABE); (0x020DD,0x020E0); (0x020E2,0x020E4) ] (* Number, Decimal Digit *) let nd = [ (0x00030,0x00039); (0x00660,0x00669); (0x006F0,0x006F9); (0x007C0,0x007C9); (0x00966,0x0096F); (0x009E6,0x009EF); (0x00A66,0x00A6F); (0x00AE6,0x00AEF); (0x00B66,0x00B6F); (0x00BE6,0x00BEF); (0x00C66,0x00C6F); (0x00CE6,0x00CEF); (0x00D66,0x00D6F); (0x00DE6,0x00DEF); (0x00E50,0x00E59); (0x00ED0,0x00ED9); (0x00F20,0x00F29); (0x01040,0x01049); (0x01090,0x01099); (0x017E0,0x017E9); (0x01810,0x01819); (0x01946,0x0194F); (0x019D0,0x019D9); (0x01A80,0x01A89); (0x01A90,0x01A99); (0x01B50,0x01B59); (0x01BB0,0x01BB9); (0x01C40,0x01C49); (0x01C50,0x01C59); (0x0A620,0x0A629); (0x0A8D0,0x0A8D9); (0x0A900,0x0A909); (0x0A9D0,0x0A9D9); (0x0A9F0,0x0A9F9); (0x0AA50,0x0AA59); (0x0ABF0,0x0ABF9); (0x0FF10,0x0FF19); (0x104A0,0x104A9); (0x11066,0x1106F); (0x110F0,0x110F9); (0x11136,0x1113F); (0x111D0,0x111D9); (0x112F0,0x112F9); (0x11450,0x11459); (0x114D0,0x114D9); (0x11650,0x11659); (0x116C0,0x116C9); (0x11730,0x11739); (0x118E0,0x118E9); (0x11C50,0x11C59); (0x16A60,0x16A69); (0x16B50,0x16B59); (0x1D7CE,0x1D7FF) ] (* Number, Letter *) let nl = [ (0x016EE,0x016F0); (0x02160,0x02182); (0x02185,0x02188); (0x03007,0x03007); (0x03021,0x03029); (0x03038,0x0303A); (0x0A6E6,0x0A6EF); (0x10140,0x10174); (0x10341,0x10341); (0x1034A,0x1034A); (0x103D1,0x103D5) ] (* Number, Other *) let no = [ (0x000B2,0x000B3); (0x000B9,0x000B9); (0x000BC,0x000BE); (0x009F4,0x009F9); (0x00B72,0x00B77); (0x00BF0,0x00BF2); (0x00C78,0x00C7E); (0x00D58,0x00D5E); (0x00D70,0x00D78); (0x00F2A,0x00F33); (0x01369,0x0137C); (0x017F0,0x017F9); (0x019DA,0x019DA); (0x02070,0x02070); (0x02074,0x02079); (0x02080,0x02089); (0x02150,0x0215F); (0x02189,0x02189); (0x02460,0x0249B); (0x024EA,0x024FF); (0x02776,0x02793); (0x02CFD,0x02CFD); (0x03192,0x03195); (0x03220,0x03229); (0x03248,0x0324F); (0x03251,0x0325F); (0x03280,0x03289); (0x032B1,0x032BF); (0x0A830,0x0A835); (0x10107,0x10133); (0x10175,0x10178); (0x1018A,0x1018B); (0x102E1,0x102FB); (0x10320,0x10323); (0x10858,0x1085F); (0x10879,0x1087F); (0x108A7,0x108AF); (0x108FB,0x108FF); (0x10916,0x1091B); (0x109BC,0x109BD); (0x109C0,0x109CF); (0x109D2,0x109FF); (0x10A40,0x10A47); (0x10A7D,0x10A7E); 
(0x10A9D,0x10A9F); (0x10AEB,0x10AEF); (0x10B58,0x10B5F); (0x10B78,0x10B7F); (0x10BA9,0x10BAF); (0x10CFA,0x10CFF); (0x10E60,0x10E7E); (0x11052,0x11065); (0x111E1,0x111F4); (0x1173A,0x1173B); (0x118EA,0x118F2); (0x11C5A,0x11C6C); (0x16B5B,0x16B61); (0x1D360,0x1D371); (0x1E8C7,0x1E8CF) ] (* Separator, Space *) let zs = [ (0x00020,0x00020); (0x000A0,0x000A0); (0x01680,0x01680); (0x02000,0x0200A); (0x0202F,0x0202F); (0x0205F,0x0205F) ] (* Separator, Line *) let zl = [ ] (* Separator, Paragraph *) let zp = [ ] (* Other, Control *) let cc = [ (0x00000,0x0001F) ] (* Other, Format *) let cf = [ (0x000AD,0x000AD); (0x00600,0x00605); (0x0061C,0x0061C); (0x006DD,0x006DD); (0x0070F,0x0070F); (0x008E2,0x008E2); (0x0180E,0x0180E); (0x0200B,0x0200F); (0x0202A,0x0202E); (0x02060,0x02064); (0x02066,0x0206F); (0x0FEFF,0x0FEFF); (0x0FFF9,0x0FFFB); (0x110BD,0x110BD); (0x1BCA0,0x1BCA3); (0x1D173,0x1D17A); (0xE0001,0xE0001) ] (* Other, Surrogate *) let cs = [ ] (* Other, Private Use *) let co = [ (0x0E000,0x0F8FF); (0xF0000,0xFFFFD) ] (* Other, Not Assigned *) let cn = [ (0x00378,0x00379); (0x00380,0x00383); (0x0038B,0x0038B); (0x0038D,0x0038D); (0x003A2,0x003A2); (0x00530,0x00530); (0x00557,0x00558); (0x00560,0x00560); (0x00588,0x00588); (0x0058B,0x0058C); (0x00590,0x00590); (0x005C8,0x005CF); (0x005EB,0x005EF); (0x005F5,0x005FF); (0x0061D,0x0061D); (0x0070E,0x0070E); (0x0074B,0x0074C); (0x007B2,0x007BF); (0x007FB,0x007FF); (0x0082E,0x0082F); (0x0083F,0x0083F); (0x0085C,0x0085D); (0x0085F,0x0089F); (0x008B5,0x008B5); (0x008BE,0x008D3); (0x00984,0x00984); (0x0098D,0x0098E); (0x00991,0x00992); (0x009A9,0x009A9); (0x009B1,0x009B1); (0x009B3,0x009B5); (0x009BA,0x009BB); (0x009C5,0x009C6); (0x009C9,0x009CA); (0x009CF,0x009D6); (0x009D8,0x009DB); (0x009DE,0x009DE); (0x009E4,0x009E5); (0x009FC,0x00A00); (0x00A04,0x00A04); (0x00A0B,0x00A0E); (0x00A11,0x00A12); (0x00A29,0x00A29); (0x00A31,0x00A31); (0x00A34,0x00A34); (0x00A37,0x00A37); (0x00A3A,0x00A3B); (0x00A3D,0x00A3D); (0x00A43,0x00A46); (0x00A49,0x00A4A); (0x00A4E,0x00A50); (0x00A52,0x00A58); (0x00A5D,0x00A5D); (0x00A5F,0x00A65); (0x00A76,0x00A80); (0x00A84,0x00A84); (0x00A8E,0x00A8E); (0x00A92,0x00A92); (0x00AA9,0x00AA9); (0x00AB1,0x00AB1); (0x00AB4,0x00AB4); (0x00ABA,0x00ABB); (0x00AC6,0x00AC6); (0x00ACA,0x00ACA); (0x00ACE,0x00ACF); (0x00AD1,0x00ADF); (0x00AE4,0x00AE5); (0x00AF2,0x00AF8); (0x00AFA,0x00B00); (0x00B04,0x00B04); (0x00B0D,0x00B0E); (0x00B11,0x00B12); (0x00B29,0x00B29); (0x00B31,0x00B31); (0x00B34,0x00B34); (0x00B3A,0x00B3B); (0x00B45,0x00B46); (0x00B49,0x00B4A); (0x00B4E,0x00B55); (0x00B58,0x00B5B); (0x00B5E,0x00B5E); (0x00B64,0x00B65); (0x00B78,0x00B81); (0x00B84,0x00B84); (0x00B8B,0x00B8D); (0x00B91,0x00B91); (0x00B96,0x00B98); (0x00B9B,0x00B9B); (0x00B9D,0x00B9D); (0x00BA0,0x00BA2); (0x00BA5,0x00BA7); (0x00BAB,0x00BAD); (0x00BBA,0x00BBD); (0x00BC3,0x00BC5); (0x00BC9,0x00BC9); (0x00BCE,0x00BCF); (0x00BD1,0x00BD6); (0x00BD8,0x00BE5); (0x00BFB,0x00BFF); (0x00C04,0x00C04); (0x00C0D,0x00C0D); (0x00C11,0x00C11); (0x00C29,0x00C29); (0x00C3A,0x00C3C); (0x00C45,0x00C45); (0x00C49,0x00C49); (0x00C4E,0x00C54); (0x00C57,0x00C57); (0x00C5B,0x00C5F); (0x00C64,0x00C65); (0x00C70,0x00C77); (0x00C84,0x00C84); (0x00C8D,0x00C8D); (0x00C91,0x00C91); (0x00CA9,0x00CA9); (0x00CB4,0x00CB4); (0x00CBA,0x00CBB); (0x00CC5,0x00CC5); (0x00CC9,0x00CC9); (0x00CCE,0x00CD4); (0x00CD7,0x00CDD); (0x00CDF,0x00CDF); (0x00CE4,0x00CE5); (0x00CF0,0x00CF0); (0x00CF3,0x00D00); (0x00D04,0x00D04); (0x00D0D,0x00D0D); (0x00D11,0x00D11); (0x00D3B,0x00D3C); (0x00D45,0x00D45); (0x00D49,0x00D49); 
(0x00D50,0x00D53); (0x00D64,0x00D65); (0x00D80,0x00D81); (0x00D84,0x00D84); (0x00D97,0x00D99); (0x00DB2,0x00DB2); (0x00DBC,0x00DBC); (0x00DBE,0x00DBF); (0x00DC7,0x00DC9); (0x00DCB,0x00DCE); (0x00DD5,0x00DD5); (0x00DD7,0x00DD7); (0x00DE0,0x00DE5); (0x00DF0,0x00DF1); (0x00DF5,0x00E00); (0x00E3B,0x00E3E); (0x00E5C,0x00E80); (0x00E83,0x00E83); (0x00E85,0x00E86); (0x00E89,0x00E89); (0x00E8B,0x00E8C); (0x00E8E,0x00E93); (0x00E98,0x00E98); (0x00EA0,0x00EA0); (0x00EA4,0x00EA4); (0x00EA6,0x00EA6); (0x00EA8,0x00EA9); (0x00EAC,0x00EAC); (0x00EBA,0x00EBA); (0x00EBE,0x00EBF); (0x00EC5,0x00EC5); (0x00EC7,0x00EC7); (0x00ECE,0x00ECF); (0x00EDA,0x00EDB); (0x00EE0,0x00EFF); (0x00F48,0x00F48); (0x00F6D,0x00F70); (0x00F98,0x00F98); (0x00FBD,0x00FBD); (0x00FCD,0x00FCD); (0x00FDB,0x00FFF); (0x010C6,0x010C6); (0x010C8,0x010CC); (0x010CE,0x010CF); (0x01249,0x01249); (0x0124E,0x0124F); (0x01257,0x01257); (0x01259,0x01259); (0x0125E,0x0125F); (0x01289,0x01289); (0x0128E,0x0128F); (0x012B1,0x012B1); (0x012B6,0x012B7); (0x012BF,0x012BF); (0x012C1,0x012C1); (0x012C6,0x012C7); (0x012D7,0x012D7); (0x01311,0x01311); (0x01316,0x01317); (0x0135B,0x0135C); (0x0137D,0x0137F); (0x0139A,0x0139F); (0x013F6,0x013F7); (0x013FE,0x013FF); (0x0169D,0x0169F); (0x016F9,0x016FF); (0x0170D,0x0170D); (0x01715,0x0171F); (0x01737,0x0173F); (0x01754,0x0175F); (0x0176D,0x0176D); (0x01771,0x01771); (0x01774,0x0177F); (0x017DE,0x017DF); (0x017EA,0x017EF); (0x017FA,0x017FF); (0x0180F,0x0180F); (0x0181A,0x0181F); (0x01878,0x0187F); (0x018AB,0x018AF); (0x018F6,0x018FF); (0x0191F,0x0191F); (0x0192C,0x0192F); (0x0193C,0x0193F); (0x01941,0x01943); (0x0196E,0x0196F); (0x01975,0x0197F); (0x019AC,0x019AF); (0x019CA,0x019CF); (0x019DB,0x019DD); (0x01A1C,0x01A1D); (0x01A5F,0x01A5F); (0x01A7D,0x01A7E); (0x01A8A,0x01A8F); (0x01A9A,0x01A9F); (0x01AAE,0x01AAF); (0x01ABF,0x01AFF); (0x01B4C,0x01B4F); (0x01B7D,0x01B7F); (0x01BF4,0x01BFB); (0x01C38,0x01C3A); (0x01C4A,0x01C4C); (0x01C89,0x01CBF); (0x01CC8,0x01CCF); (0x01CF7,0x01CF7); (0x01CFA,0x01CFF); (0x01DF6,0x01DFA); (0x01F16,0x01F17); (0x01F1E,0x01F1F); (0x01F46,0x01F47); (0x01F4E,0x01F4F); (0x01F58,0x01F58); (0x01F5A,0x01F5A); (0x01F5C,0x01F5C); (0x01F5E,0x01F5E); (0x01F7E,0x01F7F); (0x01FB5,0x01FB5); (0x01FC5,0x01FC5); (0x01FD4,0x01FD5); (0x01FDC,0x01FDC); (0x01FF0,0x01FF1); (0x01FF5,0x01FF5); (0x01FFF,0x01FFF); (0x02065,0x02065); (0x02072,0x02073); (0x0208F,0x0208F); (0x0209D,0x0209F); (0x020BF,0x020CF); (0x020F1,0x020FF); (0x0218C,0x0218F); (0x023FF,0x023FF); (0x02427,0x0243F); (0x0244B,0x0245F); (0x02B74,0x02B75); (0x02B96,0x02B97); (0x02BBA,0x02BBC); (0x02BC9,0x02BC9); (0x02BD2,0x02BEB); (0x02BF0,0x02BFF); (0x02C2F,0x02C2F); (0x02C5F,0x02C5F); (0x02CF4,0x02CF8); (0x02D26,0x02D26); (0x02D28,0x02D2C); (0x02D2E,0x02D2F); (0x02D68,0x02D6E); (0x02D71,0x02D7E); (0x02D97,0x02D9F); (0x02DA7,0x02DA7); (0x02DAF,0x02DAF); (0x02DB7,0x02DB7); (0x02DBF,0x02DBF); (0x02DC7,0x02DC7); (0x02DCF,0x02DCF); (0x02DD7,0x02DD7); (0x02DDF,0x02DDF); (0x02E45,0x02E7F); (0x02E9A,0x02E9A); (0x02EF4,0x02EFF); (0x02FD6,0x02FEF); (0x02FFC,0x02FFF); (0x03040,0x03040); (0x03097,0x03098); (0x03100,0x03104); (0x0312E,0x03130); (0x0318F,0x0318F); (0x031BB,0x031BF); (0x031E4,0x031EF); (0x0321F,0x0321F); (0x032FF,0x032FF); (0x04DB6,0x04DBF); (0x09FD6,0x09FFF); (0x0A48D,0x0A48F); (0x0A4C7,0x0A4CF); (0x0A62C,0x0A63F); (0x0A6F8,0x0A6FF); (0x0A7AF,0x0A7AF); (0x0A7B8,0x0A7F6); (0x0A82C,0x0A82F); (0x0A83A,0x0A83F); (0x0A878,0x0A87F); (0x0A8C6,0x0A8CD); (0x0A8DA,0x0A8DF); (0x0A8FE,0x0A8FF); (0x0A954,0x0A95E); (0x0A97D,0x0A97F); (0x0A9CE,0x0A9CE); 
(0x0A9DA,0x0A9DD); (0x0A9FF,0x0A9FF); (0x0AA37,0x0AA3F); (0x0AA4E,0x0AA4F); (0x0AA5A,0x0AA5B); (0x0AAC3,0x0AADA); (0x0AAF7,0x0AB00); (0x0AB07,0x0AB08); (0x0AB0F,0x0AB10); (0x0AB17,0x0AB1F); (0x0AB27,0x0AB27); (0x0AB2F,0x0AB2F); (0x0AB66,0x0AB6F); (0x0ABEE,0x0ABEF); (0x0ABFA,0x0ABFF); (0x0D7A4,0x0D7AF); (0x0D7C7,0x0D7CA); (0x0D7FC,0x0D7FF); (0x0FA6E,0x0FA6F); (0x0FADA,0x0FAFF); (0x0FB07,0x0FB12); (0x0FB18,0x0FB1C); (0x0FB37,0x0FB37); (0x0FB3D,0x0FB3D); (0x0FB3F,0x0FB3F); (0x0FB42,0x0FB42); (0x0FB45,0x0FB45); (0x0FBC2,0x0FBD2); (0x0FD40,0x0FD4F); (0x0FD90,0x0FD91); (0x0FDC8,0x0FDEF); (0x0FDFE,0x0FDFF); (0x0FE1A,0x0FE1F); (0x0FE53,0x0FE53); (0x0FE67,0x0FE67); (0x0FE6C,0x0FE6F); (0x0FE75,0x0FE75); (0x0FEFD,0x0FEFE); (0x0FF00,0x0FF00); (0x0FFBF,0x0FFC1); (0x0FFC8,0x0FFC9); (0x0FFD0,0x0FFD1); (0x0FFD8,0x0FFD9); (0x0FFDD,0x0FFDF); (0x0FFE7,0x0FFE7); (0x0FFEF,0x0FFF8); (0x0FFFE,0x0FFFF); (0x1000C,0x1000C); (0x10027,0x10027); (0x1003B,0x1003B); (0x1003E,0x1003E); (0x1004E,0x1004F); (0x1005E,0x1007F); (0x100FB,0x100FF); (0x10103,0x10106); (0x10134,0x10136); (0x1018F,0x1018F); (0x1019C,0x1019F); (0x101A1,0x101CF); (0x101FE,0x1027F); (0x1029D,0x1029F); (0x102D1,0x102DF); (0x102FC,0x102FF); (0x10324,0x1032F); (0x1034B,0x1034F); (0x1037B,0x1037F); (0x1039E,0x1039E); (0x103C4,0x103C7); (0x103D6,0x103FF); (0x1049E,0x1049F); (0x104AA,0x104AF); (0x104D4,0x104D7); (0x104FC,0x104FF); (0x10528,0x1052F); (0x10564,0x1056E); (0x10570,0x105FF); (0x10737,0x1073F); (0x10756,0x1075F); (0x10768,0x107FF); (0x10806,0x10807); (0x10809,0x10809); (0x10836,0x10836); (0x10839,0x1083B); (0x1083D,0x1083E); (0x10856,0x10856); (0x1089F,0x108A6); (0x108B0,0x108DF); (0x108F3,0x108F3); (0x108F6,0x108FA); (0x1091C,0x1091E); (0x1093A,0x1093E); (0x10940,0x1097F); (0x109B8,0x109BB); (0x109D0,0x109D1); (0x10A04,0x10A04); (0x10A07,0x10A0B); (0x10A14,0x10A14); (0x10A18,0x10A18); (0x10A34,0x10A37); (0x10A3B,0x10A3E); (0x10A48,0x10A4F); (0x10A59,0x10A5F); (0x10AA0,0x10ABF); (0x10AE7,0x10AEA); (0x10AF7,0x10AFF); (0x10B36,0x10B38); (0x10B56,0x10B57); (0x10B73,0x10B77); (0x10B92,0x10B98); (0x10B9D,0x10BA8); (0x10BB0,0x10BFF); (0x10C49,0x10C7F); (0x10CB3,0x10CBF); (0x10CF3,0x10CF9); (0x10D00,0x10E5F); (0x10E7F,0x10FFF); (0x1104E,0x11051); (0x11070,0x1107E); (0x110C2,0x110CF); (0x110E9,0x110EF); (0x110FA,0x110FF); (0x11135,0x11135); (0x11144,0x1114F); (0x11177,0x1117F); (0x111CE,0x111CF); (0x111E0,0x111E0); (0x111F5,0x111FF); (0x11212,0x11212); (0x1123F,0x1127F); (0x11287,0x11287); (0x11289,0x11289); (0x1128E,0x1128E); (0x1129E,0x1129E); (0x112AA,0x112AF); (0x112EB,0x112EF); (0x112FA,0x112FF); (0x11304,0x11304); (0x1130D,0x1130E); (0x11311,0x11312); (0x11329,0x11329); (0x11331,0x11331); (0x11334,0x11334); (0x1133A,0x1133B); (0x11345,0x11346); (0x11349,0x1134A); (0x1134E,0x1134F); (0x11351,0x11356); (0x11358,0x1135C); (0x11364,0x11365); (0x1136D,0x1136F); (0x11375,0x113FF); (0x1145A,0x1145A); (0x1145C,0x1145C); (0x1145E,0x1147F); (0x114C8,0x114CF); (0x114DA,0x1157F); (0x115B6,0x115B7); (0x115DE,0x115FF); (0x11645,0x1164F); (0x1165A,0x1165F); (0x1166D,0x1167F); (0x116B8,0x116BF); (0x116CA,0x116FF); (0x1171A,0x1171C); (0x1172C,0x1172F); (0x11740,0x1189F); (0x118F3,0x118FE); (0x11900,0x11ABF); (0x11AF9,0x11BFF); (0x11C09,0x11C09); (0x11C37,0x11C37); (0x11C46,0x11C4F); (0x11C6D,0x11C6F); (0x11C90,0x11C91); (0x11CA8,0x11CA8); (0x11CB7,0x11FFF); (0x1239A,0x123FF); (0x1246F,0x1246F); (0x12475,0x1247F); (0x12544,0x12FFF); (0x1342F,0x143FF); (0x14647,0x167FF); (0x16A39,0x16A3F); (0x16A5F,0x16A5F); (0x16A6A,0x16A6D); (0x16A70,0x16ACF); (0x16AEE,0x16AEF); 
(0x16AF6,0x16AFF); (0x16B46,0x16B4F); (0x16B5A,0x16B5A); (0x16B62,0x16B62); (0x16B78,0x16B7C); (0x16B90,0x16EFF); (0x16F45,0x16F4F); (0x16F7F,0x16F8E); (0x16FA0,0x16FDF); (0x16FE1,0x16FFF); (0x187ED,0x187FF); (0x18AF3,0x1AFFF); (0x1B002,0x1BBFF); (0x1BC6B,0x1BC6F); (0x1BC7D,0x1BC7F); (0x1BC89,0x1BC8F); (0x1BC9A,0x1BC9B); (0x1BCA4,0x1CFFF); (0x1D0F6,0x1D0FF); (0x1D127,0x1D128); (0x1D1E9,0x1D1FF); (0x1D246,0x1D2FF); (0x1D357,0x1D35F); (0x1D372,0x1D3FF); (0x1D455,0x1D455); (0x1D49D,0x1D49D); (0x1D4A0,0x1D4A1); (0x1D4A3,0x1D4A4); (0x1D4A7,0x1D4A8); (0x1D4AD,0x1D4AD); (0x1D4BA,0x1D4BA); (0x1D4BC,0x1D4BC); (0x1D4C4,0x1D4C4); (0x1D506,0x1D506); (0x1D50B,0x1D50C); (0x1D515,0x1D515); (0x1D51D,0x1D51D); (0x1D53A,0x1D53A); (0x1D53F,0x1D53F); (0x1D545,0x1D545); (0x1D547,0x1D549); (0x1D551,0x1D551); (0x1D6A6,0x1D6A7); (0x1D7CC,0x1D7CD); (0x1DA8C,0x1DA9A); (0x1DAA0,0x1DAA0); (0x1DAB0,0x1DFFF); (0x1E007,0x1E007); (0x1E019,0x1E01A); (0x1E022,0x1E022); (0x1E025,0x1E025); (0x1E02B,0x1E7FF); (0x1E8C5,0x1E8C6); (0x1E8D7,0x1E8FF); (0x1E94B,0x1E94F); (0x1E95A,0x1E95D); (0x1E960,0x1EDFF); (0x1EE04,0x1EE04); (0x1EE20,0x1EE20); (0x1EE23,0x1EE23); (0x1EE25,0x1EE26); (0x1EE28,0x1EE28); (0x1EE33,0x1EE33); (0x1EE38,0x1EE38); (0x1EE3A,0x1EE3A); (0x1EE3C,0x1EE41); (0x1EE43,0x1EE46); (0x1EE48,0x1EE48); (0x1EE4A,0x1EE4A); (0x1EE4C,0x1EE4C); (0x1EE50,0x1EE50); (0x1EE53,0x1EE53); (0x1EE55,0x1EE56); (0x1EE58,0x1EE58); (0x1EE5A,0x1EE5A); (0x1EE5C,0x1EE5C); (0x1EE5E,0x1EE5E); (0x1EE60,0x1EE60); (0x1EE63,0x1EE63); (0x1EE65,0x1EE66); (0x1EE6B,0x1EE6B); (0x1EE73,0x1EE73); (0x1EE78,0x1EE78); (0x1EE7D,0x1EE7D); (0x1EE7F,0x1EE7F); (0x1EE8A,0x1EE8A); (0x1EE9C,0x1EEA0); (0x1EEA4,0x1EEA4); (0x1EEAA,0x1EEAA); (0x1EEBC,0x1EEEF); (0x1EEF2,0x1EFFF); (0x1F02C,0x1F02F); (0x1F094,0x1F09F); (0x1F0AF,0x1F0B0); (0x1F0C0,0x1F0C0); (0x1F0D0,0x1F0D0); (0x1F0F6,0x1F0FF); (0x1F10D,0x1F10F); (0x1F12F,0x1F12F); (0x1F16C,0x1F16F); (0x1F1AD,0x1F1E5); (0x1F203,0x1F20F); (0x1F23C,0x1F23F); (0x1F249,0x1F24F); (0x1F252,0x1F2FF); (0x1F6D3,0x1F6DF); (0x1F6ED,0x1F6EF); (0x1F6F7,0x1F6FF); (0x1F774,0x1F77F); (0x1F7D5,0x1F7FF); (0x1F80C,0x1F80F); (0x1F848,0x1F84F); (0x1F85A,0x1F85F); (0x1F888,0x1F88F); (0x1F8AE,0x1F90F); (0x1F91F,0x1F91F); (0x1F928,0x1F92F); (0x1F931,0x1F932); (0x1F93F,0x1F93F); (0x1F94C,0x1F94F); (0x1F95F,0x1F97F); (0x1F992,0x1F9BF); (0x1F9C1,0x1FFFF); (0x2A6D7,0x2A6FF); (0x2B735,0x2B73F); (0x2B81E,0x2B81F); (0x2CEA2,0x2F7FF); (0x2FA1E,0xE0000); (0xE0002,0xE001F); (0xE0080,0xE00FF); (0xE01F0,0xEFFFF); (0xFFFFE,0xFFFFF) ] (* Letter, Modifier *) let lm = [ (0x002B0,0x002C1); (0x002C6,0x002D1); (0x002E0,0x002E4); (0x002EC,0x002EC); (0x002EE,0x002EE); (0x00374,0x00374); (0x0037A,0x0037A); (0x00559,0x00559); (0x00640,0x00640); (0x006E5,0x006E6); (0x007F4,0x007F5); (0x007FA,0x007FA); (0x0081A,0x0081A); (0x00824,0x00824); (0x00828,0x00828); (0x00971,0x00971); (0x00E46,0x00E46); (0x00EC6,0x00EC6); (0x010FC,0x010FC); (0x017D7,0x017D7); (0x01843,0x01843); (0x01AA7,0x01AA7); (0x01C78,0x01C7D); (0x01D2C,0x01D6A); (0x01D78,0x01D78); (0x01D9B,0x01DBF); (0x02071,0x02071); (0x0207F,0x0207F); (0x02090,0x0209C); (0x02C7C,0x02C7D); (0x02D6F,0x02D6F); (0x02E2F,0x02E2F); (0x03005,0x03005); (0x03031,0x03035); (0x0303B,0x0303B); (0x0309D,0x0309E); (0x030FC,0x030FE); (0x0A015,0x0A015); (0x0A4F8,0x0A4FD); (0x0A60C,0x0A60C); (0x0A67F,0x0A67F); (0x0A69C,0x0A69D); (0x0A717,0x0A71F); (0x0A770,0x0A770); (0x0A788,0x0A788); (0x0A7F8,0x0A7F9); (0x0A9CF,0x0A9CF); (0x0A9E6,0x0A9E6); (0x0AA70,0x0AA70); (0x0AADD,0x0AADD); (0x0AAF3,0x0AAF4); (0x0AB5C,0x0AB5F); (0x0FF70,0x0FF70); 
(0x0FF9E,0x0FF9F); (0x16B40,0x16B43); (0x16F93,0x16F9F) ] (* Letter, Other *) let lo = [ (0x000AA,0x000AA); (0x000BA,0x000BA); (0x001BB,0x001BB); (0x001C0,0x001C3); (0x00294,0x00294); (0x005D0,0x005EA); (0x005F0,0x005F2); (0x00620,0x0063F); (0x00641,0x0064A); (0x0066E,0x0066F); (0x00671,0x006D3); (0x006D5,0x006D5); (0x006EE,0x006EF); (0x006FA,0x006FC); (0x006FF,0x006FF); (0x00710,0x00710); (0x00712,0x0072F); (0x0074D,0x007A5); (0x007B1,0x007B1); (0x007CA,0x007EA); (0x00800,0x00815); (0x00840,0x00858); (0x008A0,0x008B4); (0x008B6,0x008BD); (0x00904,0x00939); (0x0093D,0x0093D); (0x00950,0x00950); (0x00958,0x00961); (0x00972,0x00980); (0x00985,0x0098C); (0x0098F,0x00990); (0x00993,0x009A8); (0x009AA,0x009B0); (0x009B2,0x009B2); (0x009B6,0x009B9); (0x009BD,0x009BD); (0x009CE,0x009CE); (0x009DC,0x009DD); (0x009DF,0x009E1); (0x009F0,0x009F1); (0x00A05,0x00A0A); (0x00A0F,0x00A10); (0x00A13,0x00A28); (0x00A2A,0x00A30); (0x00A32,0x00A33); (0x00A35,0x00A36); (0x00A38,0x00A39); (0x00A59,0x00A5C); (0x00A5E,0x00A5E); (0x00A72,0x00A74); (0x00A85,0x00A8D); (0x00A8F,0x00A91); (0x00A93,0x00AA8); (0x00AAA,0x00AB0); (0x00AB2,0x00AB3); (0x00AB5,0x00AB9); (0x00ABD,0x00ABD); (0x00AD0,0x00AD0); (0x00AE0,0x00AE1); (0x00AF9,0x00AF9); (0x00B05,0x00B0C); (0x00B0F,0x00B10); (0x00B13,0x00B28); (0x00B2A,0x00B30); (0x00B32,0x00B33); (0x00B35,0x00B39); (0x00B3D,0x00B3D); (0x00B5C,0x00B5D); (0x00B5F,0x00B61); (0x00B71,0x00B71); (0x00B83,0x00B83); (0x00B85,0x00B8A); (0x00B8E,0x00B90); (0x00B92,0x00B95); (0x00B99,0x00B9A); (0x00B9C,0x00B9C); (0x00B9E,0x00B9F); (0x00BA3,0x00BA4); (0x00BA8,0x00BAA); (0x00BAE,0x00BB9); (0x00BD0,0x00BD0); (0x00C05,0x00C0C); (0x00C0E,0x00C10); (0x00C12,0x00C28); (0x00C2A,0x00C39); (0x00C3D,0x00C3D); (0x00C58,0x00C5A); (0x00C60,0x00C61); (0x00C80,0x00C80); (0x00C85,0x00C8C); (0x00C8E,0x00C90); (0x00C92,0x00CA8); (0x00CAA,0x00CB3); (0x00CB5,0x00CB9); (0x00CBD,0x00CBD); (0x00CDE,0x00CDE); (0x00CE0,0x00CE1); (0x00CF1,0x00CF2); (0x00D05,0x00D0C); (0x00D0E,0x00D10); (0x00D12,0x00D3A); (0x00D3D,0x00D3D); (0x00D4E,0x00D4E); (0x00D54,0x00D56); (0x00D5F,0x00D61); (0x00D7A,0x00D7F); (0x00D85,0x00D96); (0x00D9A,0x00DB1); (0x00DB3,0x00DBB); (0x00DBD,0x00DBD); (0x00DC0,0x00DC6); (0x00E01,0x00E30); (0x00E32,0x00E33); (0x00E40,0x00E45); (0x00E81,0x00E82); (0x00E84,0x00E84); (0x00E87,0x00E88); (0x00E8A,0x00E8A); (0x00E8D,0x00E8D); (0x00E94,0x00E97); (0x00E99,0x00E9F); (0x00EA1,0x00EA3); (0x00EA5,0x00EA5); (0x00EA7,0x00EA7); (0x00EAA,0x00EAB); (0x00EAD,0x00EB0); (0x00EB2,0x00EB3); (0x00EBD,0x00EBD); (0x00EC0,0x00EC4); (0x00EDC,0x00EDF); (0x00F00,0x00F00); (0x00F40,0x00F47); (0x00F49,0x00F6C); (0x00F88,0x00F8C); (0x01000,0x0102A); (0x0103F,0x0103F); (0x01050,0x01055); (0x0105A,0x0105D); (0x01061,0x01061); (0x01065,0x01066); (0x0106E,0x01070); (0x01075,0x01081); (0x0108E,0x0108E); (0x010D0,0x010FA); (0x010FD,0x01248); (0x0124A,0x0124D); (0x01250,0x01256); (0x01258,0x01258); (0x0125A,0x0125D); (0x01260,0x01288); (0x0128A,0x0128D); (0x01290,0x012B0); (0x012B2,0x012B5); (0x012B8,0x012BE); (0x012C0,0x012C0); (0x012C2,0x012C5); (0x012C8,0x012D6); (0x012D8,0x01310); (0x01312,0x01315); (0x01318,0x0135A); (0x01380,0x0138F); (0x01401,0x0166C); (0x0166F,0x0167F); (0x01681,0x0169A); (0x016A0,0x016EA); (0x016F1,0x016F8); (0x01700,0x0170C); (0x0170E,0x01711); (0x01720,0x01731); (0x01740,0x01751); (0x01760,0x0176C); (0x0176E,0x01770); (0x01780,0x017B3); (0x017DC,0x017DC); (0x01820,0x01842); (0x01844,0x01877); (0x01880,0x01884); (0x01887,0x018A8); (0x018AA,0x018AA); (0x018B0,0x018F5); (0x01900,0x0191E); (0x01950,0x0196D); 
(0x01970,0x01974); (0x01980,0x019AB); (0x019B0,0x019C9); (0x01A00,0x01A16); (0x01A20,0x01A54); (0x01B05,0x01B33); (0x01B45,0x01B4B); (0x01B83,0x01BA0); (0x01BAE,0x01BAF); (0x01BBA,0x01BE5); (0x01C00,0x01C23); (0x01C4D,0x01C4F); (0x01C5A,0x01C77); (0x01CE9,0x01CEC); (0x01CEE,0x01CF1); (0x01CF5,0x01CF6); (0x02135,0x02138); (0x02D30,0x02D67); (0x02D80,0x02D96); (0x02DA0,0x02DA6); (0x02DA8,0x02DAE); (0x02DB0,0x02DB6); (0x02DB8,0x02DBE); (0x02DC0,0x02DC6); (0x02DC8,0x02DCE); (0x02DD0,0x02DD6); (0x02DD8,0x02DDE); (0x03006,0x03006); (0x0303C,0x0303C); (0x03041,0x03096); (0x0309F,0x0309F); (0x030A1,0x030FA); (0x030FF,0x030FF); (0x03105,0x0312D); (0x03131,0x0318E); (0x031A0,0x031BA); (0x031F0,0x031FF); (0x03400,0x04DB5); (0x04E00,0x09FD5); (0x0A000,0x0A014); (0x0A016,0x0A48C); (0x0A4D0,0x0A4F7); (0x0A500,0x0A60B); (0x0A610,0x0A61F); (0x0A62A,0x0A62B); (0x0A66E,0x0A66E); (0x0A6A0,0x0A6E5); (0x0A78F,0x0A78F); (0x0A7F7,0x0A7F7); (0x0A7FB,0x0A801); (0x0A803,0x0A805); (0x0A807,0x0A80A); (0x0A80C,0x0A822); (0x0A840,0x0A873); (0x0A882,0x0A8B3); (0x0A8F2,0x0A8F7); (0x0A8FB,0x0A8FB); (0x0A8FD,0x0A8FD); (0x0A90A,0x0A925); (0x0A930,0x0A946); (0x0A960,0x0A97C); (0x0A984,0x0A9B2); (0x0A9E0,0x0A9E4); (0x0A9E7,0x0A9EF); (0x0A9FA,0x0A9FE); (0x0AA00,0x0AA28); (0x0AA40,0x0AA42); (0x0AA44,0x0AA4B); (0x0AA60,0x0AA6F); (0x0AA71,0x0AA76); (0x0AA7A,0x0AA7A); (0x0AA7E,0x0AAAF); (0x0AAB1,0x0AAB1); (0x0AAB5,0x0AAB6); (0x0AAB9,0x0AABD); (0x0AAC0,0x0AAC0); (0x0AAC2,0x0AAC2); (0x0AADB,0x0AADC); (0x0AAE0,0x0AAEA); (0x0AAF2,0x0AAF2); (0x0AB01,0x0AB06); (0x0AB09,0x0AB0E); (0x0AB11,0x0AB16); (0x0AB20,0x0AB26); (0x0AB28,0x0AB2E); (0x0ABC0,0x0ABE2); (0x0AC00,0x0D7A3); (0x0D7B0,0x0D7C6); (0x0D7CB,0x0D7FB); (0x0F900,0x0FA6D); (0x0FA70,0x0FAD9); (0x0FB1D,0x0FB1D); (0x0FB1F,0x0FB28); (0x0FB2A,0x0FB36); (0x0FB38,0x0FB3C); (0x0FB3E,0x0FB3E); (0x0FB40,0x0FB41); (0x0FB43,0x0FB44); (0x0FB46,0x0FBB1); (0x0FBD3,0x0FD3D); (0x0FD50,0x0FD8F); (0x0FD92,0x0FDC7); (0x0FDF0,0x0FDFB); (0x0FE70,0x0FE74); (0x0FE76,0x0FEFC); (0x0FF66,0x0FF6F); (0x0FF71,0x0FF9D); (0x0FFA0,0x0FFBE); (0x0FFC2,0x0FFC7); (0x0FFCA,0x0FFCF); (0x0FFD2,0x0FFD7); (0x0FFDA,0x0FFDC); (0x10000,0x1000B); (0x1000D,0x10026); (0x10028,0x1003A); (0x1003C,0x1003D); (0x1003F,0x1004D); (0x10050,0x1005D); (0x10080,0x100FA); (0x10280,0x1029C); (0x102A0,0x102D0); (0x10300,0x1031F); (0x10330,0x10340); (0x10342,0x10349); (0x10350,0x10375); (0x10380,0x1039D); (0x103A0,0x103C3); (0x103C8,0x103CF); (0x10450,0x1049D); (0x10500,0x10527); (0x10530,0x10563); (0x10600,0x10736); (0x10740,0x10755); (0x10760,0x10767); (0x10800,0x10805); (0x10808,0x10808); (0x1080A,0x10835); (0x10837,0x10838); (0x1083C,0x1083C); (0x1083F,0x10855); (0x10860,0x10876); (0x10880,0x1089E); (0x108E0,0x108F2); (0x108F4,0x108F5); (0x10900,0x10915); (0x10920,0x10939); (0x10980,0x109B7); (0x109BE,0x109BF); (0x10A00,0x10A00); (0x10A10,0x10A13); (0x10A15,0x10A17); (0x10A19,0x10A33); (0x10A60,0x10A7C); (0x10A80,0x10A9C); (0x10AC0,0x10AC7); (0x10AC9,0x10AE4); (0x10B00,0x10B35); (0x10B40,0x10B55); (0x10B60,0x10B72); (0x10B80,0x10B91); (0x10C00,0x10C48); (0x11003,0x11037); (0x11083,0x110AF); (0x110D0,0x110E8); (0x11103,0x11126); (0x11150,0x11172); (0x11176,0x11176); (0x11183,0x111B2); (0x111C1,0x111C4); (0x111DA,0x111DA); (0x111DC,0x111DC); (0x11200,0x11211); (0x11213,0x1122B); (0x11280,0x11286); (0x11288,0x11288); (0x1128A,0x1128D); (0x1128F,0x1129D); (0x1129F,0x112A8); (0x112B0,0x112DE); (0x11305,0x1130C); (0x1130F,0x11310); (0x11313,0x11328); (0x1132A,0x11330); (0x11332,0x11333); (0x11335,0x11339); (0x1133D,0x1133D); (0x11350,0x11350); 
(0x1135D,0x11361); (0x11400,0x11434); (0x11447,0x1144A); (0x11480,0x114AF); (0x114C4,0x114C5); (0x114C7,0x114C7); (0x11580,0x115AE); (0x115D8,0x115DB); (0x11600,0x1162F); (0x11644,0x11644); (0x11680,0x116AA); (0x11700,0x11719); (0x118FF,0x118FF); (0x11AC0,0x11AF8); (0x11C00,0x11C08); (0x11C0A,0x11C2E); (0x11C40,0x11C40); (0x11C72,0x11C8F); (0x12000,0x12399); (0x12480,0x12543); (0x13000,0x1342E); (0x14400,0x14646); (0x16800,0x16A38); (0x16A40,0x16A5E); (0x16AD0,0x16AED); (0x16B00,0x16B2F); (0x16B63,0x16B77); (0x16B7D,0x16B8F); (0x16F00,0x16F44); (0x16F50,0x16F50); (0x17000,0x187EC); (0x18800,0x18AF2); (0x1B000,0x1B001); (0x1BC00,0x1BC6A); (0x1BC70,0x1BC7C); (0x1BC80,0x1BC88); (0x1BC90,0x1BC99); (0x1E800,0x1E8C4); (0x1EE00,0x1EE03); (0x1EE05,0x1EE1F); (0x1EE21,0x1EE22); (0x1EE24,0x1EE24); (0x1EE27,0x1EE27); (0x1EE29,0x1EE32); (0x1EE34,0x1EE37); (0x1EE39,0x1EE39); (0x1EE3B,0x1EE3B); (0x1EE42,0x1EE42); (0x1EE47,0x1EE47); (0x1EE49,0x1EE49); (0x1EE4B,0x1EE4B); (0x1EE4D,0x1EE4F); (0x1EE51,0x1EE52); (0x1EE54,0x1EE54); (0x1EE57,0x1EE57); (0x1EE59,0x1EE59); (0x1EE5B,0x1EE5B); (0x1EE5D,0x1EE5D); (0x1EE5F,0x1EE5F); (0x1EE61,0x1EE62); (0x1EE64,0x1EE64); (0x1EE67,0x1EE6A); (0x1EE6C,0x1EE72); (0x1EE74,0x1EE77); (0x1EE79,0x1EE7C); (0x1EE7E,0x1EE7E); (0x1EE80,0x1EE89); (0x1EE8B,0x1EE9B); (0x1EEA1,0x1EEA3); (0x1EEA5,0x1EEA9); (0x1EEAB,0x1EEBB); (0x20000,0x2A6D6); (0x2A700,0x2B734); (0x2B740,0x2B81D); (0x2B820,0x2CEA1) ] (* Punctuation, Connector *) let pc = [ (0x0005F,0x0005F); (0x0203F,0x02040); (0x02054,0x02054); (0x0FE33,0x0FE34); (0x0FE4D,0x0FE4F) ] (* Punctuation, Dash *) let pd = [ (0x0002D,0x0002D); (0x0058A,0x0058A); (0x005BE,0x005BE); (0x01400,0x01400); (0x01806,0x01806); (0x02010,0x02015); (0x02E17,0x02E17); (0x02E1A,0x02E1A); (0x02E3A,0x02E3B); (0x02E40,0x02E40); (0x0301C,0x0301C); (0x03030,0x03030); (0x030A0,0x030A0); (0x0FE31,0x0FE32); (0x0FE58,0x0FE58); (0x0FE63,0x0FE63) ] (* Punctuation, Open *) let ps = [ (0x00028,0x00028); (0x0005B,0x0005B); (0x0007B,0x0007B); (0x00F3A,0x00F3A); (0x00F3C,0x00F3C); (0x0169B,0x0169B); (0x0201A,0x0201A); (0x0201E,0x0201E); (0x02045,0x02045); (0x0207D,0x0207D); (0x0208D,0x0208D); (0x02308,0x02308); (0x0230A,0x0230A); (0x02329,0x02329); (0x02768,0x02768); (0x0276A,0x0276A); (0x0276C,0x0276C); (0x0276E,0x0276E); (0x02770,0x02770); (0x02772,0x02772); (0x02774,0x02774); (0x027C5,0x027C5); (0x027E6,0x027E6); (0x027E8,0x027E8); (0x027EA,0x027EA); (0x027EC,0x027EC); (0x027EE,0x027EE); (0x02983,0x02983); (0x02985,0x02985); (0x02987,0x02987); (0x02989,0x02989); (0x0298B,0x0298B); (0x0298D,0x0298D); (0x0298F,0x0298F); (0x02991,0x02991); (0x02993,0x02993); (0x02995,0x02995); (0x02997,0x02997); (0x029D8,0x029D8); (0x029DA,0x029DA); (0x029FC,0x029FC); (0x02E22,0x02E22); (0x02E24,0x02E24); (0x02E26,0x02E26); (0x02E28,0x02E28); (0x02E42,0x02E42); (0x03008,0x03008); (0x0300A,0x0300A); (0x0300C,0x0300C); (0x0300E,0x0300E); (0x03010,0x03010); (0x03014,0x03014); (0x03016,0x03016); (0x03018,0x03018); (0x0301A,0x0301A); (0x0301D,0x0301D); (0x0FD3F,0x0FD3F); (0x0FE17,0x0FE17); (0x0FE35,0x0FE35); (0x0FE37,0x0FE37); (0x0FE39,0x0FE39); (0x0FE3B,0x0FE3B); (0x0FE3D,0x0FE3D); (0x0FE3F,0x0FE3F); (0x0FE41,0x0FE41); (0x0FE43,0x0FE43); (0x0FE47,0x0FE47); (0x0FE59,0x0FE59); (0x0FE5B,0x0FE5B); (0x0FE5D,0x0FE5D); (0x0FF08,0x0FF08); (0x0FF3B,0x0FF3B); (0x0FF5B,0x0FF5B); (0x0FF5F,0x0FF5F) ] (* Punctuation, Close *) let pe = [ (0x00029,0x00029); (0x0005D,0x0005D); (0x0007D,0x0007D); (0x00F3B,0x00F3B); (0x00F3D,0x00F3D); (0x0169C,0x0169C); (0x02046,0x02046); (0x0207E,0x0207E); (0x0208E,0x0208E); 
(0x02309,0x02309); (0x0230B,0x0230B); (0x0232A,0x0232A); (0x02769,0x02769); (0x0276B,0x0276B); (0x0276D,0x0276D); (0x0276F,0x0276F); (0x02771,0x02771); (0x02773,0x02773); (0x02775,0x02775); (0x027C6,0x027C6); (0x027E7,0x027E7); (0x027E9,0x027E9); (0x027EB,0x027EB); (0x027ED,0x027ED); (0x027EF,0x027EF); (0x02984,0x02984); (0x02986,0x02986); (0x02988,0x02988); (0x0298A,0x0298A); (0x0298C,0x0298C); (0x0298E,0x0298E); (0x02990,0x02990); (0x02992,0x02992); (0x02994,0x02994); (0x02996,0x02996); (0x02998,0x02998); (0x029D9,0x029D9); (0x029DB,0x029DB); (0x029FD,0x029FD); (0x02E23,0x02E23); (0x02E25,0x02E25); (0x02E27,0x02E27); (0x02E29,0x02E29); (0x03009,0x03009); (0x0300B,0x0300B); (0x0300D,0x0300D); (0x0300F,0x0300F); (0x03011,0x03011); (0x03015,0x03015); (0x03017,0x03017); (0x03019,0x03019); (0x0301B,0x0301B); (0x0301E,0x0301F); (0x0FD3E,0x0FD3E); (0x0FE18,0x0FE18); (0x0FE36,0x0FE36); (0x0FE38,0x0FE38); (0x0FE3A,0x0FE3A); (0x0FE3C,0x0FE3C); (0x0FE3E,0x0FE3E); (0x0FE40,0x0FE40); (0x0FE42,0x0FE42); (0x0FE44,0x0FE44); (0x0FE48,0x0FE48); (0x0FE5A,0x0FE5A); (0x0FE5C,0x0FE5C); (0x0FE5E,0x0FE5E); (0x0FF09,0x0FF09); (0x0FF3D,0x0FF3D); (0x0FF5D,0x0FF5D); (0x0FF60,0x0FF60) ] (* Punctuation, Initial quote *) let pi = [ (0x000AB,0x000AB); (0x02018,0x02018); (0x0201B,0x0201C); (0x0201F,0x0201F); (0x02039,0x02039); (0x02E02,0x02E02); (0x02E04,0x02E04); (0x02E09,0x02E09); (0x02E0C,0x02E0C); (0x02E1C,0x02E1C) ] (* Punctuation, Final quote *) let pf = [ (0x000BB,0x000BB); (0x02019,0x02019); (0x0201D,0x0201D); (0x0203A,0x0203A); (0x02E03,0x02E03); (0x02E05,0x02E05); (0x02E0A,0x02E0A); (0x02E0D,0x02E0D); (0x02E1D,0x02E1D) ] (* Punctuation, Other *) let po = [ (0x00021,0x00023); (0x00025,0x00027); (0x0002A,0x0002A); (0x0002C,0x0002C); (0x0002E,0x0002F); (0x0003A,0x0003B); (0x0003F,0x00040); (0x0005C,0x0005C); (0x000A1,0x000A1); (0x000A7,0x000A7); (0x000B6,0x000B7); (0x000BF,0x000BF); (0x0037E,0x0037E); (0x00387,0x00387); (0x0055A,0x0055F); (0x00589,0x00589); (0x005C0,0x005C0); (0x005C3,0x005C3); (0x005C6,0x005C6); (0x005F3,0x005F4); (0x00609,0x0060A); (0x0060C,0x0060D); (0x0061B,0x0061B); (0x0061E,0x0061F); (0x0066A,0x0066D); (0x006D4,0x006D4); (0x00700,0x0070D); (0x007F7,0x007F9); (0x00830,0x0083E); (0x0085E,0x0085E); (0x00964,0x00965); (0x00970,0x00970); (0x00AF0,0x00AF0); (0x00DF4,0x00DF4); (0x00E4F,0x00E4F); (0x00E5A,0x00E5B); (0x00F04,0x00F12); (0x00F14,0x00F14); (0x00F85,0x00F85); (0x00FD0,0x00FD4); (0x00FD9,0x00FDA); (0x0104A,0x0104F); (0x010FB,0x010FB); (0x01360,0x01368); (0x0166D,0x0166E); (0x016EB,0x016ED); (0x01735,0x01736); (0x017D4,0x017D6); (0x017D8,0x017DA); (0x01800,0x01805); (0x01807,0x0180A); (0x01944,0x01945); (0x01A1E,0x01A1F); (0x01AA0,0x01AA6); (0x01AA8,0x01AAD); (0x01B5A,0x01B60); (0x01BFC,0x01BFF); (0x01C3B,0x01C3F); (0x01C7E,0x01C7F); (0x01CC0,0x01CC7); (0x01CD3,0x01CD3); (0x02016,0x02017); (0x02020,0x02027); (0x02030,0x02038); (0x0203B,0x0203E); (0x02041,0x02043); (0x02047,0x02051); (0x02053,0x02053); (0x02055,0x0205E); (0x02CF9,0x02CFC); (0x02CFE,0x02CFF); (0x02D70,0x02D70); (0x02E00,0x02E01); (0x02E06,0x02E08); (0x02E0B,0x02E0B); (0x02E0E,0x02E16); (0x02E18,0x02E19); (0x02E1B,0x02E1B); (0x02E1E,0x02E1F); (0x02E2A,0x02E2E); (0x02E30,0x02E39); (0x02E3C,0x02E3F); (0x02E41,0x02E41); (0x02E43,0x02E44); (0x03001,0x03003); (0x0303D,0x0303D); (0x030FB,0x030FB); (0x0A4FE,0x0A4FF); (0x0A60D,0x0A60F); (0x0A673,0x0A673); (0x0A67E,0x0A67E); (0x0A6F2,0x0A6F7); (0x0A874,0x0A877); (0x0A8CE,0x0A8CF); (0x0A8F8,0x0A8FA); (0x0A8FC,0x0A8FC); (0x0A92E,0x0A92F); (0x0A95F,0x0A95F); (0x0A9C1,0x0A9CD); 
(0x0A9DE,0x0A9DF); (0x0AA5C,0x0AA5F); (0x0AADE,0x0AADF); (0x0AAF0,0x0AAF1); (0x0ABEB,0x0ABEB); (0x0FE10,0x0FE16); (0x0FE19,0x0FE19); (0x0FE30,0x0FE30); (0x0FE45,0x0FE46); (0x0FE49,0x0FE4C); (0x0FE50,0x0FE52); (0x0FE54,0x0FE57); (0x0FE5F,0x0FE61); (0x0FE68,0x0FE68); (0x0FE6A,0x0FE6B); (0x0FF01,0x0FF03); (0x0FF05,0x0FF07); (0x0FF0A,0x0FF0A); (0x0FF0C,0x0FF0C); (0x0FF0E,0x0FF0F); (0x0FF1A,0x0FF1B); (0x0FF1F,0x0FF20); (0x0FF3C,0x0FF3C); (0x0FF61,0x0FF61); (0x0FF64,0x0FF65); (0x10100,0x10102); (0x1039F,0x1039F); (0x103D0,0x103D0); (0x1056F,0x1056F); (0x10857,0x10857); (0x1091F,0x1091F); (0x1093F,0x1093F); (0x10A50,0x10A58); (0x10A7F,0x10A7F); (0x10AF0,0x10AF6); (0x10B39,0x10B3F); (0x10B99,0x10B9C); (0x11047,0x1104D); (0x110BB,0x110BC); (0x110BE,0x110C1); (0x11140,0x11143); (0x11174,0x11175); (0x111C5,0x111C9); (0x111CD,0x111CD); (0x111DB,0x111DB); (0x111DD,0x111DF); (0x11238,0x1123D); (0x112A9,0x112A9); (0x1144B,0x1144F); (0x1145B,0x1145B); (0x1145D,0x1145D); (0x114C6,0x114C6); (0x115C1,0x115D7); (0x11641,0x11643); (0x11660,0x1166C); (0x1173C,0x1173E); (0x11C41,0x11C45); (0x11C70,0x11C71); (0x12470,0x12474); (0x16A6E,0x16A6F); (0x16AF5,0x16AF5); (0x16B37,0x16B3B); (0x16B44,0x16B44); (0x1BC9F,0x1BC9F); (0x1DA87,0x1DA8B) ] (* Symbol, Math *) let sm = [ (0x0002B,0x0002B); (0x0003C,0x0003E); (0x0007C,0x0007C); (0x0007E,0x0007E); (0x000AC,0x000AC); (0x000B1,0x000B1); (0x000D7,0x000D7); (0x000F7,0x000F7); (0x003F6,0x003F6); (0x00606,0x00608); (0x02044,0x02044); (0x02052,0x02052); (0x0207A,0x0207C); (0x0208A,0x0208C); (0x02118,0x02118); (0x02140,0x02144); (0x0214B,0x0214B); (0x02190,0x02194); (0x0219A,0x0219B); (0x021A0,0x021A0); (0x021A3,0x021A3); (0x021A6,0x021A6); (0x021AE,0x021AE); (0x021CE,0x021CF); (0x021D2,0x021D2); (0x021D4,0x021D4); (0x021F4,0x022FF); (0x02320,0x02321); (0x0237C,0x0237C); (0x0239B,0x023B3); (0x023DC,0x023E1); (0x025B7,0x025B7); (0x025C1,0x025C1); (0x025F8,0x025FF); (0x0266F,0x0266F); (0x027C0,0x027C4); (0x027C7,0x027E5); (0x027F0,0x027FF); (0x02900,0x02982); (0x02999,0x029D7); (0x029DC,0x029FB); (0x029FE,0x02AFF); (0x02B30,0x02B44); (0x02B47,0x02B4C); (0x0FB29,0x0FB29); (0x0FE62,0x0FE62); (0x0FE64,0x0FE66); (0x0FF0B,0x0FF0B); (0x0FF1C,0x0FF1E); (0x0FF5C,0x0FF5C); (0x0FF5E,0x0FF5E); (0x0FFE2,0x0FFE2); (0x0FFE9,0x0FFEC); (0x1D6C1,0x1D6C1); (0x1D6DB,0x1D6DB); (0x1D6FB,0x1D6FB); (0x1D715,0x1D715); (0x1D735,0x1D735); (0x1D74F,0x1D74F); (0x1D76F,0x1D76F); (0x1D789,0x1D789); (0x1D7A9,0x1D7A9); (0x1D7C3,0x1D7C3) ] (* Symbol, Currency *) let sc = [ (0x00024,0x00024); (0x000A2,0x000A5); (0x0058F,0x0058F); (0x0060B,0x0060B); (0x009F2,0x009F3); (0x009FB,0x009FB); (0x00AF1,0x00AF1); (0x00BF9,0x00BF9); (0x00E3F,0x00E3F); (0x017DB,0x017DB); (0x020A0,0x020BE); (0x0A838,0x0A838); (0x0FDFC,0x0FDFC); (0x0FE69,0x0FE69); (0x0FF04,0x0FF04); (0x0FFE0,0x0FFE1) ] (* Symbol, Modifier *) let sk = [ (0x0005E,0x0005E); (0x00060,0x00060); (0x000A8,0x000A8); (0x000AF,0x000AF); (0x000B4,0x000B4); (0x000B8,0x000B8); (0x002C2,0x002C5); (0x002D2,0x002DF); (0x002E5,0x002EB); (0x002ED,0x002ED); (0x002EF,0x002FF); (0x00375,0x00375); (0x00384,0x00385); (0x01FBD,0x01FBD); (0x01FBF,0x01FC1); (0x01FCD,0x01FCF); (0x01FDD,0x01FDF); (0x01FED,0x01FEF); (0x01FFD,0x01FFE); (0x0309B,0x0309C); (0x0A700,0x0A716); (0x0A720,0x0A721); (0x0A789,0x0A78A); (0x0AB5B,0x0AB5B); (0x0FBB2,0x0FBC1); (0x0FF3E,0x0FF3E); (0x0FF40,0x0FF40); (0x0FFE3,0x0FFE3) ] (* Symbol, Other *) let so = [ (0x000A6,0x000A6); (0x000A9,0x000A9); (0x000AE,0x000AE); (0x000B0,0x000B0); (0x00482,0x00482); (0x0058D,0x0058E); (0x0060E,0x0060F); (0x006DE,0x006DE); 
(0x006E9,0x006E9); (0x006FD,0x006FE); (0x007F6,0x007F6); (0x009FA,0x009FA); (0x00B70,0x00B70); (0x00BF3,0x00BF8); (0x00BFA,0x00BFA); (0x00C7F,0x00C7F); (0x00D4F,0x00D4F); (0x00D79,0x00D79); (0x00F01,0x00F03); (0x00F13,0x00F13); (0x00F15,0x00F17); (0x00F1A,0x00F1F); (0x00F34,0x00F34); (0x00F36,0x00F36); (0x00F38,0x00F38); (0x00FBE,0x00FC5); (0x00FC7,0x00FCC); (0x00FCE,0x00FCF); (0x00FD5,0x00FD8); (0x0109E,0x0109F); (0x01390,0x01399); (0x01940,0x01940); (0x019DE,0x019FF); (0x01B61,0x01B6A); (0x01B74,0x01B7C); (0x02100,0x02101); (0x02103,0x02106); (0x02108,0x02109); (0x02114,0x02114); (0x02116,0x02117); (0x0211E,0x02123); (0x02125,0x02125); (0x02127,0x02127); (0x02129,0x02129); (0x0212E,0x0212E); (0x0213A,0x0213B); (0x0214A,0x0214A); (0x0214C,0x0214D); (0x0214F,0x0214F); (0x0218A,0x0218B); (0x02195,0x02199); (0x0219C,0x0219F); (0x021A1,0x021A2); (0x021A4,0x021A5); (0x021A7,0x021AD); (0x021AF,0x021CD); (0x021D0,0x021D1); (0x021D3,0x021D3); (0x021D5,0x021F3); (0x02300,0x02307); (0x0230C,0x0231F); (0x02322,0x02328); (0x0232B,0x0237B); (0x0237D,0x0239A); (0x023B4,0x023DB); (0x023E2,0x023FE); (0x02400,0x02426); (0x02440,0x0244A); (0x0249C,0x024E9); (0x02500,0x025B6); (0x025B8,0x025C0); (0x025C2,0x025F7); (0x02600,0x0266E); (0x02670,0x02767); (0x02794,0x027BF); (0x02800,0x028FF); (0x02B00,0x02B2F); (0x02B45,0x02B46); (0x02B4D,0x02B73); (0x02B76,0x02B95); (0x02B98,0x02BB9); (0x02BBD,0x02BC8); (0x02BCA,0x02BD1); (0x02BEC,0x02BEF); (0x02CE5,0x02CEA); (0x02E80,0x02E99); (0x02E9B,0x02EF3); (0x02F00,0x02FD5); (0x02FF0,0x02FFB); (0x03004,0x03004); (0x03012,0x03013); (0x03020,0x03020); (0x03036,0x03037); (0x0303E,0x0303F); (0x03190,0x03191); (0x03196,0x0319F); (0x031C0,0x031E3); (0x03200,0x0321E); (0x0322A,0x03247); (0x03250,0x03250); (0x03260,0x0327F); (0x0328A,0x032B0); (0x032C0,0x032FE); (0x03300,0x033FF); (0x04DC0,0x04DFF); (0x0A490,0x0A4C6); (0x0A828,0x0A82B); (0x0A836,0x0A837); (0x0A839,0x0A839); (0x0AA77,0x0AA79); (0x0FDFD,0x0FDFD); (0x0FFE4,0x0FFE4); (0x0FFE8,0x0FFE8); (0x0FFED,0x0FFEE); (0x0FFFC,0x0FFFD); (0x10137,0x1013F); (0x10179,0x10189); (0x1018C,0x1018E); (0x10190,0x1019B); (0x101A0,0x101A0); (0x101D0,0x101FC); (0x10877,0x10878); (0x10AC8,0x10AC8); (0x1173F,0x1173F); (0x16B3C,0x16B3F); (0x16B45,0x16B45); (0x1BC9C,0x1BC9C); (0x1D000,0x1D0F5); (0x1D100,0x1D126); (0x1D129,0x1D164); (0x1D16A,0x1D16C); (0x1D183,0x1D184); (0x1D18C,0x1D1A9); (0x1D1AE,0x1D1E8); (0x1D200,0x1D241); (0x1D245,0x1D245); (0x1D300,0x1D356); (0x1D800,0x1D9FF); (0x1DA37,0x1DA3A); (0x1DA6D,0x1DA74); (0x1DA76,0x1DA83); (0x1DA85,0x1DA86); (0x1F000,0x1F02B); (0x1F030,0x1F093); (0x1F0A0,0x1F0AE); (0x1F0B1,0x1F0BF); (0x1F0C1,0x1F0CF); (0x1F0D1,0x1F0F5); (0x1F110,0x1F12E); (0x1F130,0x1F16B); (0x1F170,0x1F1AC); (0x1F1E6,0x1F202); (0x1F210,0x1F23B); (0x1F240,0x1F248); (0x1F250,0x1F251); (0x1F300,0x1F3FA); (0x1F400,0x1F6D2); (0x1F6E0,0x1F6EC); (0x1F6F0,0x1F6F6); (0x1F700,0x1F773); (0x1F780,0x1F7D4); (0x1F800,0x1F80B); (0x1F810,0x1F847); (0x1F850,0x1F859); (0x1F860,0x1F887); (0x1F890,0x1F8AD); (0x1F910,0x1F91E); (0x1F920,0x1F927); (0x1F930,0x1F930); (0x1F933,0x1F93E); (0x1F940,0x1F94B); (0x1F950,0x1F95E); (0x1F980,0x1F991) ] let to_lower = [ (0x00041,0x0005A), `Delta (32); (0x000C0,0x000D6), `Delta (32); (0x000D8,0x000DE), `Delta (32); (0x00100,0x00100), `Abs (0x00101); (0x00102,0x00102), `Abs (0x00103); (0x00104,0x00104), `Abs (0x00105); (0x00106,0x00106), `Abs (0x00107); (0x00108,0x00108), `Abs (0x00109); (0x0010A,0x0010A), `Abs (0x0010B); (0x0010C,0x0010C), `Abs (0x0010D); (0x0010E,0x0010E), `Abs (0x0010F); (0x00110,0x00110), `Abs 
(0x00111); (0x00112,0x00112), `Abs (0x00113); (0x00114,0x00114), `Abs (0x00115); (0x00116,0x00116), `Abs (0x00117); (0x00118,0x00118), `Abs (0x00119); (0x0011A,0x0011A), `Abs (0x0011B); (0x0011C,0x0011C), `Abs (0x0011D); (0x0011E,0x0011E), `Abs (0x0011F); (0x00120,0x00120), `Abs (0x00121); (0x00122,0x00122), `Abs (0x00123); (0x00124,0x00124), `Abs (0x00125); (0x00126,0x00126), `Abs (0x00127); (0x00128,0x00128), `Abs (0x00129); (0x0012A,0x0012A), `Abs (0x0012B); (0x0012C,0x0012C), `Abs (0x0012D); (0x0012E,0x0012E), `Abs (0x0012F); (0x00130,0x00130), `Abs (0x00069); (0x00132,0x00132), `Abs (0x00133); (0x00134,0x00134), `Abs (0x00135); (0x00136,0x00136), `Abs (0x00137); (0x00139,0x00139), `Abs (0x0013A); (0x0013B,0x0013B), `Abs (0x0013C); (0x0013D,0x0013D), `Abs (0x0013E); (0x0013F,0x0013F), `Abs (0x00140); (0x00141,0x00141), `Abs (0x00142); (0x00143,0x00143), `Abs (0x00144); (0x00145,0x00145), `Abs (0x00146); (0x00147,0x00147), `Abs (0x00148); (0x0014A,0x0014A), `Abs (0x0014B); (0x0014C,0x0014C), `Abs (0x0014D); (0x0014E,0x0014E), `Abs (0x0014F); (0x00150,0x00150), `Abs (0x00151); (0x00152,0x00152), `Abs (0x00153); (0x00154,0x00154), `Abs (0x00155); (0x00156,0x00156), `Abs (0x00157); (0x00158,0x00158), `Abs (0x00159); (0x0015A,0x0015A), `Abs (0x0015B); (0x0015C,0x0015C), `Abs (0x0015D); (0x0015E,0x0015E), `Abs (0x0015F); (0x00160,0x00160), `Abs (0x00161); (0x00162,0x00162), `Abs (0x00163); (0x00164,0x00164), `Abs (0x00165); (0x00166,0x00166), `Abs (0x00167); (0x00168,0x00168), `Abs (0x00169); (0x0016A,0x0016A), `Abs (0x0016B); (0x0016C,0x0016C), `Abs (0x0016D); (0x0016E,0x0016E), `Abs (0x0016F); (0x00170,0x00170), `Abs (0x00171); (0x00172,0x00172), `Abs (0x00173); (0x00174,0x00174), `Abs (0x00175); (0x00176,0x00176), `Abs (0x00177); (0x00178,0x00178), `Abs (0x000FF); (0x00179,0x00179), `Abs (0x0017A); (0x0017B,0x0017B), `Abs (0x0017C); (0x0017D,0x0017D), `Abs (0x0017E); (0x00181,0x00181), `Abs (0x00253); (0x00182,0x00182), `Abs (0x00183); (0x00184,0x00184), `Abs (0x00185); (0x00186,0x00186), `Abs (0x00254); (0x00187,0x00187), `Abs (0x00188); (0x00189,0x0018A), `Delta (205); (0x0018B,0x0018B), `Abs (0x0018C); (0x0018E,0x0018E), `Abs (0x001DD); (0x0018F,0x0018F), `Abs (0x00259); (0x00190,0x00190), `Abs (0x0025B); (0x00191,0x00191), `Abs (0x00192); (0x00193,0x00193), `Abs (0x00260); (0x00194,0x00194), `Abs (0x00263); (0x00196,0x00196), `Abs (0x00269); (0x00197,0x00197), `Abs (0x00268); (0x00198,0x00198), `Abs (0x00199); (0x0019C,0x0019C), `Abs (0x0026F); (0x0019D,0x0019D), `Abs (0x00272); (0x0019F,0x0019F), `Abs (0x00275); (0x001A0,0x001A0), `Abs (0x001A1); (0x001A2,0x001A2), `Abs (0x001A3); (0x001A4,0x001A4), `Abs (0x001A5); (0x001A6,0x001A6), `Abs (0x00280); (0x001A7,0x001A7), `Abs (0x001A8); (0x001A9,0x001A9), `Abs (0x00283); (0x001AC,0x001AC), `Abs (0x001AD); (0x001AE,0x001AE), `Abs (0x00288); (0x001AF,0x001AF), `Abs (0x001B0); (0x001B1,0x001B2), `Delta (217); (0x001B3,0x001B3), `Abs (0x001B4); (0x001B5,0x001B5), `Abs (0x001B6); (0x001B7,0x001B7), `Abs (0x00292); (0x001B8,0x001B8), `Abs (0x001B9); (0x001BC,0x001BC), `Abs (0x001BD); (0x001C4,0x001C4), `Abs (0x001C6); (0x001C7,0x001C7), `Abs (0x001C9); (0x001CA,0x001CA), `Abs (0x001CC); (0x001CD,0x001CD), `Abs (0x001CE); (0x001CF,0x001CF), `Abs (0x001D0); (0x001D1,0x001D1), `Abs (0x001D2); (0x001D3,0x001D3), `Abs (0x001D4); (0x001D5,0x001D5), `Abs (0x001D6); (0x001D7,0x001D7), `Abs (0x001D8); (0x001D9,0x001D9), `Abs (0x001DA); (0x001DB,0x001DB), `Abs (0x001DC); (0x001DE,0x001DE), `Abs (0x001DF); (0x001E0,0x001E0), `Abs (0x001E1); 
(0x001E2,0x001E2), `Abs (0x001E3); (0x001E4,0x001E4), `Abs (0x001E5); (0x001E6,0x001E6), `Abs (0x001E7); (0x001E8,0x001E8), `Abs (0x001E9); (0x001EA,0x001EA), `Abs (0x001EB); (0x001EC,0x001EC), `Abs (0x001ED); (0x001EE,0x001EE), `Abs (0x001EF); (0x001F1,0x001F1), `Abs (0x001F3); (0x001F4,0x001F4), `Abs (0x001F5); (0x001F6,0x001F6), `Abs (0x00195); (0x001F7,0x001F7), `Abs (0x001BF); (0x001F8,0x001F8), `Abs (0x001F9); (0x001FA,0x001FA), `Abs (0x001FB); (0x001FC,0x001FC), `Abs (0x001FD); (0x001FE,0x001FE), `Abs (0x001FF); (0x00200,0x00200), `Abs (0x00201); (0x00202,0x00202), `Abs (0x00203); (0x00204,0x00204), `Abs (0x00205); (0x00206,0x00206), `Abs (0x00207); (0x00208,0x00208), `Abs (0x00209); (0x0020A,0x0020A), `Abs (0x0020B); (0x0020C,0x0020C), `Abs (0x0020D); (0x0020E,0x0020E), `Abs (0x0020F); (0x00210,0x00210), `Abs (0x00211); (0x00212,0x00212), `Abs (0x00213); (0x00214,0x00214), `Abs (0x00215); (0x00216,0x00216), `Abs (0x00217); (0x00218,0x00218), `Abs (0x00219); (0x0021A,0x0021A), `Abs (0x0021B); (0x0021C,0x0021C), `Abs (0x0021D); (0x0021E,0x0021E), `Abs (0x0021F); (0x00220,0x00220), `Abs (0x0019E); (0x00222,0x00222), `Abs (0x00223); (0x00224,0x00224), `Abs (0x00225); (0x00226,0x00226), `Abs (0x00227); (0x00228,0x00228), `Abs (0x00229); (0x0022A,0x0022A), `Abs (0x0022B); (0x0022C,0x0022C), `Abs (0x0022D); (0x0022E,0x0022E), `Abs (0x0022F); (0x00230,0x00230), `Abs (0x00231); (0x00232,0x00232), `Abs (0x00233); (0x0023A,0x0023A), `Abs (0x02C65); (0x0023B,0x0023B), `Abs (0x0023C); (0x0023D,0x0023D), `Abs (0x0019A); (0x0023E,0x0023E), `Abs (0x02C66); (0x00241,0x00241), `Abs (0x00242); (0x00243,0x00243), `Abs (0x00180); (0x00244,0x00244), `Abs (0x00289); (0x00245,0x00245), `Abs (0x0028C); (0x00246,0x00246), `Abs (0x00247); (0x00248,0x00248), `Abs (0x00249); (0x0024A,0x0024A), `Abs (0x0024B); (0x0024C,0x0024C), `Abs (0x0024D); (0x0024E,0x0024E), `Abs (0x0024F); (0x00370,0x00370), `Abs (0x00371); (0x00372,0x00372), `Abs (0x00373); (0x00376,0x00376), `Abs (0x00377); (0x0037F,0x0037F), `Abs (0x003F3); (0x00386,0x00386), `Abs (0x003AC); (0x00388,0x0038A), `Delta (37); (0x0038C,0x0038C), `Abs (0x003CC); (0x0038E,0x0038F), `Delta (63); (0x00391,0x003A1), `Delta (32); (0x003A3,0x003AB), `Delta (32); (0x003CF,0x003CF), `Abs (0x003D7); (0x003D2,0x003D4), `Delta (0); (0x003D8,0x003D8), `Abs (0x003D9); (0x003DA,0x003DA), `Abs (0x003DB); (0x003DC,0x003DC), `Abs (0x003DD); (0x003DE,0x003DE), `Abs (0x003DF); (0x003E0,0x003E0), `Abs (0x003E1); (0x003E2,0x003E2), `Abs (0x003E3); (0x003E4,0x003E4), `Abs (0x003E5); (0x003E6,0x003E6), `Abs (0x003E7); (0x003E8,0x003E8), `Abs (0x003E9); (0x003EA,0x003EA), `Abs (0x003EB); (0x003EC,0x003EC), `Abs (0x003ED); (0x003EE,0x003EE), `Abs (0x003EF); (0x003F4,0x003F4), `Abs (0x003B8); (0x003F7,0x003F7), `Abs (0x003F8); (0x003F9,0x003F9), `Abs (0x003F2); (0x003FA,0x003FA), `Abs (0x003FB); (0x003FD,0x003FF), `Delta (-130); (0x00400,0x0040F), `Delta (80); (0x00410,0x0042F), `Delta (32); (0x00460,0x00460), `Abs (0x00461); (0x00462,0x00462), `Abs (0x00463); (0x00464,0x00464), `Abs (0x00465); (0x00466,0x00466), `Abs (0x00467); (0x00468,0x00468), `Abs (0x00469); (0x0046A,0x0046A), `Abs (0x0046B); (0x0046C,0x0046C), `Abs (0x0046D); (0x0046E,0x0046E), `Abs (0x0046F); (0x00470,0x00470), `Abs (0x00471); (0x00472,0x00472), `Abs (0x00473); (0x00474,0x00474), `Abs (0x00475); (0x00476,0x00476), `Abs (0x00477); (0x00478,0x00478), `Abs (0x00479); (0x0047A,0x0047A), `Abs (0x0047B); (0x0047C,0x0047C), `Abs (0x0047D); (0x0047E,0x0047E), `Abs (0x0047F); (0x00480,0x00480), `Abs (0x00481); 
(0x0048A,0x0048A), `Abs (0x0048B); (0x0048C,0x0048C), `Abs (0x0048D); (0x0048E,0x0048E), `Abs (0x0048F); (0x00490,0x00490), `Abs (0x00491); (0x00492,0x00492), `Abs (0x00493); (0x00494,0x00494), `Abs (0x00495); (0x00496,0x00496), `Abs (0x00497); (0x00498,0x00498), `Abs (0x00499); (0x0049A,0x0049A), `Abs (0x0049B); (0x0049C,0x0049C), `Abs (0x0049D); (0x0049E,0x0049E), `Abs (0x0049F); (0x004A0,0x004A0), `Abs (0x004A1); (0x004A2,0x004A2), `Abs (0x004A3); (0x004A4,0x004A4), `Abs (0x004A5); (0x004A6,0x004A6), `Abs (0x004A7); (0x004A8,0x004A8), `Abs (0x004A9); (0x004AA,0x004AA), `Abs (0x004AB); (0x004AC,0x004AC), `Abs (0x004AD); (0x004AE,0x004AE), `Abs (0x004AF); (0x004B0,0x004B0), `Abs (0x004B1); (0x004B2,0x004B2), `Abs (0x004B3); (0x004B4,0x004B4), `Abs (0x004B5); (0x004B6,0x004B6), `Abs (0x004B7); (0x004B8,0x004B8), `Abs (0x004B9); (0x004BA,0x004BA), `Abs (0x004BB); (0x004BC,0x004BC), `Abs (0x004BD); (0x004BE,0x004BE), `Abs (0x004BF); (0x004C0,0x004C0), `Abs (0x004CF); (0x004C1,0x004C1), `Abs (0x004C2); (0x004C3,0x004C3), `Abs (0x004C4); (0x004C5,0x004C5), `Abs (0x004C6); (0x004C7,0x004C7), `Abs (0x004C8); (0x004C9,0x004C9), `Abs (0x004CA); (0x004CB,0x004CB), `Abs (0x004CC); (0x004CD,0x004CD), `Abs (0x004CE); (0x004D0,0x004D0), `Abs (0x004D1); (0x004D2,0x004D2), `Abs (0x004D3); (0x004D4,0x004D4), `Abs (0x004D5); (0x004D6,0x004D6), `Abs (0x004D7); (0x004D8,0x004D8), `Abs (0x004D9); (0x004DA,0x004DA), `Abs (0x004DB); (0x004DC,0x004DC), `Abs (0x004DD); (0x004DE,0x004DE), `Abs (0x004DF); (0x004E0,0x004E0), `Abs (0x004E1); (0x004E2,0x004E2), `Abs (0x004E3); (0x004E4,0x004E4), `Abs (0x004E5); (0x004E6,0x004E6), `Abs (0x004E7); (0x004E8,0x004E8), `Abs (0x004E9); (0x004EA,0x004EA), `Abs (0x004EB); (0x004EC,0x004EC), `Abs (0x004ED); (0x004EE,0x004EE), `Abs (0x004EF); (0x004F0,0x004F0), `Abs (0x004F1); (0x004F2,0x004F2), `Abs (0x004F3); (0x004F4,0x004F4), `Abs (0x004F5); (0x004F6,0x004F6), `Abs (0x004F7); (0x004F8,0x004F8), `Abs (0x004F9); (0x004FA,0x004FA), `Abs (0x004FB); (0x004FC,0x004FC), `Abs (0x004FD); (0x004FE,0x004FE), `Abs (0x004FF); (0x00500,0x00500), `Abs (0x00501); (0x00502,0x00502), `Abs (0x00503); (0x00504,0x00504), `Abs (0x00505); (0x00506,0x00506), `Abs (0x00507); (0x00508,0x00508), `Abs (0x00509); (0x0050A,0x0050A), `Abs (0x0050B); (0x0050C,0x0050C), `Abs (0x0050D); (0x0050E,0x0050E), `Abs (0x0050F); (0x00510,0x00510), `Abs (0x00511); (0x00512,0x00512), `Abs (0x00513); (0x00514,0x00514), `Abs (0x00515); (0x00516,0x00516), `Abs (0x00517); (0x00518,0x00518), `Abs (0x00519); (0x0051A,0x0051A), `Abs (0x0051B); (0x0051C,0x0051C), `Abs (0x0051D); (0x0051E,0x0051E), `Abs (0x0051F); (0x00520,0x00520), `Abs (0x00521); (0x00522,0x00522), `Abs (0x00523); (0x00524,0x00524), `Abs (0x00525); (0x00526,0x00526), `Abs (0x00527); (0x00528,0x00528), `Abs (0x00529); (0x0052A,0x0052A), `Abs (0x0052B); (0x0052C,0x0052C), `Abs (0x0052D); (0x0052E,0x0052E), `Abs (0x0052F); (0x00531,0x00556), `Delta (48); (0x010A0,0x010C5), `Delta (7264); (0x010C7,0x010C7), `Abs (0x02D27); (0x010CD,0x010CD), `Abs (0x02D2D); (0x013A0,0x013EF), `Delta (38864); (0x013F0,0x013F5), `Delta (8); (0x01E00,0x01E00), `Abs (0x01E01); (0x01E02,0x01E02), `Abs (0x01E03); (0x01E04,0x01E04), `Abs (0x01E05); (0x01E06,0x01E06), `Abs (0x01E07); (0x01E08,0x01E08), `Abs (0x01E09); (0x01E0A,0x01E0A), `Abs (0x01E0B); (0x01E0C,0x01E0C), `Abs (0x01E0D); (0x01E0E,0x01E0E), `Abs (0x01E0F); (0x01E10,0x01E10), `Abs (0x01E11); (0x01E12,0x01E12), `Abs (0x01E13); (0x01E14,0x01E14), `Abs (0x01E15); (0x01E16,0x01E16), `Abs (0x01E17); (0x01E18,0x01E18), `Abs 
(0x01E19); (0x01E1A,0x01E1A), `Abs (0x01E1B); (0x01E1C,0x01E1C), `Abs (0x01E1D); (0x01E1E,0x01E1E), `Abs (0x01E1F); (0x01E20,0x01E20), `Abs (0x01E21); (0x01E22,0x01E22), `Abs (0x01E23); (0x01E24,0x01E24), `Abs (0x01E25); (0x01E26,0x01E26), `Abs (0x01E27); (0x01E28,0x01E28), `Abs (0x01E29); (0x01E2A,0x01E2A), `Abs (0x01E2B); (0x01E2C,0x01E2C), `Abs (0x01E2D); (0x01E2E,0x01E2E), `Abs (0x01E2F); (0x01E30,0x01E30), `Abs (0x01E31); (0x01E32,0x01E32), `Abs (0x01E33); (0x01E34,0x01E34), `Abs (0x01E35); (0x01E36,0x01E36), `Abs (0x01E37); (0x01E38,0x01E38), `Abs (0x01E39); (0x01E3A,0x01E3A), `Abs (0x01E3B); (0x01E3C,0x01E3C), `Abs (0x01E3D); (0x01E3E,0x01E3E), `Abs (0x01E3F); (0x01E40,0x01E40), `Abs (0x01E41); (0x01E42,0x01E42), `Abs (0x01E43); (0x01E44,0x01E44), `Abs (0x01E45); (0x01E46,0x01E46), `Abs (0x01E47); (0x01E48,0x01E48), `Abs (0x01E49); (0x01E4A,0x01E4A), `Abs (0x01E4B); (0x01E4C,0x01E4C), `Abs (0x01E4D); (0x01E4E,0x01E4E), `Abs (0x01E4F); (0x01E50,0x01E50), `Abs (0x01E51); (0x01E52,0x01E52), `Abs (0x01E53); (0x01E54,0x01E54), `Abs (0x01E55); (0x01E56,0x01E56), `Abs (0x01E57); (0x01E58,0x01E58), `Abs (0x01E59); (0x01E5A,0x01E5A), `Abs (0x01E5B); (0x01E5C,0x01E5C), `Abs (0x01E5D); (0x01E5E,0x01E5E), `Abs (0x01E5F); (0x01E60,0x01E60), `Abs (0x01E61); (0x01E62,0x01E62), `Abs (0x01E63); (0x01E64,0x01E64), `Abs (0x01E65); (0x01E66,0x01E66), `Abs (0x01E67); (0x01E68,0x01E68), `Abs (0x01E69); (0x01E6A,0x01E6A), `Abs (0x01E6B); (0x01E6C,0x01E6C), `Abs (0x01E6D); (0x01E6E,0x01E6E), `Abs (0x01E6F); (0x01E70,0x01E70), `Abs (0x01E71); (0x01E72,0x01E72), `Abs (0x01E73); (0x01E74,0x01E74), `Abs (0x01E75); (0x01E76,0x01E76), `Abs (0x01E77); (0x01E78,0x01E78), `Abs (0x01E79); (0x01E7A,0x01E7A), `Abs (0x01E7B); (0x01E7C,0x01E7C), `Abs (0x01E7D); (0x01E7E,0x01E7E), `Abs (0x01E7F); (0x01E80,0x01E80), `Abs (0x01E81); (0x01E82,0x01E82), `Abs (0x01E83); (0x01E84,0x01E84), `Abs (0x01E85); (0x01E86,0x01E86), `Abs (0x01E87); (0x01E88,0x01E88), `Abs (0x01E89); (0x01E8A,0x01E8A), `Abs (0x01E8B); (0x01E8C,0x01E8C), `Abs (0x01E8D); (0x01E8E,0x01E8E), `Abs (0x01E8F); (0x01E90,0x01E90), `Abs (0x01E91); (0x01E92,0x01E92), `Abs (0x01E93); (0x01E94,0x01E94), `Abs (0x01E95); (0x01E9E,0x01E9E), `Abs (0x000DF); (0x01EA0,0x01EA0), `Abs (0x01EA1); (0x01EA2,0x01EA2), `Abs (0x01EA3); (0x01EA4,0x01EA4), `Abs (0x01EA5); (0x01EA6,0x01EA6), `Abs (0x01EA7); (0x01EA8,0x01EA8), `Abs (0x01EA9); (0x01EAA,0x01EAA), `Abs (0x01EAB); (0x01EAC,0x01EAC), `Abs (0x01EAD); (0x01EAE,0x01EAE), `Abs (0x01EAF); (0x01EB0,0x01EB0), `Abs (0x01EB1); (0x01EB2,0x01EB2), `Abs (0x01EB3); (0x01EB4,0x01EB4), `Abs (0x01EB5); (0x01EB6,0x01EB6), `Abs (0x01EB7); (0x01EB8,0x01EB8), `Abs (0x01EB9); (0x01EBA,0x01EBA), `Abs (0x01EBB); (0x01EBC,0x01EBC), `Abs (0x01EBD); (0x01EBE,0x01EBE), `Abs (0x01EBF); (0x01EC0,0x01EC0), `Abs (0x01EC1); (0x01EC2,0x01EC2), `Abs (0x01EC3); (0x01EC4,0x01EC4), `Abs (0x01EC5); (0x01EC6,0x01EC6), `Abs (0x01EC7); (0x01EC8,0x01EC8), `Abs (0x01EC9); (0x01ECA,0x01ECA), `Abs (0x01ECB); (0x01ECC,0x01ECC), `Abs (0x01ECD); (0x01ECE,0x01ECE), `Abs (0x01ECF); (0x01ED0,0x01ED0), `Abs (0x01ED1); (0x01ED2,0x01ED2), `Abs (0x01ED3); (0x01ED4,0x01ED4), `Abs (0x01ED5); (0x01ED6,0x01ED6), `Abs (0x01ED7); (0x01ED8,0x01ED8), `Abs (0x01ED9); (0x01EDA,0x01EDA), `Abs (0x01EDB); (0x01EDC,0x01EDC), `Abs (0x01EDD); (0x01EDE,0x01EDE), `Abs (0x01EDF); (0x01EE0,0x01EE0), `Abs (0x01EE1); (0x01EE2,0x01EE2), `Abs (0x01EE3); (0x01EE4,0x01EE4), `Abs (0x01EE5); (0x01EE6,0x01EE6), `Abs (0x01EE7); (0x01EE8,0x01EE8), `Abs (0x01EE9); (0x01EEA,0x01EEA), `Abs (0x01EEB); 
(0x01EEC,0x01EEC), `Abs (0x01EED); (0x01EEE,0x01EEE), `Abs (0x01EEF); (0x01EF0,0x01EF0), `Abs (0x01EF1); (0x01EF2,0x01EF2), `Abs (0x01EF3); (0x01EF4,0x01EF4), `Abs (0x01EF5); (0x01EF6,0x01EF6), `Abs (0x01EF7); (0x01EF8,0x01EF8), `Abs (0x01EF9); (0x01EFA,0x01EFA), `Abs (0x01EFB); (0x01EFC,0x01EFC), `Abs (0x01EFD); (0x01EFE,0x01EFE), `Abs (0x01EFF); (0x01F08,0x01F0F), `Delta (-8); (0x01F18,0x01F1D), `Delta (-8); (0x01F28,0x01F2F), `Delta (-8); (0x01F38,0x01F3F), `Delta (-8); (0x01F48,0x01F4D), `Delta (-8); (0x01F59,0x01F59), `Abs (0x01F51); (0x01F5B,0x01F5B), `Abs (0x01F53); (0x01F5D,0x01F5D), `Abs (0x01F55); (0x01F5F,0x01F5F), `Abs (0x01F57); (0x01F68,0x01F6F), `Delta (-8); (0x01FB8,0x01FB9), `Delta (-8); (0x01FBA,0x01FBB), `Delta (-74); (0x01FC8,0x01FCB), `Delta (-86); (0x01FD8,0x01FD9), `Delta (-8); (0x01FDA,0x01FDB), `Delta (-100); (0x01FE8,0x01FE9), `Delta (-8); (0x01FEA,0x01FEB), `Delta (-112); (0x01FEC,0x01FEC), `Abs (0x01FE5); (0x01FF8,0x01FF9), `Delta (-128); (0x01FFA,0x01FFB), `Delta (-126); (0x02102,0x02102), `Abs (0x02102); (0x02107,0x02107), `Abs (0x02107); (0x0210B,0x0210D), `Delta (0); (0x02110,0x02112), `Delta (0); (0x02115,0x02115), `Abs (0x02115); (0x02119,0x0211D), `Delta (0); (0x02124,0x02124), `Abs (0x02124); (0x02126,0x02126), `Abs (0x003C9); (0x02128,0x02128), `Abs (0x02128); (0x0212A,0x0212A), `Abs (0x0006B); (0x0212B,0x0212B), `Abs (0x000E5); (0x0212C,0x0212D), `Delta (0); (0x02130,0x02131), `Delta (0); (0x02132,0x02132), `Abs (0x0214E); (0x02133,0x02133), `Abs (0x02133); (0x0213E,0x0213F), `Delta (0); (0x02145,0x02145), `Abs (0x02145); (0x02183,0x02183), `Abs (0x02184); (0x02C00,0x02C2E), `Delta (48); (0x02C60,0x02C60), `Abs (0x02C61); (0x02C62,0x02C62), `Abs (0x0026B); (0x02C63,0x02C63), `Abs (0x01D7D); (0x02C64,0x02C64), `Abs (0x0027D); (0x02C67,0x02C67), `Abs (0x02C68); (0x02C69,0x02C69), `Abs (0x02C6A); (0x02C6B,0x02C6B), `Abs (0x02C6C); (0x02C6D,0x02C6D), `Abs (0x00251); (0x02C6E,0x02C6E), `Abs (0x00271); (0x02C6F,0x02C6F), `Abs (0x00250); (0x02C70,0x02C70), `Abs (0x00252); (0x02C72,0x02C72), `Abs (0x02C73); (0x02C75,0x02C75), `Abs (0x02C76); (0x02C7E,0x02C7F), `Delta (-10815); (0x02C80,0x02C80), `Abs (0x02C81); (0x02C82,0x02C82), `Abs (0x02C83); (0x02C84,0x02C84), `Abs (0x02C85); (0x02C86,0x02C86), `Abs (0x02C87); (0x02C88,0x02C88), `Abs (0x02C89); (0x02C8A,0x02C8A), `Abs (0x02C8B); (0x02C8C,0x02C8C), `Abs (0x02C8D); (0x02C8E,0x02C8E), `Abs (0x02C8F); (0x02C90,0x02C90), `Abs (0x02C91); (0x02C92,0x02C92), `Abs (0x02C93); (0x02C94,0x02C94), `Abs (0x02C95); (0x02C96,0x02C96), `Abs (0x02C97); (0x02C98,0x02C98), `Abs (0x02C99); (0x02C9A,0x02C9A), `Abs (0x02C9B); (0x02C9C,0x02C9C), `Abs (0x02C9D); (0x02C9E,0x02C9E), `Abs (0x02C9F); (0x02CA0,0x02CA0), `Abs (0x02CA1); (0x02CA2,0x02CA2), `Abs (0x02CA3); (0x02CA4,0x02CA4), `Abs (0x02CA5); (0x02CA6,0x02CA6), `Abs (0x02CA7); (0x02CA8,0x02CA8), `Abs (0x02CA9); (0x02CAA,0x02CAA), `Abs (0x02CAB); (0x02CAC,0x02CAC), `Abs (0x02CAD); (0x02CAE,0x02CAE), `Abs (0x02CAF); (0x02CB0,0x02CB0), `Abs (0x02CB1); (0x02CB2,0x02CB2), `Abs (0x02CB3); (0x02CB4,0x02CB4), `Abs (0x02CB5); (0x02CB6,0x02CB6), `Abs (0x02CB7); (0x02CB8,0x02CB8), `Abs (0x02CB9); (0x02CBA,0x02CBA), `Abs (0x02CBB); (0x02CBC,0x02CBC), `Abs (0x02CBD); (0x02CBE,0x02CBE), `Abs (0x02CBF); (0x02CC0,0x02CC0), `Abs (0x02CC1); (0x02CC2,0x02CC2), `Abs (0x02CC3); (0x02CC4,0x02CC4), `Abs (0x02CC5); (0x02CC6,0x02CC6), `Abs (0x02CC7); (0x02CC8,0x02CC8), `Abs (0x02CC9); (0x02CCA,0x02CCA), `Abs (0x02CCB); (0x02CCC,0x02CCC), `Abs (0x02CCD); (0x02CCE,0x02CCE), `Abs (0x02CCF); 
(0x02CD0,0x02CD0), `Abs (0x02CD1); (0x02CD2,0x02CD2), `Abs (0x02CD3); (0x02CD4,0x02CD4), `Abs (0x02CD5); (0x02CD6,0x02CD6), `Abs (0x02CD7); (0x02CD8,0x02CD8), `Abs (0x02CD9); (0x02CDA,0x02CDA), `Abs (0x02CDB); (0x02CDC,0x02CDC), `Abs (0x02CDD); (0x02CDE,0x02CDE), `Abs (0x02CDF); (0x02CE0,0x02CE0), `Abs (0x02CE1); (0x02CE2,0x02CE2), `Abs (0x02CE3); (0x02CEB,0x02CEB), `Abs (0x02CEC); (0x02CED,0x02CED), `Abs (0x02CEE); (0x02CF2,0x02CF2), `Abs (0x02CF3); (0x0A640,0x0A640), `Abs (0x0A641); (0x0A642,0x0A642), `Abs (0x0A643); (0x0A644,0x0A644), `Abs (0x0A645); (0x0A646,0x0A646), `Abs (0x0A647); (0x0A648,0x0A648), `Abs (0x0A649); (0x0A64A,0x0A64A), `Abs (0x0A64B); (0x0A64C,0x0A64C), `Abs (0x0A64D); (0x0A64E,0x0A64E), `Abs (0x0A64F); (0x0A650,0x0A650), `Abs (0x0A651); (0x0A652,0x0A652), `Abs (0x0A653); (0x0A654,0x0A654), `Abs (0x0A655); (0x0A656,0x0A656), `Abs (0x0A657); (0x0A658,0x0A658), `Abs (0x0A659); (0x0A65A,0x0A65A), `Abs (0x0A65B); (0x0A65C,0x0A65C), `Abs (0x0A65D); (0x0A65E,0x0A65E), `Abs (0x0A65F); (0x0A660,0x0A660), `Abs (0x0A661); (0x0A662,0x0A662), `Abs (0x0A663); (0x0A664,0x0A664), `Abs (0x0A665); (0x0A666,0x0A666), `Abs (0x0A667); (0x0A668,0x0A668), `Abs (0x0A669); (0x0A66A,0x0A66A), `Abs (0x0A66B); (0x0A66C,0x0A66C), `Abs (0x0A66D); (0x0A680,0x0A680), `Abs (0x0A681); (0x0A682,0x0A682), `Abs (0x0A683); (0x0A684,0x0A684), `Abs (0x0A685); (0x0A686,0x0A686), `Abs (0x0A687); (0x0A688,0x0A688), `Abs (0x0A689); (0x0A68A,0x0A68A), `Abs (0x0A68B); (0x0A68C,0x0A68C), `Abs (0x0A68D); (0x0A68E,0x0A68E), `Abs (0x0A68F); (0x0A690,0x0A690), `Abs (0x0A691); (0x0A692,0x0A692), `Abs (0x0A693); (0x0A694,0x0A694), `Abs (0x0A695); (0x0A696,0x0A696), `Abs (0x0A697); (0x0A698,0x0A698), `Abs (0x0A699); (0x0A69A,0x0A69A), `Abs (0x0A69B); (0x0A722,0x0A722), `Abs (0x0A723); (0x0A724,0x0A724), `Abs (0x0A725); (0x0A726,0x0A726), `Abs (0x0A727); (0x0A728,0x0A728), `Abs (0x0A729); (0x0A72A,0x0A72A), `Abs (0x0A72B); (0x0A72C,0x0A72C), `Abs (0x0A72D); (0x0A72E,0x0A72E), `Abs (0x0A72F); (0x0A732,0x0A732), `Abs (0x0A733); (0x0A734,0x0A734), `Abs (0x0A735); (0x0A736,0x0A736), `Abs (0x0A737); (0x0A738,0x0A738), `Abs (0x0A739); (0x0A73A,0x0A73A), `Abs (0x0A73B); (0x0A73C,0x0A73C), `Abs (0x0A73D); (0x0A73E,0x0A73E), `Abs (0x0A73F); (0x0A740,0x0A740), `Abs (0x0A741); (0x0A742,0x0A742), `Abs (0x0A743); (0x0A744,0x0A744), `Abs (0x0A745); (0x0A746,0x0A746), `Abs (0x0A747); (0x0A748,0x0A748), `Abs (0x0A749); (0x0A74A,0x0A74A), `Abs (0x0A74B); (0x0A74C,0x0A74C), `Abs (0x0A74D); (0x0A74E,0x0A74E), `Abs (0x0A74F); (0x0A750,0x0A750), `Abs (0x0A751); (0x0A752,0x0A752), `Abs (0x0A753); (0x0A754,0x0A754), `Abs (0x0A755); (0x0A756,0x0A756), `Abs (0x0A757); (0x0A758,0x0A758), `Abs (0x0A759); (0x0A75A,0x0A75A), `Abs (0x0A75B); (0x0A75C,0x0A75C), `Abs (0x0A75D); (0x0A75E,0x0A75E), `Abs (0x0A75F); (0x0A760,0x0A760), `Abs (0x0A761); (0x0A762,0x0A762), `Abs (0x0A763); (0x0A764,0x0A764), `Abs (0x0A765); (0x0A766,0x0A766), `Abs (0x0A767); (0x0A768,0x0A768), `Abs (0x0A769); (0x0A76A,0x0A76A), `Abs (0x0A76B); (0x0A76C,0x0A76C), `Abs (0x0A76D); (0x0A76E,0x0A76E), `Abs (0x0A76F); (0x0A779,0x0A779), `Abs (0x0A77A); (0x0A77B,0x0A77B), `Abs (0x0A77C); (0x0A77D,0x0A77D), `Abs (0x01D79); (0x0A77E,0x0A77E), `Abs (0x0A77F); (0x0A780,0x0A780), `Abs (0x0A781); (0x0A782,0x0A782), `Abs (0x0A783); (0x0A784,0x0A784), `Abs (0x0A785); (0x0A786,0x0A786), `Abs (0x0A787); (0x0A78B,0x0A78B), `Abs (0x0A78C); (0x0A78D,0x0A78D), `Abs (0x00265); (0x0A790,0x0A790), `Abs (0x0A791); (0x0A792,0x0A792), `Abs (0x0A793); (0x0A796,0x0A796), `Abs (0x0A797); (0x0A798,0x0A798), 
`Abs (0x0A799); (0x0A79A,0x0A79A), `Abs (0x0A79B); (0x0A79C,0x0A79C), `Abs (0x0A79D); (0x0A79E,0x0A79E), `Abs (0x0A79F); (0x0A7A0,0x0A7A0), `Abs (0x0A7A1); (0x0A7A2,0x0A7A2), `Abs (0x0A7A3); (0x0A7A4,0x0A7A4), `Abs (0x0A7A5); (0x0A7A6,0x0A7A6), `Abs (0x0A7A7); (0x0A7A8,0x0A7A8), `Abs (0x0A7A9); (0x0A7AA,0x0A7AA), `Abs (0x00266); (0x0A7AB,0x0A7AB), `Abs (0x0025C); (0x0A7AC,0x0A7AC), `Abs (0x00261); (0x0A7AD,0x0A7AD), `Abs (0x0026C); (0x0A7AE,0x0A7AE), `Abs (0x0026A); (0x0A7B0,0x0A7B0), `Abs (0x0029E); (0x0A7B1,0x0A7B1), `Abs (0x00287); (0x0A7B2,0x0A7B2), `Abs (0x0029D); (0x0A7B3,0x0A7B3), `Abs (0x0AB53); (0x0A7B4,0x0A7B4), `Abs (0x0A7B5); (0x0A7B6,0x0A7B6), `Abs (0x0A7B7); (0x0FF21,0x0FF3A), `Delta (32); (0x10400,0x10427), `Delta (40); (0x104B0,0x104D3), `Delta (40); (0x10C80,0x10CB2), `Delta (64); (0x118A0,0x118BF), `Delta (32); (0x1D400,0x1D419), `Delta (0); (0x1D434,0x1D44D), `Delta (0); (0x1D468,0x1D481), `Delta (0); (0x1D49C,0x1D49C), `Abs (0x1D49C); (0x1D49E,0x1D49F), `Delta (0); (0x1D4A2,0x1D4A2), `Abs (0x1D4A2); (0x1D4A5,0x1D4A6), `Delta (0); (0x1D4A9,0x1D4AC), `Delta (0); (0x1D4AE,0x1D4B5), `Delta (0); (0x1D4D0,0x1D4E9), `Delta (0); (0x1D504,0x1D505), `Delta (0); (0x1D507,0x1D50A), `Delta (0); (0x1D50D,0x1D514), `Delta (0); (0x1D516,0x1D51C), `Delta (0); (0x1D538,0x1D539), `Delta (0); (0x1D53B,0x1D53E), `Delta (0); (0x1D540,0x1D544), `Delta (0); (0x1D546,0x1D546), `Abs (0x1D546); (0x1D54A,0x1D550), `Delta (0); (0x1D56C,0x1D585), `Delta (0); (0x1D5A0,0x1D5B9), `Delta (0); (0x1D5D4,0x1D5ED), `Delta (0); (0x1D608,0x1D621), `Delta (0); (0x1D63C,0x1D655), `Delta (0); (0x1D670,0x1D689), `Delta (0); (0x1D6A8,0x1D6C0), `Delta (0); (0x1D6E2,0x1D6FA), `Delta (0); (0x1D71C,0x1D734), `Delta (0); (0x1D756,0x1D76E), `Delta (0); (0x1D790,0x1D7A8), `Delta (0); (0x1D7CA,0x1D7CA), `Abs (0x1D7CA); (0x1E900,0x1E921), `Delta (34); (0x00061,0x0007A), `Delta (0); (0x000B5,0x000B5), `Abs (0x000B5); (0x000DF,0x000F6), `Delta (0); (0x000F8,0x000FF), `Delta (0); (0x00101,0x00101), `Abs (0x00101); (0x00103,0x00103), `Abs (0x00103); (0x00105,0x00105), `Abs (0x00105); (0x00107,0x00107), `Abs (0x00107); (0x00109,0x00109), `Abs (0x00109); (0x0010B,0x0010B), `Abs (0x0010B); (0x0010D,0x0010D), `Abs (0x0010D); (0x0010F,0x0010F), `Abs (0x0010F); (0x00111,0x00111), `Abs (0x00111); (0x00113,0x00113), `Abs (0x00113); (0x00115,0x00115), `Abs (0x00115); (0x00117,0x00117), `Abs (0x00117); (0x00119,0x00119), `Abs (0x00119); (0x0011B,0x0011B), `Abs (0x0011B); (0x0011D,0x0011D), `Abs (0x0011D); (0x0011F,0x0011F), `Abs (0x0011F); (0x00121,0x00121), `Abs (0x00121); (0x00123,0x00123), `Abs (0x00123); (0x00125,0x00125), `Abs (0x00125); (0x00127,0x00127), `Abs (0x00127); (0x00129,0x00129), `Abs (0x00129); (0x0012B,0x0012B), `Abs (0x0012B); (0x0012D,0x0012D), `Abs (0x0012D); (0x0012F,0x0012F), `Abs (0x0012F); (0x00131,0x00131), `Abs (0x00131); (0x00133,0x00133), `Abs (0x00133); (0x00135,0x00135), `Abs (0x00135); (0x00137,0x00138), `Delta (0); (0x0013A,0x0013A), `Abs (0x0013A); (0x0013C,0x0013C), `Abs (0x0013C); (0x0013E,0x0013E), `Abs (0x0013E); (0x00140,0x00140), `Abs (0x00140); (0x00142,0x00142), `Abs (0x00142); (0x00144,0x00144), `Abs (0x00144); (0x00146,0x00146), `Abs (0x00146); (0x00148,0x00149), `Delta (0); (0x0014B,0x0014B), `Abs (0x0014B); (0x0014D,0x0014D), `Abs (0x0014D); (0x0014F,0x0014F), `Abs (0x0014F); (0x00151,0x00151), `Abs (0x00151); (0x00153,0x00153), `Abs (0x00153); (0x00155,0x00155), `Abs (0x00155); (0x00157,0x00157), `Abs (0x00157); (0x00159,0x00159), `Abs (0x00159); (0x0015B,0x0015B), `Abs (0x0015B); 
(0x0015D,0x0015D), `Abs (0x0015D); (0x0015F,0x0015F), `Abs (0x0015F); (0x00161,0x00161), `Abs (0x00161); (0x00163,0x00163), `Abs (0x00163); (0x00165,0x00165), `Abs (0x00165); (0x00167,0x00167), `Abs (0x00167); (0x00169,0x00169), `Abs (0x00169); (0x0016B,0x0016B), `Abs (0x0016B); (0x0016D,0x0016D), `Abs (0x0016D); (0x0016F,0x0016F), `Abs (0x0016F); (0x00171,0x00171), `Abs (0x00171); (0x00173,0x00173), `Abs (0x00173); (0x00175,0x00175), `Abs (0x00175); (0x00177,0x00177), `Abs (0x00177); (0x0017A,0x0017A), `Abs (0x0017A); (0x0017C,0x0017C), `Abs (0x0017C); (0x0017E,0x00180), `Delta (0); (0x00183,0x00183), `Abs (0x00183); (0x00185,0x00185), `Abs (0x00185); (0x00188,0x00188), `Abs (0x00188); (0x0018C,0x0018D), `Delta (0); (0x00192,0x00192), `Abs (0x00192); (0x00195,0x00195), `Abs (0x00195); (0x00199,0x0019B), `Delta (0); (0x0019E,0x0019E), `Abs (0x0019E); (0x001A1,0x001A1), `Abs (0x001A1); (0x001A3,0x001A3), `Abs (0x001A3); (0x001A5,0x001A5), `Abs (0x001A5); (0x001A8,0x001A8), `Abs (0x001A8); (0x001AA,0x001AB), `Delta (0); (0x001AD,0x001AD), `Abs (0x001AD); (0x001B0,0x001B0), `Abs (0x001B0); (0x001B4,0x001B4), `Abs (0x001B4); (0x001B6,0x001B6), `Abs (0x001B6); (0x001B9,0x001BA), `Delta (0); (0x001BD,0x001BF), `Delta (0); (0x001C6,0x001C6), `Abs (0x001C6); (0x001C9,0x001C9), `Abs (0x001C9); (0x001CC,0x001CC), `Abs (0x001CC); (0x001CE,0x001CE), `Abs (0x001CE); (0x001D0,0x001D0), `Abs (0x001D0); (0x001D2,0x001D2), `Abs (0x001D2); (0x001D4,0x001D4), `Abs (0x001D4); (0x001D6,0x001D6), `Abs (0x001D6); (0x001D8,0x001D8), `Abs (0x001D8); (0x001DA,0x001DA), `Abs (0x001DA); (0x001DC,0x001DD), `Delta (0); (0x001DF,0x001DF), `Abs (0x001DF); (0x001E1,0x001E1), `Abs (0x001E1); (0x001E3,0x001E3), `Abs (0x001E3); (0x001E5,0x001E5), `Abs (0x001E5); (0x001E7,0x001E7), `Abs (0x001E7); (0x001E9,0x001E9), `Abs (0x001E9); (0x001EB,0x001EB), `Abs (0x001EB); (0x001ED,0x001ED), `Abs (0x001ED); (0x001EF,0x001F0), `Delta (0); (0x001F3,0x001F3), `Abs (0x001F3); (0x001F5,0x001F5), `Abs (0x001F5); (0x001F9,0x001F9), `Abs (0x001F9); (0x001FB,0x001FB), `Abs (0x001FB); (0x001FD,0x001FD), `Abs (0x001FD); (0x001FF,0x001FF), `Abs (0x001FF); (0x00201,0x00201), `Abs (0x00201); (0x00203,0x00203), `Abs (0x00203); (0x00205,0x00205), `Abs (0x00205); (0x00207,0x00207), `Abs (0x00207); (0x00209,0x00209), `Abs (0x00209); (0x0020B,0x0020B), `Abs (0x0020B); (0x0020D,0x0020D), `Abs (0x0020D); (0x0020F,0x0020F), `Abs (0x0020F); (0x00211,0x00211), `Abs (0x00211); (0x00213,0x00213), `Abs (0x00213); (0x00215,0x00215), `Abs (0x00215); (0x00217,0x00217), `Abs (0x00217); (0x00219,0x00219), `Abs (0x00219); (0x0021B,0x0021B), `Abs (0x0021B); (0x0021D,0x0021D), `Abs (0x0021D); (0x0021F,0x0021F), `Abs (0x0021F); (0x00221,0x00221), `Abs (0x00221); (0x00223,0x00223), `Abs (0x00223); (0x00225,0x00225), `Abs (0x00225); (0x00227,0x00227), `Abs (0x00227); (0x00229,0x00229), `Abs (0x00229); (0x0022B,0x0022B), `Abs (0x0022B); (0x0022D,0x0022D), `Abs (0x0022D); (0x0022F,0x0022F), `Abs (0x0022F); (0x00231,0x00231), `Abs (0x00231); (0x00233,0x00239), `Delta (0); (0x0023C,0x0023C), `Abs (0x0023C); (0x0023F,0x00240), `Delta (0); (0x00242,0x00242), `Abs (0x00242); (0x00247,0x00247), `Abs (0x00247); (0x00249,0x00249), `Abs (0x00249); (0x0024B,0x0024B), `Abs (0x0024B); (0x0024D,0x0024D), `Abs (0x0024D); (0x0024F,0x00293), `Delta (0); (0x00295,0x002AF), `Delta (0); (0x00371,0x00371), `Abs (0x00371); (0x00373,0x00373), `Abs (0x00373); (0x00377,0x00377), `Abs (0x00377); (0x0037B,0x0037D), `Delta (0); (0x00390,0x00390), `Abs (0x00390); (0x003AC,0x003CE), `Delta (0); 
(0x003D0,0x003D1), `Delta (0); (0x003D5,0x003D7), `Delta (0); (0x003D9,0x003D9), `Abs (0x003D9); (0x003DB,0x003DB), `Abs (0x003DB); (0x003DD,0x003DD), `Abs (0x003DD); (0x003DF,0x003DF), `Abs (0x003DF); (0x003E1,0x003E1), `Abs (0x003E1); (0x003E3,0x003E3), `Abs (0x003E3); (0x003E5,0x003E5), `Abs (0x003E5); (0x003E7,0x003E7), `Abs (0x003E7); (0x003E9,0x003E9), `Abs (0x003E9); (0x003EB,0x003EB), `Abs (0x003EB); (0x003ED,0x003ED), `Abs (0x003ED); (0x003EF,0x003F3), `Delta (0); (0x003F5,0x003F5), `Abs (0x003F5); (0x003F8,0x003F8), `Abs (0x003F8); (0x003FB,0x003FC), `Delta (0); (0x00430,0x0045F), `Delta (0); (0x00461,0x00461), `Abs (0x00461); (0x00463,0x00463), `Abs (0x00463); (0x00465,0x00465), `Abs (0x00465); (0x00467,0x00467), `Abs (0x00467); (0x00469,0x00469), `Abs (0x00469); (0x0046B,0x0046B), `Abs (0x0046B); (0x0046D,0x0046D), `Abs (0x0046D); (0x0046F,0x0046F), `Abs (0x0046F); (0x00471,0x00471), `Abs (0x00471); (0x00473,0x00473), `Abs (0x00473); (0x00475,0x00475), `Abs (0x00475); (0x00477,0x00477), `Abs (0x00477); (0x00479,0x00479), `Abs (0x00479); (0x0047B,0x0047B), `Abs (0x0047B); (0x0047D,0x0047D), `Abs (0x0047D); (0x0047F,0x0047F), `Abs (0x0047F); (0x00481,0x00481), `Abs (0x00481); (0x0048B,0x0048B), `Abs (0x0048B); (0x0048D,0x0048D), `Abs (0x0048D); (0x0048F,0x0048F), `Abs (0x0048F); (0x00491,0x00491), `Abs (0x00491); (0x00493,0x00493), `Abs (0x00493); (0x00495,0x00495), `Abs (0x00495); (0x00497,0x00497), `Abs (0x00497); (0x00499,0x00499), `Abs (0x00499); (0x0049B,0x0049B), `Abs (0x0049B); (0x0049D,0x0049D), `Abs (0x0049D); (0x0049F,0x0049F), `Abs (0x0049F); (0x004A1,0x004A1), `Abs (0x004A1); (0x004A3,0x004A3), `Abs (0x004A3); (0x004A5,0x004A5), `Abs (0x004A5); (0x004A7,0x004A7), `Abs (0x004A7); (0x004A9,0x004A9), `Abs (0x004A9); (0x004AB,0x004AB), `Abs (0x004AB); (0x004AD,0x004AD), `Abs (0x004AD); (0x004AF,0x004AF), `Abs (0x004AF); (0x004B1,0x004B1), `Abs (0x004B1); (0x004B3,0x004B3), `Abs (0x004B3); (0x004B5,0x004B5), `Abs (0x004B5); (0x004B7,0x004B7), `Abs (0x004B7); (0x004B9,0x004B9), `Abs (0x004B9); (0x004BB,0x004BB), `Abs (0x004BB); (0x004BD,0x004BD), `Abs (0x004BD); (0x004BF,0x004BF), `Abs (0x004BF); (0x004C2,0x004C2), `Abs (0x004C2); (0x004C4,0x004C4), `Abs (0x004C4); (0x004C6,0x004C6), `Abs (0x004C6); (0x004C8,0x004C8), `Abs (0x004C8); (0x004CA,0x004CA), `Abs (0x004CA); (0x004CC,0x004CC), `Abs (0x004CC); (0x004CE,0x004CF), `Delta (0); (0x004D1,0x004D1), `Abs (0x004D1); (0x004D3,0x004D3), `Abs (0x004D3); (0x004D5,0x004D5), `Abs (0x004D5); (0x004D7,0x004D7), `Abs (0x004D7); (0x004D9,0x004D9), `Abs (0x004D9); (0x004DB,0x004DB), `Abs (0x004DB); (0x004DD,0x004DD), `Abs (0x004DD); (0x004DF,0x004DF), `Abs (0x004DF); (0x004E1,0x004E1), `Abs (0x004E1); (0x004E3,0x004E3), `Abs (0x004E3); (0x004E5,0x004E5), `Abs (0x004E5); (0x004E7,0x004E7), `Abs (0x004E7); (0x004E9,0x004E9), `Abs (0x004E9); (0x004EB,0x004EB), `Abs (0x004EB); (0x004ED,0x004ED), `Abs (0x004ED); (0x004EF,0x004EF), `Abs (0x004EF); (0x004F1,0x004F1), `Abs (0x004F1); (0x004F3,0x004F3), `Abs (0x004F3); (0x004F5,0x004F5), `Abs (0x004F5); (0x004F7,0x004F7), `Abs (0x004F7); (0x004F9,0x004F9), `Abs (0x004F9); (0x004FB,0x004FB), `Abs (0x004FB); (0x004FD,0x004FD), `Abs (0x004FD); (0x004FF,0x004FF), `Abs (0x004FF); (0x00501,0x00501), `Abs (0x00501); (0x00503,0x00503), `Abs (0x00503); (0x00505,0x00505), `Abs (0x00505); (0x00507,0x00507), `Abs (0x00507); (0x00509,0x00509), `Abs (0x00509); (0x0050B,0x0050B), `Abs (0x0050B); (0x0050D,0x0050D), `Abs (0x0050D); (0x0050F,0x0050F), `Abs (0x0050F); (0x00511,0x00511), `Abs (0x00511); 
(0x00513,0x00513), `Abs (0x00513); (0x00515,0x00515), `Abs (0x00515); (0x00517,0x00517), `Abs (0x00517); (0x00519,0x00519), `Abs (0x00519); (0x0051B,0x0051B), `Abs (0x0051B); (0x0051D,0x0051D), `Abs (0x0051D); (0x0051F,0x0051F), `Abs (0x0051F); (0x00521,0x00521), `Abs (0x00521); (0x00523,0x00523), `Abs (0x00523); (0x00525,0x00525), `Abs (0x00525); (0x00527,0x00527), `Abs (0x00527); (0x00529,0x00529), `Abs (0x00529); (0x0052B,0x0052B), `Abs (0x0052B); (0x0052D,0x0052D), `Abs (0x0052D); (0x0052F,0x0052F), `Abs (0x0052F); (0x00561,0x00587), `Delta (0); (0x013F8,0x013FD), `Delta (0); (0x01C80,0x01C88), `Delta (0); (0x01D00,0x01D2B), `Delta (0); (0x01D6B,0x01D77), `Delta (0); (0x01D79,0x01D9A), `Delta (0); (0x01E01,0x01E01), `Abs (0x01E01); (0x01E03,0x01E03), `Abs (0x01E03); (0x01E05,0x01E05), `Abs (0x01E05); (0x01E07,0x01E07), `Abs (0x01E07); (0x01E09,0x01E09), `Abs (0x01E09); (0x01E0B,0x01E0B), `Abs (0x01E0B); (0x01E0D,0x01E0D), `Abs (0x01E0D); (0x01E0F,0x01E0F), `Abs (0x01E0F); (0x01E11,0x01E11), `Abs (0x01E11); (0x01E13,0x01E13), `Abs (0x01E13); (0x01E15,0x01E15), `Abs (0x01E15); (0x01E17,0x01E17), `Abs (0x01E17); (0x01E19,0x01E19), `Abs (0x01E19); (0x01E1B,0x01E1B), `Abs (0x01E1B); (0x01E1D,0x01E1D), `Abs (0x01E1D); (0x01E1F,0x01E1F), `Abs (0x01E1F); (0x01E21,0x01E21), `Abs (0x01E21); (0x01E23,0x01E23), `Abs (0x01E23); (0x01E25,0x01E25), `Abs (0x01E25); (0x01E27,0x01E27), `Abs (0x01E27); (0x01E29,0x01E29), `Abs (0x01E29); (0x01E2B,0x01E2B), `Abs (0x01E2B); (0x01E2D,0x01E2D), `Abs (0x01E2D); (0x01E2F,0x01E2F), `Abs (0x01E2F); (0x01E31,0x01E31), `Abs (0x01E31); (0x01E33,0x01E33), `Abs (0x01E33); (0x01E35,0x01E35), `Abs (0x01E35); (0x01E37,0x01E37), `Abs (0x01E37); (0x01E39,0x01E39), `Abs (0x01E39); (0x01E3B,0x01E3B), `Abs (0x01E3B); (0x01E3D,0x01E3D), `Abs (0x01E3D); (0x01E3F,0x01E3F), `Abs (0x01E3F); (0x01E41,0x01E41), `Abs (0x01E41); (0x01E43,0x01E43), `Abs (0x01E43); (0x01E45,0x01E45), `Abs (0x01E45); (0x01E47,0x01E47), `Abs (0x01E47); (0x01E49,0x01E49), `Abs (0x01E49); (0x01E4B,0x01E4B), `Abs (0x01E4B); (0x01E4D,0x01E4D), `Abs (0x01E4D); (0x01E4F,0x01E4F), `Abs (0x01E4F); (0x01E51,0x01E51), `Abs (0x01E51); (0x01E53,0x01E53), `Abs (0x01E53); (0x01E55,0x01E55), `Abs (0x01E55); (0x01E57,0x01E57), `Abs (0x01E57); (0x01E59,0x01E59), `Abs (0x01E59); (0x01E5B,0x01E5B), `Abs (0x01E5B); (0x01E5D,0x01E5D), `Abs (0x01E5D); (0x01E5F,0x01E5F), `Abs (0x01E5F); (0x01E61,0x01E61), `Abs (0x01E61); (0x01E63,0x01E63), `Abs (0x01E63); (0x01E65,0x01E65), `Abs (0x01E65); (0x01E67,0x01E67), `Abs (0x01E67); (0x01E69,0x01E69), `Abs (0x01E69); (0x01E6B,0x01E6B), `Abs (0x01E6B); (0x01E6D,0x01E6D), `Abs (0x01E6D); (0x01E6F,0x01E6F), `Abs (0x01E6F); (0x01E71,0x01E71), `Abs (0x01E71); (0x01E73,0x01E73), `Abs (0x01E73); (0x01E75,0x01E75), `Abs (0x01E75); (0x01E77,0x01E77), `Abs (0x01E77); (0x01E79,0x01E79), `Abs (0x01E79); (0x01E7B,0x01E7B), `Abs (0x01E7B); (0x01E7D,0x01E7D), `Abs (0x01E7D); (0x01E7F,0x01E7F), `Abs (0x01E7F); (0x01E81,0x01E81), `Abs (0x01E81); (0x01E83,0x01E83), `Abs (0x01E83); (0x01E85,0x01E85), `Abs (0x01E85); (0x01E87,0x01E87), `Abs (0x01E87); (0x01E89,0x01E89), `Abs (0x01E89); (0x01E8B,0x01E8B), `Abs (0x01E8B); (0x01E8D,0x01E8D), `Abs (0x01E8D); (0x01E8F,0x01E8F), `Abs (0x01E8F); (0x01E91,0x01E91), `Abs (0x01E91); (0x01E93,0x01E93), `Abs (0x01E93); (0x01E95,0x01E9D), `Delta (0); (0x01E9F,0x01E9F), `Abs (0x01E9F); (0x01EA1,0x01EA1), `Abs (0x01EA1); (0x01EA3,0x01EA3), `Abs (0x01EA3); (0x01EA5,0x01EA5), `Abs (0x01EA5); (0x01EA7,0x01EA7), `Abs (0x01EA7); (0x01EA9,0x01EA9), `Abs (0x01EA9); 
(0x01EAB,0x01EAB), `Abs (0x01EAB); (0x01EAD,0x01EAD), `Abs (0x01EAD); (0x01EAF,0x01EAF), `Abs (0x01EAF); (0x01EB1,0x01EB1), `Abs (0x01EB1); (0x01EB3,0x01EB3), `Abs (0x01EB3); (0x01EB5,0x01EB5), `Abs (0x01EB5); (0x01EB7,0x01EB7), `Abs (0x01EB7); (0x01EB9,0x01EB9), `Abs (0x01EB9); (0x01EBB,0x01EBB), `Abs (0x01EBB); (0x01EBD,0x01EBD), `Abs (0x01EBD); (0x01EBF,0x01EBF), `Abs (0x01EBF); (0x01EC1,0x01EC1), `Abs (0x01EC1); (0x01EC3,0x01EC3), `Abs (0x01EC3); (0x01EC5,0x01EC5), `Abs (0x01EC5); (0x01EC7,0x01EC7), `Abs (0x01EC7); (0x01EC9,0x01EC9), `Abs (0x01EC9); (0x01ECB,0x01ECB), `Abs (0x01ECB); (0x01ECD,0x01ECD), `Abs (0x01ECD); (0x01ECF,0x01ECF), `Abs (0x01ECF); (0x01ED1,0x01ED1), `Abs (0x01ED1); (0x01ED3,0x01ED3), `Abs (0x01ED3); (0x01ED5,0x01ED5), `Abs (0x01ED5); (0x01ED7,0x01ED7), `Abs (0x01ED7); (0x01ED9,0x01ED9), `Abs (0x01ED9); (0x01EDB,0x01EDB), `Abs (0x01EDB); (0x01EDD,0x01EDD), `Abs (0x01EDD); (0x01EDF,0x01EDF), `Abs (0x01EDF); (0x01EE1,0x01EE1), `Abs (0x01EE1); (0x01EE3,0x01EE3), `Abs (0x01EE3); (0x01EE5,0x01EE5), `Abs (0x01EE5); (0x01EE7,0x01EE7), `Abs (0x01EE7); (0x01EE9,0x01EE9), `Abs (0x01EE9); (0x01EEB,0x01EEB), `Abs (0x01EEB); (0x01EED,0x01EED), `Abs (0x01EED); (0x01EEF,0x01EEF), `Abs (0x01EEF); (0x01EF1,0x01EF1), `Abs (0x01EF1); (0x01EF3,0x01EF3), `Abs (0x01EF3); (0x01EF5,0x01EF5), `Abs (0x01EF5); (0x01EF7,0x01EF7), `Abs (0x01EF7); (0x01EF9,0x01EF9), `Abs (0x01EF9); (0x01EFB,0x01EFB), `Abs (0x01EFB); (0x01EFD,0x01EFD), `Abs (0x01EFD); (0x01EFF,0x01F07), `Delta (0); (0x01F10,0x01F15), `Delta (0); (0x01F20,0x01F27), `Delta (0); (0x01F30,0x01F37), `Delta (0); (0x01F40,0x01F45), `Delta (0); (0x01F50,0x01F57), `Delta (0); (0x01F60,0x01F67), `Delta (0); (0x01F70,0x01F7D), `Delta (0); (0x01F80,0x01F87), `Delta (0); (0x01F90,0x01F97), `Delta (0); (0x01FA0,0x01FA7), `Delta (0); (0x01FB0,0x01FB4), `Delta (0); (0x01FB6,0x01FB7), `Delta (0); (0x01FBE,0x01FBE), `Abs (0x01FBE); (0x01FC2,0x01FC4), `Delta (0); (0x01FC6,0x01FC7), `Delta (0); (0x01FD0,0x01FD3), `Delta (0); (0x01FD6,0x01FD7), `Delta (0); (0x01FE0,0x01FE7), `Delta (0); (0x01FF2,0x01FF4), `Delta (0); (0x01FF6,0x01FF7), `Delta (0); (0x0210A,0x0210A), `Abs (0x0210A); (0x0210E,0x0210F), `Delta (0); (0x02113,0x02113), `Abs (0x02113); (0x0212F,0x0212F), `Abs (0x0212F); (0x02134,0x02134), `Abs (0x02134); (0x02139,0x02139), `Abs (0x02139); (0x0213C,0x0213D), `Delta (0); (0x02146,0x02149), `Delta (0); (0x0214E,0x0214E), `Abs (0x0214E); (0x02184,0x02184), `Abs (0x02184); (0x02C30,0x02C5E), `Delta (0); (0x02C61,0x02C61), `Abs (0x02C61); (0x02C65,0x02C66), `Delta (0); (0x02C68,0x02C68), `Abs (0x02C68); (0x02C6A,0x02C6A), `Abs (0x02C6A); (0x02C6C,0x02C6C), `Abs (0x02C6C); (0x02C71,0x02C71), `Abs (0x02C71); (0x02C73,0x02C74), `Delta (0); (0x02C76,0x02C7B), `Delta (0); (0x02C81,0x02C81), `Abs (0x02C81); (0x02C83,0x02C83), `Abs (0x02C83); (0x02C85,0x02C85), `Abs (0x02C85); (0x02C87,0x02C87), `Abs (0x02C87); (0x02C89,0x02C89), `Abs (0x02C89); (0x02C8B,0x02C8B), `Abs (0x02C8B); (0x02C8D,0x02C8D), `Abs (0x02C8D); (0x02C8F,0x02C8F), `Abs (0x02C8F); (0x02C91,0x02C91), `Abs (0x02C91); (0x02C93,0x02C93), `Abs (0x02C93); (0x02C95,0x02C95), `Abs (0x02C95); (0x02C97,0x02C97), `Abs (0x02C97); (0x02C99,0x02C99), `Abs (0x02C99); (0x02C9B,0x02C9B), `Abs (0x02C9B); (0x02C9D,0x02C9D), `Abs (0x02C9D); (0x02C9F,0x02C9F), `Abs (0x02C9F); (0x02CA1,0x02CA1), `Abs (0x02CA1); (0x02CA3,0x02CA3), `Abs (0x02CA3); (0x02CA5,0x02CA5), `Abs (0x02CA5); (0x02CA7,0x02CA7), `Abs (0x02CA7); (0x02CA9,0x02CA9), `Abs (0x02CA9); (0x02CAB,0x02CAB), `Abs (0x02CAB); (0x02CAD,0x02CAD), 
`Abs (0x02CAD); (0x02CAF,0x02CAF), `Abs (0x02CAF); (0x02CB1,0x02CB1), `Abs (0x02CB1); (0x02CB3,0x02CB3), `Abs (0x02CB3); (0x02CB5,0x02CB5), `Abs (0x02CB5); (0x02CB7,0x02CB7), `Abs (0x02CB7); (0x02CB9,0x02CB9), `Abs (0x02CB9); (0x02CBB,0x02CBB), `Abs (0x02CBB); (0x02CBD,0x02CBD), `Abs (0x02CBD); (0x02CBF,0x02CBF), `Abs (0x02CBF); (0x02CC1,0x02CC1), `Abs (0x02CC1); (0x02CC3,0x02CC3), `Abs (0x02CC3); (0x02CC5,0x02CC5), `Abs (0x02CC5); (0x02CC7,0x02CC7), `Abs (0x02CC7); (0x02CC9,0x02CC9), `Abs (0x02CC9); (0x02CCB,0x02CCB), `Abs (0x02CCB); (0x02CCD,0x02CCD), `Abs (0x02CCD); (0x02CCF,0x02CCF), `Abs (0x02CCF); (0x02CD1,0x02CD1), `Abs (0x02CD1); (0x02CD3,0x02CD3), `Abs (0x02CD3); (0x02CD5,0x02CD5), `Abs (0x02CD5); (0x02CD7,0x02CD7), `Abs (0x02CD7); (0x02CD9,0x02CD9), `Abs (0x02CD9); (0x02CDB,0x02CDB), `Abs (0x02CDB); (0x02CDD,0x02CDD), `Abs (0x02CDD); (0x02CDF,0x02CDF), `Abs (0x02CDF); (0x02CE1,0x02CE1), `Abs (0x02CE1); (0x02CE3,0x02CE4), `Delta (0); (0x02CEC,0x02CEC), `Abs (0x02CEC); (0x02CEE,0x02CEE), `Abs (0x02CEE); (0x02CF3,0x02CF3), `Abs (0x02CF3); (0x02D00,0x02D25), `Delta (0); (0x02D27,0x02D27), `Abs (0x02D27); (0x02D2D,0x02D2D), `Abs (0x02D2D); (0x0A641,0x0A641), `Abs (0x0A641); (0x0A643,0x0A643), `Abs (0x0A643); (0x0A645,0x0A645), `Abs (0x0A645); (0x0A647,0x0A647), `Abs (0x0A647); (0x0A649,0x0A649), `Abs (0x0A649); (0x0A64B,0x0A64B), `Abs (0x0A64B); (0x0A64D,0x0A64D), `Abs (0x0A64D); (0x0A64F,0x0A64F), `Abs (0x0A64F); (0x0A651,0x0A651), `Abs (0x0A651); (0x0A653,0x0A653), `Abs (0x0A653); (0x0A655,0x0A655), `Abs (0x0A655); (0x0A657,0x0A657), `Abs (0x0A657); (0x0A659,0x0A659), `Abs (0x0A659); (0x0A65B,0x0A65B), `Abs (0x0A65B); (0x0A65D,0x0A65D), `Abs (0x0A65D); (0x0A65F,0x0A65F), `Abs (0x0A65F); (0x0A661,0x0A661), `Abs (0x0A661); (0x0A663,0x0A663), `Abs (0x0A663); (0x0A665,0x0A665), `Abs (0x0A665); (0x0A667,0x0A667), `Abs (0x0A667); (0x0A669,0x0A669), `Abs (0x0A669); (0x0A66B,0x0A66B), `Abs (0x0A66B); (0x0A66D,0x0A66D), `Abs (0x0A66D); (0x0A681,0x0A681), `Abs (0x0A681); (0x0A683,0x0A683), `Abs (0x0A683); (0x0A685,0x0A685), `Abs (0x0A685); (0x0A687,0x0A687), `Abs (0x0A687); (0x0A689,0x0A689), `Abs (0x0A689); (0x0A68B,0x0A68B), `Abs (0x0A68B); (0x0A68D,0x0A68D), `Abs (0x0A68D); (0x0A68F,0x0A68F), `Abs (0x0A68F); (0x0A691,0x0A691), `Abs (0x0A691); (0x0A693,0x0A693), `Abs (0x0A693); (0x0A695,0x0A695), `Abs (0x0A695); (0x0A697,0x0A697), `Abs (0x0A697); (0x0A699,0x0A699), `Abs (0x0A699); (0x0A69B,0x0A69B), `Abs (0x0A69B); (0x0A723,0x0A723), `Abs (0x0A723); (0x0A725,0x0A725), `Abs (0x0A725); (0x0A727,0x0A727), `Abs (0x0A727); (0x0A729,0x0A729), `Abs (0x0A729); (0x0A72B,0x0A72B), `Abs (0x0A72B); (0x0A72D,0x0A72D), `Abs (0x0A72D); (0x0A72F,0x0A731), `Delta (0); (0x0A733,0x0A733), `Abs (0x0A733); (0x0A735,0x0A735), `Abs (0x0A735); (0x0A737,0x0A737), `Abs (0x0A737); (0x0A739,0x0A739), `Abs (0x0A739); (0x0A73B,0x0A73B), `Abs (0x0A73B); (0x0A73D,0x0A73D), `Abs (0x0A73D); (0x0A73F,0x0A73F), `Abs (0x0A73F); (0x0A741,0x0A741), `Abs (0x0A741); (0x0A743,0x0A743), `Abs (0x0A743); (0x0A745,0x0A745), `Abs (0x0A745); (0x0A747,0x0A747), `Abs (0x0A747); (0x0A749,0x0A749), `Abs (0x0A749); (0x0A74B,0x0A74B), `Abs (0x0A74B); (0x0A74D,0x0A74D), `Abs (0x0A74D); (0x0A74F,0x0A74F), `Abs (0x0A74F); (0x0A751,0x0A751), `Abs (0x0A751); (0x0A753,0x0A753), `Abs (0x0A753); (0x0A755,0x0A755), `Abs (0x0A755); (0x0A757,0x0A757), `Abs (0x0A757); (0x0A759,0x0A759), `Abs (0x0A759); (0x0A75B,0x0A75B), `Abs (0x0A75B); (0x0A75D,0x0A75D), `Abs (0x0A75D); (0x0A75F,0x0A75F), `Abs (0x0A75F); (0x0A761,0x0A761), `Abs (0x0A761); 
(0x0A763,0x0A763), `Abs (0x0A763); (0x0A765,0x0A765), `Abs (0x0A765); (0x0A767,0x0A767), `Abs (0x0A767); (0x0A769,0x0A769), `Abs (0x0A769); (0x0A76B,0x0A76B), `Abs (0x0A76B); (0x0A76D,0x0A76D), `Abs (0x0A76D); (0x0A76F,0x0A76F), `Abs (0x0A76F); (0x0A771,0x0A778), `Delta (0); (0x0A77A,0x0A77A), `Abs (0x0A77A); (0x0A77C,0x0A77C), `Abs (0x0A77C); (0x0A77F,0x0A77F), `Abs (0x0A77F); (0x0A781,0x0A781), `Abs (0x0A781); (0x0A783,0x0A783), `Abs (0x0A783); (0x0A785,0x0A785), `Abs (0x0A785); (0x0A787,0x0A787), `Abs (0x0A787); (0x0A78C,0x0A78C), `Abs (0x0A78C); (0x0A78E,0x0A78E), `Abs (0x0A78E); (0x0A791,0x0A791), `Abs (0x0A791); (0x0A793,0x0A795), `Delta (0); (0x0A797,0x0A797), `Abs (0x0A797); (0x0A799,0x0A799), `Abs (0x0A799); (0x0A79B,0x0A79B), `Abs (0x0A79B); (0x0A79D,0x0A79D), `Abs (0x0A79D); (0x0A79F,0x0A79F), `Abs (0x0A79F); (0x0A7A1,0x0A7A1), `Abs (0x0A7A1); (0x0A7A3,0x0A7A3), `Abs (0x0A7A3); (0x0A7A5,0x0A7A5), `Abs (0x0A7A5); (0x0A7A7,0x0A7A7), `Abs (0x0A7A7); (0x0A7A9,0x0A7A9), `Abs (0x0A7A9); (0x0A7B5,0x0A7B5), `Abs (0x0A7B5); (0x0A7B7,0x0A7B7), `Abs (0x0A7B7); (0x0A7FA,0x0A7FA), `Abs (0x0A7FA); (0x0AB30,0x0AB5A), `Delta (0); (0x0AB60,0x0AB65), `Delta (0); (0x0AB70,0x0ABBF), `Delta (0); (0x0FB00,0x0FB06), `Delta (0); (0x0FB13,0x0FB17), `Delta (0); (0x0FF41,0x0FF5A), `Delta (0); (0x10428,0x1044F), `Delta (0); (0x104D8,0x104FB), `Delta (0); (0x10CC0,0x10CF2), `Delta (0); (0x118C0,0x118DF), `Delta (0); (0x1D41A,0x1D433), `Delta (0); (0x1D44E,0x1D454), `Delta (0); (0x1D456,0x1D467), `Delta (0); (0x1D482,0x1D49B), `Delta (0); (0x1D4B6,0x1D4B9), `Delta (0); (0x1D4BB,0x1D4BB), `Abs (0x1D4BB); (0x1D4BD,0x1D4C3), `Delta (0); (0x1D4C5,0x1D4CF), `Delta (0); (0x1D4EA,0x1D503), `Delta (0); (0x1D51E,0x1D537), `Delta (0); (0x1D552,0x1D56B), `Delta (0); (0x1D586,0x1D59F), `Delta (0); (0x1D5BA,0x1D5D3), `Delta (0); (0x1D5EE,0x1D607), `Delta (0); (0x1D622,0x1D63B), `Delta (0); (0x1D656,0x1D66F), `Delta (0); (0x1D68A,0x1D6A5), `Delta (0); (0x1D6C2,0x1D6DA), `Delta (0); (0x1D6DC,0x1D6E1), `Delta (0); (0x1D6FC,0x1D714), `Delta (0); (0x1D716,0x1D71B), `Delta (0); (0x1D736,0x1D74E), `Delta (0); (0x1D750,0x1D755), `Delta (0); (0x1D770,0x1D788), `Delta (0); (0x1D78A,0x1D78F), `Delta (0); (0x1D7AA,0x1D7C2), `Delta (0); (0x1D7C4,0x1D7C9), `Delta (0); (0x1D7CB,0x1D7CB), `Abs (0x1D7CB); (0x1E922,0x1E943), `Delta (0); (0x001C5,0x001C5), `Abs (0x001C6); (0x001C8,0x001C8), `Abs (0x001C9); (0x001CB,0x001CB), `Abs (0x001CC); (0x001F2,0x001F2), `Abs (0x001F3); (0x01F88,0x01F8F), `Delta (-8); (0x01F98,0x01F9F), `Delta (-8); (0x01FA8,0x01FAF), `Delta (-8); (0x01FBC,0x01FBC), `Abs (0x01FB3); (0x01FCC,0x01FCC), `Abs (0x01FC3); (0x01FFC,0x01FFC), `Abs (0x01FF3); (0x00300,0x0036F), `Delta (0); (0x00483,0x00487), `Delta (0); (0x00591,0x005BD), `Delta (0); (0x005BF,0x005BF), `Abs (0x005BF); (0x005C1,0x005C2), `Delta (0); (0x005C4,0x005C5), `Delta (0); (0x005C7,0x005C7), `Abs (0x005C7); (0x00610,0x0061A), `Delta (0); (0x0064B,0x0065F), `Delta (0); (0x00670,0x00670), `Abs (0x00670); (0x006D6,0x006DC), `Delta (0); (0x006DF,0x006E4), `Delta (0); (0x006E7,0x006E8), `Delta (0); (0x006EA,0x006ED), `Delta (0); (0x00711,0x00711), `Abs (0x00711); (0x00730,0x0074A), `Delta (0); (0x007A6,0x007B0), `Delta (0); (0x007EB,0x007F3), `Delta (0); (0x00816,0x00819), `Delta (0); (0x0081B,0x00823), `Delta (0); (0x00825,0x00827), `Delta (0); (0x00829,0x0082D), `Delta (0); (0x00859,0x0085B), `Delta (0); (0x008D4,0x008E1), `Delta (0); (0x008E3,0x00902), `Delta (0); (0x0093A,0x0093A), `Abs (0x0093A); (0x0093C,0x0093C), `Abs (0x0093C); (0x00941,0x00948), 
`Delta (0); (0x0094D,0x0094D), `Abs (0x0094D); (0x00951,0x00957), `Delta (0); (0x00962,0x00963), `Delta (0); (0x00981,0x00981), `Abs (0x00981); (0x009BC,0x009BC), `Abs (0x009BC); (0x009C1,0x009C4), `Delta (0); (0x009CD,0x009CD), `Abs (0x009CD); (0x009E2,0x009E3), `Delta (0); (0x00A01,0x00A02), `Delta (0); (0x00A3C,0x00A3C), `Abs (0x00A3C); (0x00A41,0x00A42), `Delta (0); (0x00A47,0x00A48), `Delta (0); (0x00A4B,0x00A4D), `Delta (0); (0x00A51,0x00A51), `Abs (0x00A51); (0x00A70,0x00A71), `Delta (0); (0x00A75,0x00A75), `Abs (0x00A75); (0x00A81,0x00A82), `Delta (0); (0x00ABC,0x00ABC), `Abs (0x00ABC); (0x00AC1,0x00AC5), `Delta (0); (0x00AC7,0x00AC8), `Delta (0); (0x00ACD,0x00ACD), `Abs (0x00ACD); (0x00AE2,0x00AE3), `Delta (0); (0x00B01,0x00B01), `Abs (0x00B01); (0x00B3C,0x00B3C), `Abs (0x00B3C); (0x00B3F,0x00B3F), `Abs (0x00B3F); (0x00B41,0x00B44), `Delta (0); (0x00B4D,0x00B4D), `Abs (0x00B4D); (0x00B56,0x00B56), `Abs (0x00B56); (0x00B62,0x00B63), `Delta (0); (0x00B82,0x00B82), `Abs (0x00B82); (0x00BC0,0x00BC0), `Abs (0x00BC0); (0x00BCD,0x00BCD), `Abs (0x00BCD); (0x00C00,0x00C00), `Abs (0x00C00); (0x00C3E,0x00C40), `Delta (0); (0x00C46,0x00C48), `Delta (0); (0x00C4A,0x00C4D), `Delta (0); (0x00C55,0x00C56), `Delta (0); (0x00C62,0x00C63), `Delta (0); (0x00C81,0x00C81), `Abs (0x00C81); (0x00CBC,0x00CBC), `Abs (0x00CBC); (0x00CBF,0x00CBF), `Abs (0x00CBF); (0x00CC6,0x00CC6), `Abs (0x00CC6); (0x00CCC,0x00CCD), `Delta (0); (0x00CE2,0x00CE3), `Delta (0); (0x00D01,0x00D01), `Abs (0x00D01); (0x00D41,0x00D44), `Delta (0); (0x00D4D,0x00D4D), `Abs (0x00D4D); (0x00D62,0x00D63), `Delta (0); (0x00DCA,0x00DCA), `Abs (0x00DCA); (0x00DD2,0x00DD4), `Delta (0); (0x00DD6,0x00DD6), `Abs (0x00DD6); (0x00E31,0x00E31), `Abs (0x00E31); (0x00E34,0x00E3A), `Delta (0); (0x00E47,0x00E4E), `Delta (0); (0x00EB1,0x00EB1), `Abs (0x00EB1); (0x00EB4,0x00EB9), `Delta (0); (0x00EBB,0x00EBC), `Delta (0); (0x00EC8,0x00ECD), `Delta (0); (0x00F18,0x00F19), `Delta (0); (0x00F35,0x00F35), `Abs (0x00F35); (0x00F37,0x00F37), `Abs (0x00F37); (0x00F39,0x00F39), `Abs (0x00F39); (0x00F71,0x00F7E), `Delta (0); (0x00F80,0x00F84), `Delta (0); (0x00F86,0x00F87), `Delta (0); (0x00F8D,0x00F97), `Delta (0); (0x00F99,0x00FBC), `Delta (0); (0x00FC6,0x00FC6), `Abs (0x00FC6); (0x0102D,0x01030), `Delta (0); (0x01032,0x01037), `Delta (0); (0x01039,0x0103A), `Delta (0); (0x0103D,0x0103E), `Delta (0); (0x01058,0x01059), `Delta (0); (0x0105E,0x01060), `Delta (0); (0x01071,0x01074), `Delta (0); (0x01082,0x01082), `Abs (0x01082); (0x01085,0x01086), `Delta (0); (0x0108D,0x0108D), `Abs (0x0108D); (0x0109D,0x0109D), `Abs (0x0109D); (0x0135D,0x0135F), `Delta (0); (0x01712,0x01714), `Delta (0); (0x01732,0x01734), `Delta (0); (0x01752,0x01753), `Delta (0); (0x01772,0x01773), `Delta (0); (0x017B4,0x017B5), `Delta (0); (0x017B7,0x017BD), `Delta (0); (0x017C6,0x017C6), `Abs (0x017C6); (0x017C9,0x017D3), `Delta (0); (0x017DD,0x017DD), `Abs (0x017DD); (0x0180B,0x0180D), `Delta (0); (0x01885,0x01886), `Delta (0); (0x018A9,0x018A9), `Abs (0x018A9); (0x01920,0x01922), `Delta (0); (0x01927,0x01928), `Delta (0); (0x01932,0x01932), `Abs (0x01932); (0x01939,0x0193B), `Delta (0); (0x01A17,0x01A18), `Delta (0); (0x01A1B,0x01A1B), `Abs (0x01A1B); (0x01A56,0x01A56), `Abs (0x01A56); (0x01A58,0x01A5E), `Delta (0); (0x01A60,0x01A60), `Abs (0x01A60); (0x01A62,0x01A62), `Abs (0x01A62); (0x01A65,0x01A6C), `Delta (0); (0x01A73,0x01A7C), `Delta (0); (0x01A7F,0x01A7F), `Abs (0x01A7F); (0x01AB0,0x01ABD), `Delta (0); (0x01B00,0x01B03), `Delta (0); (0x01B34,0x01B34), `Abs (0x01B34); 
(0x01B36,0x01B3A), `Delta (0); (0x01B3C,0x01B3C), `Abs (0x01B3C); (0x01B42,0x01B42), `Abs (0x01B42); (0x01B6B,0x01B73), `Delta (0); (0x01B80,0x01B81), `Delta (0); (0x01BA2,0x01BA5), `Delta (0); (0x01BA8,0x01BA9), `Delta (0); (0x01BAB,0x01BAD), `Delta (0); (0x01BE6,0x01BE6), `Abs (0x01BE6); (0x01BE8,0x01BE9), `Delta (0); (0x01BED,0x01BED), `Abs (0x01BED); (0x01BEF,0x01BF1), `Delta (0); (0x01C2C,0x01C33), `Delta (0); (0x01C36,0x01C37), `Delta (0); (0x01CD0,0x01CD2), `Delta (0); (0x01CD4,0x01CE0), `Delta (0); (0x01CE2,0x01CE8), `Delta (0); (0x01CED,0x01CED), `Abs (0x01CED); (0x01CF4,0x01CF4), `Abs (0x01CF4); (0x01CF8,0x01CF9), `Delta (0); (0x01DC0,0x01DF5), `Delta (0); (0x01DFB,0x01DFF), `Delta (0); (0x020D0,0x020DC), `Delta (0); (0x020E1,0x020E1), `Abs (0x020E1); (0x020E5,0x020F0), `Delta (0); (0x02CEF,0x02CF1), `Delta (0); (0x02D7F,0x02D7F), `Abs (0x02D7F); (0x02DE0,0x02DFF), `Delta (0); (0x0302A,0x0302D), `Delta (0); (0x03099,0x0309A), `Delta (0); (0x0A66F,0x0A66F), `Abs (0x0A66F); (0x0A674,0x0A67D), `Delta (0); (0x0A69E,0x0A69F), `Delta (0); (0x0A6F0,0x0A6F1), `Delta (0); (0x0A802,0x0A802), `Abs (0x0A802); (0x0A806,0x0A806), `Abs (0x0A806); (0x0A80B,0x0A80B), `Abs (0x0A80B); (0x0A825,0x0A826), `Delta (0); (0x0A8C4,0x0A8C5), `Delta (0); (0x0A8E0,0x0A8F1), `Delta (0); (0x0A926,0x0A92D), `Delta (0); (0x0A947,0x0A951), `Delta (0); (0x0A980,0x0A982), `Delta (0); (0x0A9B3,0x0A9B3), `Abs (0x0A9B3); (0x0A9B6,0x0A9B9), `Delta (0); (0x0A9BC,0x0A9BC), `Abs (0x0A9BC); (0x0A9E5,0x0A9E5), `Abs (0x0A9E5); (0x0AA29,0x0AA2E), `Delta (0); (0x0AA31,0x0AA32), `Delta (0); (0x0AA35,0x0AA36), `Delta (0); (0x0AA43,0x0AA43), `Abs (0x0AA43); (0x0AA4C,0x0AA4C), `Abs (0x0AA4C); (0x0AA7C,0x0AA7C), `Abs (0x0AA7C); (0x0AAB0,0x0AAB0), `Abs (0x0AAB0); (0x0AAB2,0x0AAB4), `Delta (0); (0x0AAB7,0x0AAB8), `Delta (0); (0x0AABE,0x0AABF), `Delta (0); (0x0AAC1,0x0AAC1), `Abs (0x0AAC1); (0x0AAEC,0x0AAED), `Delta (0); (0x0AAF6,0x0AAF6), `Abs (0x0AAF6); (0x0ABE5,0x0ABE5), `Abs (0x0ABE5); (0x0ABE8,0x0ABE8), `Abs (0x0ABE8); (0x0ABED,0x0ABED), `Abs (0x0ABED); (0x0FB1E,0x0FB1E), `Abs (0x0FB1E); (0x0FE00,0x0FE0F), `Delta (0); (0x0FE20,0x0FE2F), `Delta (0); (0x101FD,0x101FD), `Abs (0x101FD); (0x102E0,0x102E0), `Abs (0x102E0); (0x10376,0x1037A), `Delta (0); (0x10A01,0x10A03), `Delta (0); (0x10A05,0x10A06), `Delta (0); (0x10A0C,0x10A0F), `Delta (0); (0x10A38,0x10A3A), `Delta (0); (0x10A3F,0x10A3F), `Abs (0x10A3F); (0x10AE5,0x10AE6), `Delta (0); (0x11001,0x11001), `Abs (0x11001); (0x11038,0x11046), `Delta (0); (0x1107F,0x11081), `Delta (0); (0x110B3,0x110B6), `Delta (0); (0x110B9,0x110BA), `Delta (0); (0x11100,0x11102), `Delta (0); (0x11127,0x1112B), `Delta (0); (0x1112D,0x11134), `Delta (0); (0x11173,0x11173), `Abs (0x11173); (0x11180,0x11181), `Delta (0); (0x111B6,0x111BE), `Delta (0); (0x111CA,0x111CC), `Delta (0); (0x1122F,0x11231), `Delta (0); (0x11234,0x11234), `Abs (0x11234); (0x11236,0x11237), `Delta (0); (0x1123E,0x1123E), `Abs (0x1123E); (0x112DF,0x112DF), `Abs (0x112DF); (0x112E3,0x112EA), `Delta (0); (0x11300,0x11301), `Delta (0); (0x1133C,0x1133C), `Abs (0x1133C); (0x11340,0x11340), `Abs (0x11340); (0x11366,0x1136C), `Delta (0); (0x11370,0x11374), `Delta (0); (0x11438,0x1143F), `Delta (0); (0x11442,0x11444), `Delta (0); (0x11446,0x11446), `Abs (0x11446); (0x114B3,0x114B8), `Delta (0); (0x114BA,0x114BA), `Abs (0x114BA); (0x114BF,0x114C0), `Delta (0); (0x114C2,0x114C3), `Delta (0); (0x115B2,0x115B5), `Delta (0); (0x115BC,0x115BD), `Delta (0); (0x115BF,0x115C0), `Delta (0); (0x115DC,0x115DD), `Delta (0); (0x11633,0x1163A), `Delta 
(0); (0x1163D,0x1163D), `Abs (0x1163D); (0x1163F,0x11640), `Delta (0); (0x116AB,0x116AB), `Abs (0x116AB); (0x116AD,0x116AD), `Abs (0x116AD); (0x116B0,0x116B5), `Delta (0); (0x116B7,0x116B7), `Abs (0x116B7); (0x1171D,0x1171F), `Delta (0); (0x11722,0x11725), `Delta (0); (0x11727,0x1172B), `Delta (0); (0x11C30,0x11C36), `Delta (0); (0x11C38,0x11C3D), `Delta (0); (0x11C3F,0x11C3F), `Abs (0x11C3F); (0x11C92,0x11CA7), `Delta (0); (0x11CAA,0x11CB0), `Delta (0); (0x11CB2,0x11CB3), `Delta (0); (0x11CB5,0x11CB6), `Delta (0); (0x16AF0,0x16AF4), `Delta (0); (0x16B30,0x16B36), `Delta (0); (0x16F8F,0x16F92), `Delta (0); (0x1BC9D,0x1BC9E), `Delta (0); (0x1D167,0x1D169), `Delta (0); (0x1D17B,0x1D182), `Delta (0); (0x1D185,0x1D18B), `Delta (0); (0x1D1AA,0x1D1AD), `Delta (0); (0x1D242,0x1D244), `Delta (0); (0x1DA00,0x1DA36), `Delta (0); (0x1DA3B,0x1DA6C), `Delta (0); (0x1DA75,0x1DA75), `Abs (0x1DA75); (0x1DA84,0x1DA84), `Abs (0x1DA84); (0x1DA9B,0x1DA9F), `Delta (0); (0x1DAA1,0x1DAAF), `Delta (0); (0x1E000,0x1E006), `Delta (0); (0x1E008,0x1E018), `Delta (0); (0x1E01B,0x1E021), `Delta (0); (0x1E023,0x1E024), `Delta (0); (0x1E026,0x1E02A), `Delta (0); (0x1E8D0,0x1E8D6), `Delta (0); (0x1E944,0x1E94A), `Delta (0); (0xE0100,0xE01EF), `Delta (0); (0x00903,0x00903), `Abs (0x00903); (0x0093B,0x0093B), `Abs (0x0093B); (0x0093E,0x00940), `Delta (0); (0x00949,0x0094C), `Delta (0); (0x0094E,0x0094F), `Delta (0); (0x00982,0x00983), `Delta (0); (0x009BE,0x009C0), `Delta (0); (0x009C7,0x009C8), `Delta (0); (0x009CB,0x009CC), `Delta (0); (0x009D7,0x009D7), `Abs (0x009D7); (0x00A03,0x00A03), `Abs (0x00A03); (0x00A3E,0x00A40), `Delta (0); (0x00A83,0x00A83), `Abs (0x00A83); (0x00ABE,0x00AC0), `Delta (0); (0x00AC9,0x00AC9), `Abs (0x00AC9); (0x00ACB,0x00ACC), `Delta (0); (0x00B02,0x00B03), `Delta (0); (0x00B3E,0x00B3E), `Abs (0x00B3E); (0x00B40,0x00B40), `Abs (0x00B40); (0x00B47,0x00B48), `Delta (0); (0x00B4B,0x00B4C), `Delta (0); (0x00B57,0x00B57), `Abs (0x00B57); (0x00BBE,0x00BBF), `Delta (0); (0x00BC1,0x00BC2), `Delta (0); (0x00BC6,0x00BC8), `Delta (0); (0x00BCA,0x00BCC), `Delta (0); (0x00BD7,0x00BD7), `Abs (0x00BD7); (0x00C01,0x00C03), `Delta (0); (0x00C41,0x00C44), `Delta (0); (0x00C82,0x00C83), `Delta (0); (0x00CBE,0x00CBE), `Abs (0x00CBE); (0x00CC0,0x00CC4), `Delta (0); (0x00CC7,0x00CC8), `Delta (0); (0x00CCA,0x00CCB), `Delta (0); (0x00CD5,0x00CD6), `Delta (0); (0x00D02,0x00D03), `Delta (0); (0x00D3E,0x00D40), `Delta (0); (0x00D46,0x00D48), `Delta (0); (0x00D4A,0x00D4C), `Delta (0); (0x00D57,0x00D57), `Abs (0x00D57); (0x00D82,0x00D83), `Delta (0); (0x00DCF,0x00DD1), `Delta (0); (0x00DD8,0x00DDF), `Delta (0); (0x00DF2,0x00DF3), `Delta (0); (0x00F3E,0x00F3F), `Delta (0); (0x00F7F,0x00F7F), `Abs (0x00F7F); (0x0102B,0x0102C), `Delta (0); (0x01031,0x01031), `Abs (0x01031); (0x01038,0x01038), `Abs (0x01038); (0x0103B,0x0103C), `Delta (0); (0x01056,0x01057), `Delta (0); (0x01062,0x01064), `Delta (0); (0x01067,0x0106D), `Delta (0); (0x01083,0x01084), `Delta (0); (0x01087,0x0108C), `Delta (0); (0x0108F,0x0108F), `Abs (0x0108F); (0x0109A,0x0109C), `Delta (0); (0x017B6,0x017B6), `Abs (0x017B6); (0x017BE,0x017C5), `Delta (0); (0x017C7,0x017C8), `Delta (0); (0x01923,0x01926), `Delta (0); (0x01929,0x0192B), `Delta (0); (0x01930,0x01931), `Delta (0); (0x01933,0x01938), `Delta (0); (0x01A19,0x01A1A), `Delta (0); (0x01A55,0x01A55), `Abs (0x01A55); (0x01A57,0x01A57), `Abs (0x01A57); (0x01A61,0x01A61), `Abs (0x01A61); (0x01A63,0x01A64), `Delta (0); (0x01A6D,0x01A72), `Delta (0); (0x01B04,0x01B04), `Abs (0x01B04); (0x01B35,0x01B35), `Abs 
(0x01B35); (0x01B3B,0x01B3B), `Abs (0x01B3B); (0x01B3D,0x01B41), `Delta (0); (0x01B43,0x01B44), `Delta (0); (0x01B82,0x01B82), `Abs (0x01B82); (0x01BA1,0x01BA1), `Abs (0x01BA1); (0x01BA6,0x01BA7), `Delta (0); (0x01BAA,0x01BAA), `Abs (0x01BAA); (0x01BE7,0x01BE7), `Abs (0x01BE7); (0x01BEA,0x01BEC), `Delta (0); (0x01BEE,0x01BEE), `Abs (0x01BEE); (0x01BF2,0x01BF3), `Delta (0); (0x01C24,0x01C2B), `Delta (0); (0x01C34,0x01C35), `Delta (0); (0x01CE1,0x01CE1), `Abs (0x01CE1); (0x01CF2,0x01CF3), `Delta (0); (0x0302E,0x0302F), `Delta (0); (0x0A823,0x0A824), `Delta (0); (0x0A827,0x0A827), `Abs (0x0A827); (0x0A880,0x0A881), `Delta (0); (0x0A8B4,0x0A8C3), `Delta (0); (0x0A952,0x0A953), `Delta (0); (0x0A983,0x0A983), `Abs (0x0A983); (0x0A9B4,0x0A9B5), `Delta (0); (0x0A9BA,0x0A9BB), `Delta (0); (0x0A9BD,0x0A9C0), `Delta (0); (0x0AA2F,0x0AA30), `Delta (0); (0x0AA33,0x0AA34), `Delta (0); (0x0AA4D,0x0AA4D), `Abs (0x0AA4D); (0x0AA7B,0x0AA7B), `Abs (0x0AA7B); (0x0AA7D,0x0AA7D), `Abs (0x0AA7D); (0x0AAEB,0x0AAEB), `Abs (0x0AAEB); (0x0AAEE,0x0AAEF), `Delta (0); (0x0AAF5,0x0AAF5), `Abs (0x0AAF5); (0x0ABE3,0x0ABE4), `Delta (0); (0x0ABE6,0x0ABE7), `Delta (0); (0x0ABE9,0x0ABEA), `Delta (0); (0x0ABEC,0x0ABEC), `Abs (0x0ABEC); (0x11000,0x11000), `Abs (0x11000); (0x11002,0x11002), `Abs (0x11002); (0x11082,0x11082), `Abs (0x11082); (0x110B0,0x110B2), `Delta (0); (0x110B7,0x110B8), `Delta (0); (0x1112C,0x1112C), `Abs (0x1112C); (0x11182,0x11182), `Abs (0x11182); (0x111B3,0x111B5), `Delta (0); (0x111BF,0x111C0), `Delta (0); (0x1122C,0x1122E), `Delta (0); (0x11232,0x11233), `Delta (0); (0x11235,0x11235), `Abs (0x11235); (0x112E0,0x112E2), `Delta (0); (0x11302,0x11303), `Delta (0); (0x1133E,0x1133F), `Delta (0); (0x11341,0x11344), `Delta (0); (0x11347,0x11348), `Delta (0); (0x1134B,0x1134D), `Delta (0); (0x11357,0x11357), `Abs (0x11357); (0x11362,0x11363), `Delta (0); (0x11435,0x11437), `Delta (0); (0x11440,0x11441), `Delta (0); (0x11445,0x11445), `Abs (0x11445); (0x114B0,0x114B2), `Delta (0); (0x114B9,0x114B9), `Abs (0x114B9); (0x114BB,0x114BE), `Delta (0); (0x114C1,0x114C1), `Abs (0x114C1); (0x115AF,0x115B1), `Delta (0); (0x115B8,0x115BB), `Delta (0); (0x115BE,0x115BE), `Abs (0x115BE); (0x11630,0x11632), `Delta (0); (0x1163B,0x1163C), `Delta (0); (0x1163E,0x1163E), `Abs (0x1163E); (0x116AC,0x116AC), `Abs (0x116AC); (0x116AE,0x116AF), `Delta (0); (0x116B6,0x116B6), `Abs (0x116B6); (0x11720,0x11721), `Delta (0); (0x11726,0x11726), `Abs (0x11726); (0x11C2F,0x11C2F), `Abs (0x11C2F); (0x11C3E,0x11C3E), `Abs (0x11C3E); (0x11CA9,0x11CA9), `Abs (0x11CA9); (0x11CB1,0x11CB1), `Abs (0x11CB1); (0x11CB4,0x11CB4), `Abs (0x11CB4); (0x16F51,0x16F7E), `Delta (0); (0x1D165,0x1D166), `Delta (0); (0x1D16D,0x1D172), `Delta (0); (0x00488,0x00489), `Delta (0); (0x01ABE,0x01ABE), `Abs (0x01ABE); (0x020DD,0x020E0), `Delta (0); (0x020E2,0x020E4), `Delta (0); (0x0A670,0x0A672), `Delta (0); (0x00030,0x00039), `Delta (0); (0x00660,0x00669), `Delta (0); (0x006F0,0x006F9), `Delta (0); (0x007C0,0x007C9), `Delta (0); (0x00966,0x0096F), `Delta (0); (0x009E6,0x009EF), `Delta (0); (0x00A66,0x00A6F), `Delta (0); (0x00AE6,0x00AEF), `Delta (0); (0x00B66,0x00B6F), `Delta (0); (0x00BE6,0x00BEF), `Delta (0); (0x00C66,0x00C6F), `Delta (0); (0x00CE6,0x00CEF), `Delta (0); (0x00D66,0x00D6F), `Delta (0); (0x00DE6,0x00DEF), `Delta (0); (0x00E50,0x00E59), `Delta (0); (0x00ED0,0x00ED9), `Delta (0); (0x00F20,0x00F29), `Delta (0); (0x01040,0x01049), `Delta (0); (0x01090,0x01099), `Delta (0); (0x017E0,0x017E9), `Delta (0); (0x01810,0x01819), `Delta (0); (0x01946,0x0194F), 
`Delta (0); (0x019D0,0x019D9), `Delta (0); (0x01A80,0x01A89), `Delta (0); (0x01A90,0x01A99), `Delta (0); (0x01B50,0x01B59), `Delta (0); (0x01BB0,0x01BB9), `Delta (0); (0x01C40,0x01C49), `Delta (0); (0x01C50,0x01C59), `Delta (0); (0x0A620,0x0A629), `Delta (0); (0x0A8D0,0x0A8D9), `Delta (0); (0x0A900,0x0A909), `Delta (0); (0x0A9D0,0x0A9D9), `Delta (0); (0x0A9F0,0x0A9F9), `Delta (0); (0x0AA50,0x0AA59), `Delta (0); (0x0ABF0,0x0ABF9), `Delta (0); (0x0FF10,0x0FF19), `Delta (0); (0x104A0,0x104A9), `Delta (0); (0x11066,0x1106F), `Delta (0); (0x110F0,0x110F9), `Delta (0); (0x11136,0x1113F), `Delta (0); (0x111D0,0x111D9), `Delta (0); (0x112F0,0x112F9), `Delta (0); (0x11450,0x11459), `Delta (0); (0x114D0,0x114D9), `Delta (0); (0x11650,0x11659), `Delta (0); (0x116C0,0x116C9), `Delta (0); (0x11730,0x11739), `Delta (0); (0x118E0,0x118E9), `Delta (0); (0x11C50,0x11C59), `Delta (0); (0x16A60,0x16A69), `Delta (0); (0x16B50,0x16B59), `Delta (0); (0x1D7CE,0x1D7FF), `Delta (0); (0x1E950,0x1E959), `Delta (0); (0x016EE,0x016F0), `Delta (0); (0x02160,0x0216F), `Delta (16); (0x02170,0x02182), `Delta (0); (0x02185,0x02188), `Delta (0); (0x03007,0x03007), `Abs (0x03007); (0x03021,0x03029), `Delta (0); (0x03038,0x0303A), `Delta (0); (0x0A6E6,0x0A6EF), `Delta (0); (0x10140,0x10174), `Delta (0); (0x10341,0x10341), `Abs (0x10341); (0x1034A,0x1034A), `Abs (0x1034A); (0x103D1,0x103D5), `Delta (0); (0x12400,0x1246E), `Delta (0); (0x000B2,0x000B3), `Delta (0); (0x000B9,0x000B9), `Abs (0x000B9); (0x000BC,0x000BE), `Delta (0); (0x009F4,0x009F9), `Delta (0); (0x00B72,0x00B77), `Delta (0); (0x00BF0,0x00BF2), `Delta (0); (0x00C78,0x00C7E), `Delta (0); (0x00D58,0x00D5E), `Delta (0); (0x00D70,0x00D78), `Delta (0); (0x00F2A,0x00F33), `Delta (0); (0x01369,0x0137C), `Delta (0); (0x017F0,0x017F9), `Delta (0); (0x019DA,0x019DA), `Abs (0x019DA); (0x02070,0x02070), `Abs (0x02070); (0x02074,0x02079), `Delta (0); (0x02080,0x02089), `Delta (0); (0x02150,0x0215F), `Delta (0); (0x02189,0x02189), `Abs (0x02189); (0x02460,0x0249B), `Delta (0); (0x024EA,0x024FF), `Delta (0); (0x02776,0x02793), `Delta (0); (0x02CFD,0x02CFD), `Abs (0x02CFD); (0x03192,0x03195), `Delta (0); (0x03220,0x03229), `Delta (0); (0x03248,0x0324F), `Delta (0); (0x03251,0x0325F), `Delta (0); (0x03280,0x03289), `Delta (0); (0x032B1,0x032BF), `Delta (0); (0x0A830,0x0A835), `Delta (0); (0x10107,0x10133), `Delta (0); (0x10175,0x10178), `Delta (0); (0x1018A,0x1018B), `Delta (0); (0x102E1,0x102FB), `Delta (0); (0x10320,0x10323), `Delta (0); (0x10858,0x1085F), `Delta (0); (0x10879,0x1087F), `Delta (0); (0x108A7,0x108AF), `Delta (0); (0x108FB,0x108FF), `Delta (0); (0x10916,0x1091B), `Delta (0); (0x109BC,0x109BD), `Delta (0); (0x109C0,0x109CF), `Delta (0); (0x109D2,0x109FF), `Delta (0); (0x10A40,0x10A47), `Delta (0); (0x10A7D,0x10A7E), `Delta (0); (0x10A9D,0x10A9F), `Delta (0); (0x10AEB,0x10AEF), `Delta (0); (0x10B58,0x10B5F), `Delta (0); (0x10B78,0x10B7F), `Delta (0); (0x10BA9,0x10BAF), `Delta (0); (0x10CFA,0x10CFF), `Delta (0); (0x10E60,0x10E7E), `Delta (0); (0x11052,0x11065), `Delta (0); (0x111E1,0x111F4), `Delta (0); (0x1173A,0x1173B), `Delta (0); (0x118EA,0x118F2), `Delta (0); (0x11C5A,0x11C6C), `Delta (0); (0x16B5B,0x16B61), `Delta (0); (0x1D360,0x1D371), `Delta (0); (0x1E8C7,0x1E8CF), `Delta (0); (0x1F100,0x1F10C), `Delta (0); (0x00020,0x00020), `Abs (0x00020); (0x000A0,0x000A0), `Abs (0x000A0); (0x01680,0x01680), `Abs (0x01680); (0x02000,0x0200A), `Delta (0); (0x0202F,0x0202F), `Abs (0x0202F); (0x0205F,0x0205F), `Abs (0x0205F); (0x03000,0x03000), `Abs (0x03000); 
(0x02028,0x02029), `Delta (0); (0x00001,0x0001F), `Delta (0); (0x0007F,0x0009F), `Delta (0); (0x000AD,0x000AD), `Abs (0x000AD); (0x00600,0x00605), `Delta (0); (0x0061C,0x0061C), `Abs (0x0061C); (0x006DD,0x006DD), `Abs (0x006DD); (0x0070F,0x0070F), `Abs (0x0070F); (0x008E2,0x008E2), `Abs (0x008E2); (0x0180E,0x0180E), `Abs (0x0180E); (0x0200B,0x0200F), `Delta (0); (0x0202A,0x0202E), `Delta (0); (0x02060,0x02064), `Delta (0); (0x02066,0x0206F), `Delta (0); (0x0FEFF,0x0FEFF), `Abs (0x0FEFF); (0x0FFF9,0x0FFFB), `Delta (0); (0x110BD,0x110BD), `Abs (0x110BD); (0x1BCA0,0x1BCA3), `Delta (0); (0x1D173,0x1D17A), `Delta (0); (0xE0001,0xE0001), `Abs (0xE0001); (0xE0020,0xE007F), `Delta (0); (0x0D800,0x0F8FF), `Delta (0); (0xF0000,0xFFFFD), `Delta (0); (0x100000,0x10FFFD), `Delta (0); (0x00378,0x00379), `Delta (0); (0x00380,0x00383), `Delta (0); (0x0038B,0x0038B), `Abs (0x0038B); (0x0038D,0x0038D), `Abs (0x0038D); (0x003A2,0x003A2), `Abs (0x003A2); (0x00530,0x00530), `Abs (0x00530); (0x00557,0x00558), `Delta (0); (0x00560,0x00560), `Abs (0x00560); (0x00588,0x00588), `Abs (0x00588); (0x0058B,0x0058C), `Delta (0); (0x00590,0x00590), `Abs (0x00590); (0x005C8,0x005CF), `Delta (0); (0x005EB,0x005EF), `Delta (0); (0x005F5,0x005FF), `Delta (0); (0x0061D,0x0061D), `Abs (0x0061D); (0x0070E,0x0070E), `Abs (0x0070E); (0x0074B,0x0074C), `Delta (0); (0x007B2,0x007BF), `Delta (0); (0x007FB,0x007FF), `Delta (0); (0x0082E,0x0082F), `Delta (0); (0x0083F,0x0083F), `Abs (0x0083F); (0x0085C,0x0085D), `Delta (0); (0x0085F,0x0089F), `Delta (0); (0x008B5,0x008B5), `Abs (0x008B5); (0x008BE,0x008D3), `Delta (0); (0x00984,0x00984), `Abs (0x00984); (0x0098D,0x0098E), `Delta (0); (0x00991,0x00992), `Delta (0); (0x009A9,0x009A9), `Abs (0x009A9); (0x009B1,0x009B1), `Abs (0x009B1); (0x009B3,0x009B5), `Delta (0); (0x009BA,0x009BB), `Delta (0); (0x009C5,0x009C6), `Delta (0); (0x009C9,0x009CA), `Delta (0); (0x009CF,0x009D6), `Delta (0); (0x009D8,0x009DB), `Delta (0); (0x009DE,0x009DE), `Abs (0x009DE); (0x009E4,0x009E5), `Delta (0); (0x009FC,0x00A00), `Delta (0); (0x00A04,0x00A04), `Abs (0x00A04); (0x00A0B,0x00A0E), `Delta (0); (0x00A11,0x00A12), `Delta (0); (0x00A29,0x00A29), `Abs (0x00A29); (0x00A31,0x00A31), `Abs (0x00A31); (0x00A34,0x00A34), `Abs (0x00A34); (0x00A37,0x00A37), `Abs (0x00A37); (0x00A3A,0x00A3B), `Delta (0); (0x00A3D,0x00A3D), `Abs (0x00A3D); (0x00A43,0x00A46), `Delta (0); (0x00A49,0x00A4A), `Delta (0); (0x00A4E,0x00A50), `Delta (0); (0x00A52,0x00A58), `Delta (0); (0x00A5D,0x00A5D), `Abs (0x00A5D); (0x00A5F,0x00A65), `Delta (0); (0x00A76,0x00A80), `Delta (0); (0x00A84,0x00A84), `Abs (0x00A84); (0x00A8E,0x00A8E), `Abs (0x00A8E); (0x00A92,0x00A92), `Abs (0x00A92); (0x00AA9,0x00AA9), `Abs (0x00AA9); (0x00AB1,0x00AB1), `Abs (0x00AB1); (0x00AB4,0x00AB4), `Abs (0x00AB4); (0x00ABA,0x00ABB), `Delta (0); (0x00AC6,0x00AC6), `Abs (0x00AC6); (0x00ACA,0x00ACA), `Abs (0x00ACA); (0x00ACE,0x00ACF), `Delta (0); (0x00AD1,0x00ADF), `Delta (0); (0x00AE4,0x00AE5), `Delta (0); (0x00AF2,0x00AF8), `Delta (0); (0x00AFA,0x00B00), `Delta (0); (0x00B04,0x00B04), `Abs (0x00B04); (0x00B0D,0x00B0E), `Delta (0); (0x00B11,0x00B12), `Delta (0); (0x00B29,0x00B29), `Abs (0x00B29); (0x00B31,0x00B31), `Abs (0x00B31); (0x00B34,0x00B34), `Abs (0x00B34); (0x00B3A,0x00B3B), `Delta (0); (0x00B45,0x00B46), `Delta (0); (0x00B49,0x00B4A), `Delta (0); (0x00B4E,0x00B55), `Delta (0); (0x00B58,0x00B5B), `Delta (0); (0x00B5E,0x00B5E), `Abs (0x00B5E); (0x00B64,0x00B65), `Delta (0); (0x00B78,0x00B81), `Delta (0); (0x00B84,0x00B84), `Abs (0x00B84); (0x00B8B,0x00B8D), 
`Delta (0); (0x00B91,0x00B91), `Abs (0x00B91); (0x00B96,0x00B98), `Delta (0); (0x00B9B,0x00B9B), `Abs (0x00B9B); (0x00B9D,0x00B9D), `Abs (0x00B9D); (0x00BA0,0x00BA2), `Delta (0); (0x00BA5,0x00BA7), `Delta (0); (0x00BAB,0x00BAD), `Delta (0); (0x00BBA,0x00BBD), `Delta (0); (0x00BC3,0x00BC5), `Delta (0); (0x00BC9,0x00BC9), `Abs (0x00BC9); (0x00BCE,0x00BCF), `Delta (0); (0x00BD1,0x00BD6), `Delta (0); (0x00BD8,0x00BE5), `Delta (0); (0x00BFB,0x00BFF), `Delta (0); (0x00C04,0x00C04), `Abs (0x00C04); (0x00C0D,0x00C0D), `Abs (0x00C0D); (0x00C11,0x00C11), `Abs (0x00C11); (0x00C29,0x00C29), `Abs (0x00C29); (0x00C3A,0x00C3C), `Delta (0); (0x00C45,0x00C45), `Abs (0x00C45); (0x00C49,0x00C49), `Abs (0x00C49); (0x00C4E,0x00C54), `Delta (0); (0x00C57,0x00C57), `Abs (0x00C57); (0x00C5B,0x00C5F), `Delta (0); (0x00C64,0x00C65), `Delta (0); (0x00C70,0x00C77), `Delta (0); (0x00C84,0x00C84), `Abs (0x00C84); (0x00C8D,0x00C8D), `Abs (0x00C8D); (0x00C91,0x00C91), `Abs (0x00C91); (0x00CA9,0x00CA9), `Abs (0x00CA9); (0x00CB4,0x00CB4), `Abs (0x00CB4); (0x00CBA,0x00CBB), `Delta (0); (0x00CC5,0x00CC5), `Abs (0x00CC5); (0x00CC9,0x00CC9), `Abs (0x00CC9); (0x00CCE,0x00CD4), `Delta (0); (0x00CD7,0x00CDD), `Delta (0); (0x00CDF,0x00CDF), `Abs (0x00CDF); (0x00CE4,0x00CE5), `Delta (0); (0x00CF0,0x00CF0), `Abs (0x00CF0); (0x00CF3,0x00D00), `Delta (0); (0x00D04,0x00D04), `Abs (0x00D04); (0x00D0D,0x00D0D), `Abs (0x00D0D); (0x00D11,0x00D11), `Abs (0x00D11); (0x00D3B,0x00D3C), `Delta (0); (0x00D45,0x00D45), `Abs (0x00D45); (0x00D49,0x00D49), `Abs (0x00D49); (0x00D50,0x00D53), `Delta (0); (0x00D64,0x00D65), `Delta (0); (0x00D80,0x00D81), `Delta (0); (0x00D84,0x00D84), `Abs (0x00D84); (0x00D97,0x00D99), `Delta (0); (0x00DB2,0x00DB2), `Abs (0x00DB2); (0x00DBC,0x00DBC), `Abs (0x00DBC); (0x00DBE,0x00DBF), `Delta (0); (0x00DC7,0x00DC9), `Delta (0); (0x00DCB,0x00DCE), `Delta (0); (0x00DD5,0x00DD5), `Abs (0x00DD5); (0x00DD7,0x00DD7), `Abs (0x00DD7); (0x00DE0,0x00DE5), `Delta (0); (0x00DF0,0x00DF1), `Delta (0); (0x00DF5,0x00E00), `Delta (0); (0x00E3B,0x00E3E), `Delta (0); (0x00E5C,0x00E80), `Delta (0); (0x00E83,0x00E83), `Abs (0x00E83); (0x00E85,0x00E86), `Delta (0); (0x00E89,0x00E89), `Abs (0x00E89); (0x00E8B,0x00E8C), `Delta (0); (0x00E8E,0x00E93), `Delta (0); (0x00E98,0x00E98), `Abs (0x00E98); (0x00EA0,0x00EA0), `Abs (0x00EA0); (0x00EA4,0x00EA4), `Abs (0x00EA4); (0x00EA6,0x00EA6), `Abs (0x00EA6); (0x00EA8,0x00EA9), `Delta (0); (0x00EAC,0x00EAC), `Abs (0x00EAC); (0x00EBA,0x00EBA), `Abs (0x00EBA); (0x00EBE,0x00EBF), `Delta (0); (0x00EC5,0x00EC5), `Abs (0x00EC5); (0x00EC7,0x00EC7), `Abs (0x00EC7); (0x00ECE,0x00ECF), `Delta (0); (0x00EDA,0x00EDB), `Delta (0); (0x00EE0,0x00EFF), `Delta (0); (0x00F48,0x00F48), `Abs (0x00F48); (0x00F6D,0x00F70), `Delta (0); (0x00F98,0x00F98), `Abs (0x00F98); (0x00FBD,0x00FBD), `Abs (0x00FBD); (0x00FCD,0x00FCD), `Abs (0x00FCD); (0x00FDB,0x00FFF), `Delta (0); (0x010C6,0x010C6), `Abs (0x010C6); (0x010C8,0x010CC), `Delta (0); (0x010CE,0x010CF), `Delta (0); (0x01249,0x01249), `Abs (0x01249); (0x0124E,0x0124F), `Delta (0); (0x01257,0x01257), `Abs (0x01257); (0x01259,0x01259), `Abs (0x01259); (0x0125E,0x0125F), `Delta (0); (0x01289,0x01289), `Abs (0x01289); (0x0128E,0x0128F), `Delta (0); (0x012B1,0x012B1), `Abs (0x012B1); (0x012B6,0x012B7), `Delta (0); (0x012BF,0x012BF), `Abs (0x012BF); (0x012C1,0x012C1), `Abs (0x012C1); (0x012C6,0x012C7), `Delta (0); (0x012D7,0x012D7), `Abs (0x012D7); (0x01311,0x01311), `Abs (0x01311); (0x01316,0x01317), `Delta (0); (0x0135B,0x0135C), `Delta (0); (0x0137D,0x0137F), `Delta (0); 
(0x0139A,0x0139F), `Delta (0); (0x013F6,0x013F7), `Delta (0); (0x013FE,0x013FF), `Delta (0); (0x0169D,0x0169F), `Delta (0); (0x016F9,0x016FF), `Delta (0); (0x0170D,0x0170D), `Abs (0x0170D); (0x01715,0x0171F), `Delta (0); (0x01737,0x0173F), `Delta (0); (0x01754,0x0175F), `Delta (0); (0x0176D,0x0176D), `Abs (0x0176D); (0x01771,0x01771), `Abs (0x01771); (0x01774,0x0177F), `Delta (0); (0x017DE,0x017DF), `Delta (0); (0x017EA,0x017EF), `Delta (0); (0x017FA,0x017FF), `Delta (0); (0x0180F,0x0180F), `Abs (0x0180F); (0x0181A,0x0181F), `Delta (0); (0x01878,0x0187F), `Delta (0); (0x018AB,0x018AF), `Delta (0); (0x018F6,0x018FF), `Delta (0); (0x0191F,0x0191F), `Abs (0x0191F); (0x0192C,0x0192F), `Delta (0); (0x0193C,0x0193F), `Delta (0); (0x01941,0x01943), `Delta (0); (0x0196E,0x0196F), `Delta (0); (0x01975,0x0197F), `Delta (0); (0x019AC,0x019AF), `Delta (0); (0x019CA,0x019CF), `Delta (0); (0x019DB,0x019DD), `Delta (0); (0x01A1C,0x01A1D), `Delta (0); (0x01A5F,0x01A5F), `Abs (0x01A5F); (0x01A7D,0x01A7E), `Delta (0); (0x01A8A,0x01A8F), `Delta (0); (0x01A9A,0x01A9F), `Delta (0); (0x01AAE,0x01AAF), `Delta (0); (0x01ABF,0x01AFF), `Delta (0); (0x01B4C,0x01B4F), `Delta (0); (0x01B7D,0x01B7F), `Delta (0); (0x01BF4,0x01BFB), `Delta (0); (0x01C38,0x01C3A), `Delta (0); (0x01C4A,0x01C4C), `Delta (0); (0x01C89,0x01CBF), `Delta (0); (0x01CC8,0x01CCF), `Delta (0); (0x01CF7,0x01CF7), `Abs (0x01CF7); (0x01CFA,0x01CFF), `Delta (0); (0x01DF6,0x01DFA), `Delta (0); (0x01F16,0x01F17), `Delta (0); (0x01F1E,0x01F1F), `Delta (0); (0x01F46,0x01F47), `Delta (0); (0x01F4E,0x01F4F), `Delta (0); (0x01F58,0x01F58), `Abs (0x01F58); (0x01F5A,0x01F5A), `Abs (0x01F5A); (0x01F5C,0x01F5C), `Abs (0x01F5C); (0x01F5E,0x01F5E), `Abs (0x01F5E); (0x01F7E,0x01F7F), `Delta (0); (0x01FB5,0x01FB5), `Abs (0x01FB5); (0x01FC5,0x01FC5), `Abs (0x01FC5); (0x01FD4,0x01FD5), `Delta (0); (0x01FDC,0x01FDC), `Abs (0x01FDC); (0x01FF0,0x01FF1), `Delta (0); (0x01FF5,0x01FF5), `Abs (0x01FF5); (0x01FFF,0x01FFF), `Abs (0x01FFF); (0x02065,0x02065), `Abs (0x02065); (0x02072,0x02073), `Delta (0); (0x0208F,0x0208F), `Abs (0x0208F); (0x0209D,0x0209F), `Delta (0); (0x020BF,0x020CF), `Delta (0); (0x020F1,0x020FF), `Delta (0); (0x0218C,0x0218F), `Delta (0); (0x023FF,0x023FF), `Abs (0x023FF); (0x02427,0x0243F), `Delta (0); (0x0244B,0x0245F), `Delta (0); (0x02B74,0x02B75), `Delta (0); (0x02B96,0x02B97), `Delta (0); (0x02BBA,0x02BBC), `Delta (0); (0x02BC9,0x02BC9), `Abs (0x02BC9); (0x02BD2,0x02BEB), `Delta (0); (0x02BF0,0x02BFF), `Delta (0); (0x02C2F,0x02C2F), `Abs (0x02C2F); (0x02C5F,0x02C5F), `Abs (0x02C5F); (0x02CF4,0x02CF8), `Delta (0); (0x02D26,0x02D26), `Abs (0x02D26); (0x02D28,0x02D2C), `Delta (0); (0x02D2E,0x02D2F), `Delta (0); (0x02D68,0x02D6E), `Delta (0); (0x02D71,0x02D7E), `Delta (0); (0x02D97,0x02D9F), `Delta (0); (0x02DA7,0x02DA7), `Abs (0x02DA7); (0x02DAF,0x02DAF), `Abs (0x02DAF); (0x02DB7,0x02DB7), `Abs (0x02DB7); (0x02DBF,0x02DBF), `Abs (0x02DBF); (0x02DC7,0x02DC7), `Abs (0x02DC7); (0x02DCF,0x02DCF), `Abs (0x02DCF); (0x02DD7,0x02DD7), `Abs (0x02DD7); (0x02DDF,0x02DDF), `Abs (0x02DDF); (0x02E45,0x02E7F), `Delta (0); (0x02E9A,0x02E9A), `Abs (0x02E9A); (0x02EF4,0x02EFF), `Delta (0); (0x02FD6,0x02FEF), `Delta (0); (0x02FFC,0x02FFF), `Delta (0); (0x03040,0x03040), `Abs (0x03040); (0x03097,0x03098), `Delta (0); (0x03100,0x03104), `Delta (0); (0x0312E,0x03130), `Delta (0); (0x0318F,0x0318F), `Abs (0x0318F); (0x031BB,0x031BF), `Delta (0); (0x031E4,0x031EF), `Delta (0); (0x0321F,0x0321F), `Abs (0x0321F); (0x032FF,0x032FF), `Abs (0x032FF); (0x04DB6,0x04DBF), `Delta (0); 
(0x09FD6,0x09FFF), `Delta (0); (0x0A48D,0x0A48F), `Delta (0); (0x0A4C7,0x0A4CF), `Delta (0); (0x0A62C,0x0A63F), `Delta (0); (0x0A6F8,0x0A6FF), `Delta (0); (0x0A7AF,0x0A7AF), `Abs (0x0A7AF); (0x0A7B8,0x0A7F6), `Delta (0); (0x0A82C,0x0A82F), `Delta (0); (0x0A83A,0x0A83F), `Delta (0); (0x0A878,0x0A87F), `Delta (0); (0x0A8C6,0x0A8CD), `Delta (0); (0x0A8DA,0x0A8DF), `Delta (0); (0x0A8FE,0x0A8FF), `Delta (0); (0x0A954,0x0A95E), `Delta (0); (0x0A97D,0x0A97F), `Delta (0); (0x0A9CE,0x0A9CE), `Abs (0x0A9CE); (0x0A9DA,0x0A9DD), `Delta (0); (0x0A9FF,0x0A9FF), `Abs (0x0A9FF); (0x0AA37,0x0AA3F), `Delta (0); (0x0AA4E,0x0AA4F), `Delta (0); (0x0AA5A,0x0AA5B), `Delta (0); (0x0AAC3,0x0AADA), `Delta (0); (0x0AAF7,0x0AB00), `Delta (0); (0x0AB07,0x0AB08), `Delta (0); (0x0AB0F,0x0AB10), `Delta (0); (0x0AB17,0x0AB1F), `Delta (0); (0x0AB27,0x0AB27), `Abs (0x0AB27); (0x0AB2F,0x0AB2F), `Abs (0x0AB2F); (0x0AB66,0x0AB6F), `Delta (0); (0x0ABEE,0x0ABEF), `Delta (0); (0x0ABFA,0x0ABFF), `Delta (0); (0x0D7A4,0x0D7AF), `Delta (0); (0x0D7C7,0x0D7CA), `Delta (0); (0x0D7FC,0x0D7FF), `Delta (0); (0x0FA6E,0x0FA6F), `Delta (0); (0x0FADA,0x0FAFF), `Delta (0); (0x0FB07,0x0FB12), `Delta (0); (0x0FB18,0x0FB1C), `Delta (0); (0x0FB37,0x0FB37), `Abs (0x0FB37); (0x0FB3D,0x0FB3D), `Abs (0x0FB3D); (0x0FB3F,0x0FB3F), `Abs (0x0FB3F); (0x0FB42,0x0FB42), `Abs (0x0FB42); (0x0FB45,0x0FB45), `Abs (0x0FB45); (0x0FBC2,0x0FBD2), `Delta (0); (0x0FD40,0x0FD4F), `Delta (0); (0x0FD90,0x0FD91), `Delta (0); (0x0FDC8,0x0FDEF), `Delta (0); (0x0FDFE,0x0FDFF), `Delta (0); (0x0FE1A,0x0FE1F), `Delta (0); (0x0FE53,0x0FE53), `Abs (0x0FE53); (0x0FE67,0x0FE67), `Abs (0x0FE67); (0x0FE6C,0x0FE6F), `Delta (0); (0x0FE75,0x0FE75), `Abs (0x0FE75); (0x0FEFD,0x0FEFE), `Delta (0); (0x0FF00,0x0FF00), `Abs (0x0FF00); (0x0FFBF,0x0FFC1), `Delta (0); (0x0FFC8,0x0FFC9), `Delta (0); (0x0FFD0,0x0FFD1), `Delta (0); (0x0FFD8,0x0FFD9), `Delta (0); (0x0FFDD,0x0FFDF), `Delta (0); (0x0FFE7,0x0FFE7), `Abs (0x0FFE7); (0x0FFEF,0x0FFF8), `Delta (0); (0x0FFFE,0x0FFFF), `Delta (0); (0x1000C,0x1000C), `Abs (0x1000C); (0x10027,0x10027), `Abs (0x10027); (0x1003B,0x1003B), `Abs (0x1003B); (0x1003E,0x1003E), `Abs (0x1003E); (0x1004E,0x1004F), `Delta (0); (0x1005E,0x1007F), `Delta (0); (0x100FB,0x100FF), `Delta (0); (0x10103,0x10106), `Delta (0); (0x10134,0x10136), `Delta (0); (0x1018F,0x1018F), `Abs (0x1018F); (0x1019C,0x1019F), `Delta (0); (0x101A1,0x101CF), `Delta (0); (0x101FE,0x1027F), `Delta (0); (0x1029D,0x1029F), `Delta (0); (0x102D1,0x102DF), `Delta (0); (0x102FC,0x102FF), `Delta (0); (0x10324,0x1032F), `Delta (0); (0x1034B,0x1034F), `Delta (0); (0x1037B,0x1037F), `Delta (0); (0x1039E,0x1039E), `Abs (0x1039E); (0x103C4,0x103C7), `Delta (0); (0x103D6,0x103FF), `Delta (0); (0x1049E,0x1049F), `Delta (0); (0x104AA,0x104AF), `Delta (0); (0x104D4,0x104D7), `Delta (0); (0x104FC,0x104FF), `Delta (0); (0x10528,0x1052F), `Delta (0); (0x10564,0x1056E), `Delta (0); (0x10570,0x105FF), `Delta (0); (0x10737,0x1073F), `Delta (0); (0x10756,0x1075F), `Delta (0); (0x10768,0x107FF), `Delta (0); (0x10806,0x10807), `Delta (0); (0x10809,0x10809), `Abs (0x10809); (0x10836,0x10836), `Abs (0x10836); (0x10839,0x1083B), `Delta (0); (0x1083D,0x1083E), `Delta (0); (0x10856,0x10856), `Abs (0x10856); (0x1089F,0x108A6), `Delta (0); (0x108B0,0x108DF), `Delta (0); (0x108F3,0x108F3), `Abs (0x108F3); (0x108F6,0x108FA), `Delta (0); (0x1091C,0x1091E), `Delta (0); (0x1093A,0x1093E), `Delta (0); (0x10940,0x1097F), `Delta (0); (0x109B8,0x109BB), `Delta (0); (0x109D0,0x109D1), `Delta (0); (0x10A04,0x10A04), `Abs (0x10A04); 
(0x10A07,0x10A0B), `Delta (0); (0x10A14,0x10A14), `Abs (0x10A14); (0x10A18,0x10A18), `Abs (0x10A18); (0x10A34,0x10A37), `Delta (0); (0x10A3B,0x10A3E), `Delta (0); (0x10A48,0x10A4F), `Delta (0); (0x10A59,0x10A5F), `Delta (0); (0x10AA0,0x10ABF), `Delta (0); (0x10AE7,0x10AEA), `Delta (0); (0x10AF7,0x10AFF), `Delta (0); (0x10B36,0x10B38), `Delta (0); (0x10B56,0x10B57), `Delta (0); (0x10B73,0x10B77), `Delta (0); (0x10B92,0x10B98), `Delta (0); (0x10B9D,0x10BA8), `Delta (0); (0x10BB0,0x10BFF), `Delta (0); (0x10C49,0x10C7F), `Delta (0); (0x10CB3,0x10CBF), `Delta (0); (0x10CF3,0x10CF9), `Delta (0); (0x10D00,0x10E5F), `Delta (0); (0x10E7F,0x10FFF), `Delta (0); (0x1104E,0x11051), `Delta (0); (0x11070,0x1107E), `Delta (0); (0x110C2,0x110CF), `Delta (0); (0x110E9,0x110EF), `Delta (0); (0x110FA,0x110FF), `Delta (0); (0x11135,0x11135), `Abs (0x11135); (0x11144,0x1114F), `Delta (0); (0x11177,0x1117F), `Delta (0); (0x111CE,0x111CF), `Delta (0); (0x111E0,0x111E0), `Abs (0x111E0); (0x111F5,0x111FF), `Delta (0); (0x11212,0x11212), `Abs (0x11212); (0x1123F,0x1127F), `Delta (0); (0x11287,0x11287), `Abs (0x11287); (0x11289,0x11289), `Abs (0x11289); (0x1128E,0x1128E), `Abs (0x1128E); (0x1129E,0x1129E), `Abs (0x1129E); (0x112AA,0x112AF), `Delta (0); (0x112EB,0x112EF), `Delta (0); (0x112FA,0x112FF), `Delta (0); (0x11304,0x11304), `Abs (0x11304); (0x1130D,0x1130E), `Delta (0); (0x11311,0x11312), `Delta (0); (0x11329,0x11329), `Abs (0x11329); (0x11331,0x11331), `Abs (0x11331); (0x11334,0x11334), `Abs (0x11334); (0x1133A,0x1133B), `Delta (0); (0x11345,0x11346), `Delta (0); (0x11349,0x1134A), `Delta (0); (0x1134E,0x1134F), `Delta (0); (0x11351,0x11356), `Delta (0); (0x11358,0x1135C), `Delta (0); (0x11364,0x11365), `Delta (0); (0x1136D,0x1136F), `Delta (0); (0x11375,0x113FF), `Delta (0); (0x1145A,0x1145A), `Abs (0x1145A); (0x1145C,0x1145C), `Abs (0x1145C); (0x1145E,0x1147F), `Delta (0); (0x114C8,0x114CF), `Delta (0); (0x114DA,0x1157F), `Delta (0); (0x115B6,0x115B7), `Delta (0); (0x115DE,0x115FF), `Delta (0); (0x11645,0x1164F), `Delta (0); (0x1165A,0x1165F), `Delta (0); (0x1166D,0x1167F), `Delta (0); (0x116B8,0x116BF), `Delta (0); (0x116CA,0x116FF), `Delta (0); (0x1171A,0x1171C), `Delta (0); (0x1172C,0x1172F), `Delta (0); (0x11740,0x1189F), `Delta (0); (0x118F3,0x118FE), `Delta (0); (0x11900,0x11ABF), `Delta (0); (0x11AF9,0x11BFF), `Delta (0); (0x11C09,0x11C09), `Abs (0x11C09); (0x11C37,0x11C37), `Abs (0x11C37); (0x11C46,0x11C4F), `Delta (0); (0x11C6D,0x11C6F), `Delta (0); (0x11C90,0x11C91), `Delta (0); (0x11CA8,0x11CA8), `Abs (0x11CA8); (0x11CB7,0x11FFF), `Delta (0); (0x1239A,0x123FF), `Delta (0); (0x1246F,0x1246F), `Abs (0x1246F); (0x12475,0x1247F), `Delta (0); (0x12544,0x12FFF), `Delta (0); (0x1342F,0x143FF), `Delta (0); (0x14647,0x167FF), `Delta (0); (0x16A39,0x16A3F), `Delta (0); (0x16A5F,0x16A5F), `Abs (0x16A5F); (0x16A6A,0x16A6D), `Delta (0); (0x16A70,0x16ACF), `Delta (0); (0x16AEE,0x16AEF), `Delta (0); (0x16AF6,0x16AFF), `Delta (0); (0x16B46,0x16B4F), `Delta (0); (0x16B5A,0x16B5A), `Abs (0x16B5A); (0x16B62,0x16B62), `Abs (0x16B62); (0x16B78,0x16B7C), `Delta (0); (0x16B90,0x16EFF), `Delta (0); (0x16F45,0x16F4F), `Delta (0); (0x16F7F,0x16F8E), `Delta (0); (0x16FA0,0x16FDF), `Delta (0); (0x16FE1,0x16FFF), `Delta (0); (0x187ED,0x187FF), `Delta (0); (0x18AF3,0x1AFFF), `Delta (0); (0x1B002,0x1BBFF), `Delta (0); (0x1BC6B,0x1BC6F), `Delta (0); (0x1BC7D,0x1BC7F), `Delta (0); (0x1BC89,0x1BC8F), `Delta (0); (0x1BC9A,0x1BC9B), `Delta (0); (0x1BCA4,0x1CFFF), `Delta (0); (0x1D0F6,0x1D0FF), `Delta (0); (0x1D127,0x1D128), `Delta 
(0); (0x1D1E9,0x1D1FF), `Delta (0); (0x1D246,0x1D2FF), `Delta (0); (0x1D357,0x1D35F), `Delta (0); (0x1D372,0x1D3FF), `Delta (0); (0x1D455,0x1D455), `Abs (0x1D455); (0x1D49D,0x1D49D), `Abs (0x1D49D); (0x1D4A0,0x1D4A1), `Delta (0); (0x1D4A3,0x1D4A4), `Delta (0); (0x1D4A7,0x1D4A8), `Delta (0); (0x1D4AD,0x1D4AD), `Abs (0x1D4AD); (0x1D4BA,0x1D4BA), `Abs (0x1D4BA); (0x1D4BC,0x1D4BC), `Abs (0x1D4BC); (0x1D4C4,0x1D4C4), `Abs (0x1D4C4); (0x1D506,0x1D506), `Abs (0x1D506); (0x1D50B,0x1D50C), `Delta (0); (0x1D515,0x1D515), `Abs (0x1D515); (0x1D51D,0x1D51D), `Abs (0x1D51D); (0x1D53A,0x1D53A), `Abs (0x1D53A); (0x1D53F,0x1D53F), `Abs (0x1D53F); (0x1D545,0x1D545), `Abs (0x1D545); (0x1D547,0x1D549), `Delta (0); (0x1D551,0x1D551), `Abs (0x1D551); (0x1D6A6,0x1D6A7), `Delta (0); (0x1D7CC,0x1D7CD), `Delta (0); (0x1DA8C,0x1DA9A), `Delta (0); (0x1DAA0,0x1DAA0), `Abs (0x1DAA0); (0x1DAB0,0x1DFFF), `Delta (0); (0x1E007,0x1E007), `Abs (0x1E007); (0x1E019,0x1E01A), `Delta (0); (0x1E022,0x1E022), `Abs (0x1E022); (0x1E025,0x1E025), `Abs (0x1E025); (0x1E02B,0x1E7FF), `Delta (0); (0x1E8C5,0x1E8C6), `Delta (0); (0x1E8D7,0x1E8FF), `Delta (0); (0x1E94B,0x1E94F), `Delta (0); (0x1E95A,0x1E95D), `Delta (0); (0x1E960,0x1EDFF), `Delta (0); (0x1EE04,0x1EE04), `Abs (0x1EE04); (0x1EE20,0x1EE20), `Abs (0x1EE20); (0x1EE23,0x1EE23), `Abs (0x1EE23); (0x1EE25,0x1EE26), `Delta (0); (0x1EE28,0x1EE28), `Abs (0x1EE28); (0x1EE33,0x1EE33), `Abs (0x1EE33); (0x1EE38,0x1EE38), `Abs (0x1EE38); (0x1EE3A,0x1EE3A), `Abs (0x1EE3A); (0x1EE3C,0x1EE41), `Delta (0); (0x1EE43,0x1EE46), `Delta (0); (0x1EE48,0x1EE48), `Abs (0x1EE48); (0x1EE4A,0x1EE4A), `Abs (0x1EE4A); (0x1EE4C,0x1EE4C), `Abs (0x1EE4C); (0x1EE50,0x1EE50), `Abs (0x1EE50); (0x1EE53,0x1EE53), `Abs (0x1EE53); (0x1EE55,0x1EE56), `Delta (0); (0x1EE58,0x1EE58), `Abs (0x1EE58); (0x1EE5A,0x1EE5A), `Abs (0x1EE5A); (0x1EE5C,0x1EE5C), `Abs (0x1EE5C); (0x1EE5E,0x1EE5E), `Abs (0x1EE5E); (0x1EE60,0x1EE60), `Abs (0x1EE60); (0x1EE63,0x1EE63), `Abs (0x1EE63); (0x1EE65,0x1EE66), `Delta (0); (0x1EE6B,0x1EE6B), `Abs (0x1EE6B); (0x1EE73,0x1EE73), `Abs (0x1EE73); (0x1EE78,0x1EE78), `Abs (0x1EE78); (0x1EE7D,0x1EE7D), `Abs (0x1EE7D); (0x1EE7F,0x1EE7F), `Abs (0x1EE7F); (0x1EE8A,0x1EE8A), `Abs (0x1EE8A); (0x1EE9C,0x1EEA0), `Delta (0); (0x1EEA4,0x1EEA4), `Abs (0x1EEA4); (0x1EEAA,0x1EEAA), `Abs (0x1EEAA); (0x1EEBC,0x1EEEF), `Delta (0); (0x1EEF2,0x1EFFF), `Delta (0); (0x1F02C,0x1F02F), `Delta (0); (0x1F094,0x1F09F), `Delta (0); (0x1F0AF,0x1F0B0), `Delta (0); (0x1F0C0,0x1F0C0), `Abs (0x1F0C0); (0x1F0D0,0x1F0D0), `Abs (0x1F0D0); (0x1F0F6,0x1F0FF), `Delta (0); (0x1F10D,0x1F10F), `Delta (0); (0x1F12F,0x1F12F), `Abs (0x1F12F); (0x1F16C,0x1F16F), `Delta (0); (0x1F1AD,0x1F1E5), `Delta (0); (0x1F203,0x1F20F), `Delta (0); (0x1F23C,0x1F23F), `Delta (0); (0x1F249,0x1F24F), `Delta (0); (0x1F252,0x1F2FF), `Delta (0); (0x1F6D3,0x1F6DF), `Delta (0); (0x1F6ED,0x1F6EF), `Delta (0); (0x1F6F7,0x1F6FF), `Delta (0); (0x1F774,0x1F77F), `Delta (0); (0x1F7D5,0x1F7FF), `Delta (0); (0x1F80C,0x1F80F), `Delta (0); (0x1F848,0x1F84F), `Delta (0); (0x1F85A,0x1F85F), `Delta (0); (0x1F888,0x1F88F), `Delta (0); (0x1F8AE,0x1F90F), `Delta (0); (0x1F91F,0x1F91F), `Abs (0x1F91F); (0x1F928,0x1F92F), `Delta (0); (0x1F931,0x1F932), `Delta (0); (0x1F93F,0x1F93F), `Abs (0x1F93F); (0x1F94C,0x1F94F), `Delta (0); (0x1F95F,0x1F97F), `Delta (0); (0x1F992,0x1F9BF), `Delta (0); (0x1F9C1,0x1FFFF), `Delta (0); (0x2A6D7,0x2A6FF), `Delta (0); (0x2B735,0x2B73F), `Delta (0); (0x2B81E,0x2B81F), `Delta (0); (0x2CEA2,0x2F7FF), `Delta (0); (0x2FA1E,0xE0000), `Delta (0); 
(0xE0002,0xE001F), `Delta (0); (0xE0080,0xE00FF), `Delta (0); (0xE01F0,0xEFFFF), `Delta (0); (0xFFFFE,0xFFFFF), `Delta (0); (0x10FFFE,0x10FFFF), `Delta (0); (0x002B0,0x002C1), `Delta (0); (0x002C6,0x002D1), `Delta (0); (0x002E0,0x002E4), `Delta (0); (0x002EC,0x002EC), `Abs (0x002EC); (0x002EE,0x002EE), `Abs (0x002EE); (0x00374,0x00374), `Abs (0x00374); (0x0037A,0x0037A), `Abs (0x0037A); (0x00559,0x00559), `Abs (0x00559); (0x00640,0x00640), `Abs (0x00640); (0x006E5,0x006E6), `Delta (0); (0x007F4,0x007F5), `Delta (0); (0x007FA,0x007FA), `Abs (0x007FA); (0x0081A,0x0081A), `Abs (0x0081A); (0x00824,0x00824), `Abs (0x00824); (0x00828,0x00828), `Abs (0x00828); (0x00971,0x00971), `Abs (0x00971); (0x00E46,0x00E46), `Abs (0x00E46); (0x00EC6,0x00EC6), `Abs (0x00EC6); (0x010FC,0x010FC), `Abs (0x010FC); (0x017D7,0x017D7), `Abs (0x017D7); (0x01843,0x01843), `Abs (0x01843); (0x01AA7,0x01AA7), `Abs (0x01AA7); (0x01C78,0x01C7D), `Delta (0); (0x01D2C,0x01D6A), `Delta (0); (0x01D78,0x01D78), `Abs (0x01D78); (0x01D9B,0x01DBF), `Delta (0); (0x02071,0x02071), `Abs (0x02071); (0x0207F,0x0207F), `Abs (0x0207F); (0x02090,0x0209C), `Delta (0); (0x02C7C,0x02C7D), `Delta (0); (0x02D6F,0x02D6F), `Abs (0x02D6F); (0x02E2F,0x02E2F), `Abs (0x02E2F); (0x03005,0x03005), `Abs (0x03005); (0x03031,0x03035), `Delta (0); (0x0303B,0x0303B), `Abs (0x0303B); (0x0309D,0x0309E), `Delta (0); (0x030FC,0x030FE), `Delta (0); (0x0A015,0x0A015), `Abs (0x0A015); (0x0A4F8,0x0A4FD), `Delta (0); (0x0A60C,0x0A60C), `Abs (0x0A60C); (0x0A67F,0x0A67F), `Abs (0x0A67F); (0x0A69C,0x0A69D), `Delta (0); (0x0A717,0x0A71F), `Delta (0); (0x0A770,0x0A770), `Abs (0x0A770); (0x0A788,0x0A788), `Abs (0x0A788); (0x0A7F8,0x0A7F9), `Delta (0); (0x0A9CF,0x0A9CF), `Abs (0x0A9CF); (0x0A9E6,0x0A9E6), `Abs (0x0A9E6); (0x0AA70,0x0AA70), `Abs (0x0AA70); (0x0AADD,0x0AADD), `Abs (0x0AADD); (0x0AAF3,0x0AAF4), `Delta (0); (0x0AB5C,0x0AB5F), `Delta (0); (0x0FF70,0x0FF70), `Abs (0x0FF70); (0x0FF9E,0x0FF9F), `Delta (0); (0x16B40,0x16B43), `Delta (0); (0x16F93,0x16F9F), `Delta (0); (0x16FE0,0x16FE0), `Abs (0x16FE0); (0x000AA,0x000AA), `Abs (0x000AA); (0x000BA,0x000BA), `Abs (0x000BA); (0x001BB,0x001BB), `Abs (0x001BB); (0x001C0,0x001C3), `Delta (0); (0x00294,0x00294), `Abs (0x00294); (0x005D0,0x005EA), `Delta (0); (0x005F0,0x005F2), `Delta (0); (0x00620,0x0063F), `Delta (0); (0x00641,0x0064A), `Delta (0); (0x0066E,0x0066F), `Delta (0); (0x00671,0x006D3), `Delta (0); (0x006D5,0x006D5), `Abs (0x006D5); (0x006EE,0x006EF), `Delta (0); (0x006FA,0x006FC), `Delta (0); (0x006FF,0x006FF), `Abs (0x006FF); (0x00710,0x00710), `Abs (0x00710); (0x00712,0x0072F), `Delta (0); (0x0074D,0x007A5), `Delta (0); (0x007B1,0x007B1), `Abs (0x007B1); (0x007CA,0x007EA), `Delta (0); (0x00800,0x00815), `Delta (0); (0x00840,0x00858), `Delta (0); (0x008A0,0x008B4), `Delta (0); (0x008B6,0x008BD), `Delta (0); (0x00904,0x00939), `Delta (0); (0x0093D,0x0093D), `Abs (0x0093D); (0x00950,0x00950), `Abs (0x00950); (0x00958,0x00961), `Delta (0); (0x00972,0x00980), `Delta (0); (0x00985,0x0098C), `Delta (0); (0x0098F,0x00990), `Delta (0); (0x00993,0x009A8), `Delta (0); (0x009AA,0x009B0), `Delta (0); (0x009B2,0x009B2), `Abs (0x009B2); (0x009B6,0x009B9), `Delta (0); (0x009BD,0x009BD), `Abs (0x009BD); (0x009CE,0x009CE), `Abs (0x009CE); (0x009DC,0x009DD), `Delta (0); (0x009DF,0x009E1), `Delta (0); (0x009F0,0x009F1), `Delta (0); (0x00A05,0x00A0A), `Delta (0); (0x00A0F,0x00A10), `Delta (0); (0x00A13,0x00A28), `Delta (0); (0x00A2A,0x00A30), `Delta (0); (0x00A32,0x00A33), `Delta (0); (0x00A35,0x00A36), `Delta (0); 
(0x00A38,0x00A39), `Delta (0); (0x00A59,0x00A5C), `Delta (0); (0x00A5E,0x00A5E), `Abs (0x00A5E); (0x00A72,0x00A74), `Delta (0); (0x00A85,0x00A8D), `Delta (0); (0x00A8F,0x00A91), `Delta (0); (0x00A93,0x00AA8), `Delta (0); (0x00AAA,0x00AB0), `Delta (0); (0x00AB2,0x00AB3), `Delta (0); (0x00AB5,0x00AB9), `Delta (0); (0x00ABD,0x00ABD), `Abs (0x00ABD); (0x00AD0,0x00AD0), `Abs (0x00AD0); (0x00AE0,0x00AE1), `Delta (0); (0x00AF9,0x00AF9), `Abs (0x00AF9); (0x00B05,0x00B0C), `Delta (0); (0x00B0F,0x00B10), `Delta (0); (0x00B13,0x00B28), `Delta (0); (0x00B2A,0x00B30), `Delta (0); (0x00B32,0x00B33), `Delta (0); (0x00B35,0x00B39), `Delta (0); (0x00B3D,0x00B3D), `Abs (0x00B3D); (0x00B5C,0x00B5D), `Delta (0); (0x00B5F,0x00B61), `Delta (0); (0x00B71,0x00B71), `Abs (0x00B71); (0x00B83,0x00B83), `Abs (0x00B83); (0x00B85,0x00B8A), `Delta (0); (0x00B8E,0x00B90), `Delta (0); (0x00B92,0x00B95), `Delta (0); (0x00B99,0x00B9A), `Delta (0); (0x00B9C,0x00B9C), `Abs (0x00B9C); (0x00B9E,0x00B9F), `Delta (0); (0x00BA3,0x00BA4), `Delta (0); (0x00BA8,0x00BAA), `Delta (0); (0x00BAE,0x00BB9), `Delta (0); (0x00BD0,0x00BD0), `Abs (0x00BD0); (0x00C05,0x00C0C), `Delta (0); (0x00C0E,0x00C10), `Delta (0); (0x00C12,0x00C28), `Delta (0); (0x00C2A,0x00C39), `Delta (0); (0x00C3D,0x00C3D), `Abs (0x00C3D); (0x00C58,0x00C5A), `Delta (0); (0x00C60,0x00C61), `Delta (0); (0x00C80,0x00C80), `Abs (0x00C80); (0x00C85,0x00C8C), `Delta (0); (0x00C8E,0x00C90), `Delta (0); (0x00C92,0x00CA8), `Delta (0); (0x00CAA,0x00CB3), `Delta (0); (0x00CB5,0x00CB9), `Delta (0); (0x00CBD,0x00CBD), `Abs (0x00CBD); (0x00CDE,0x00CDE), `Abs (0x00CDE); (0x00CE0,0x00CE1), `Delta (0); (0x00CF1,0x00CF2), `Delta (0); (0x00D05,0x00D0C), `Delta (0); (0x00D0E,0x00D10), `Delta (0); (0x00D12,0x00D3A), `Delta (0); (0x00D3D,0x00D3D), `Abs (0x00D3D); (0x00D4E,0x00D4E), `Abs (0x00D4E); (0x00D54,0x00D56), `Delta (0); (0x00D5F,0x00D61), `Delta (0); (0x00D7A,0x00D7F), `Delta (0); (0x00D85,0x00D96), `Delta (0); (0x00D9A,0x00DB1), `Delta (0); (0x00DB3,0x00DBB), `Delta (0); (0x00DBD,0x00DBD), `Abs (0x00DBD); (0x00DC0,0x00DC6), `Delta (0); (0x00E01,0x00E30), `Delta (0); (0x00E32,0x00E33), `Delta (0); (0x00E40,0x00E45), `Delta (0); (0x00E81,0x00E82), `Delta (0); (0x00E84,0x00E84), `Abs (0x00E84); (0x00E87,0x00E88), `Delta (0); (0x00E8A,0x00E8A), `Abs (0x00E8A); (0x00E8D,0x00E8D), `Abs (0x00E8D); (0x00E94,0x00E97), `Delta (0); (0x00E99,0x00E9F), `Delta (0); (0x00EA1,0x00EA3), `Delta (0); (0x00EA5,0x00EA5), `Abs (0x00EA5); (0x00EA7,0x00EA7), `Abs (0x00EA7); (0x00EAA,0x00EAB), `Delta (0); (0x00EAD,0x00EB0), `Delta (0); (0x00EB2,0x00EB3), `Delta (0); (0x00EBD,0x00EBD), `Abs (0x00EBD); (0x00EC0,0x00EC4), `Delta (0); (0x00EDC,0x00EDF), `Delta (0); (0x00F00,0x00F00), `Abs (0x00F00); (0x00F40,0x00F47), `Delta (0); (0x00F49,0x00F6C), `Delta (0); (0x00F88,0x00F8C), `Delta (0); (0x01000,0x0102A), `Delta (0); (0x0103F,0x0103F), `Abs (0x0103F); (0x01050,0x01055), `Delta (0); (0x0105A,0x0105D), `Delta (0); (0x01061,0x01061), `Abs (0x01061); (0x01065,0x01066), `Delta (0); (0x0106E,0x01070), `Delta (0); (0x01075,0x01081), `Delta (0); (0x0108E,0x0108E), `Abs (0x0108E); (0x010D0,0x010FA), `Delta (0); (0x010FD,0x01248), `Delta (0); (0x0124A,0x0124D), `Delta (0); (0x01250,0x01256), `Delta (0); (0x01258,0x01258), `Abs (0x01258); (0x0125A,0x0125D), `Delta (0); (0x01260,0x01288), `Delta (0); (0x0128A,0x0128D), `Delta (0); (0x01290,0x012B0), `Delta (0); (0x012B2,0x012B5), `Delta (0); (0x012B8,0x012BE), `Delta (0); (0x012C0,0x012C0), `Abs (0x012C0); (0x012C2,0x012C5), `Delta (0); (0x012C8,0x012D6), `Delta (0); 
(0x012D8,0x01310), `Delta (0); (0x01312,0x01315), `Delta (0); (0x01318,0x0135A), `Delta (0); (0x01380,0x0138F), `Delta (0); (0x01401,0x0166C), `Delta (0); (0x0166F,0x0167F), `Delta (0); (0x01681,0x0169A), `Delta (0); (0x016A0,0x016EA), `Delta (0); (0x016F1,0x016F8), `Delta (0); (0x01700,0x0170C), `Delta (0); (0x0170E,0x01711), `Delta (0); (0x01720,0x01731), `Delta (0); (0x01740,0x01751), `Delta (0); (0x01760,0x0176C), `Delta (0); (0x0176E,0x01770), `Delta (0); (0x01780,0x017B3), `Delta (0); (0x017DC,0x017DC), `Abs (0x017DC); (0x01820,0x01842), `Delta (0); (0x01844,0x01877), `Delta (0); (0x01880,0x01884), `Delta (0); (0x01887,0x018A8), `Delta (0); (0x018AA,0x018AA), `Abs (0x018AA); (0x018B0,0x018F5), `Delta (0); (0x01900,0x0191E), `Delta (0); (0x01950,0x0196D), `Delta (0); (0x01970,0x01974), `Delta (0); (0x01980,0x019AB), `Delta (0); (0x019B0,0x019C9), `Delta (0); (0x01A00,0x01A16), `Delta (0); (0x01A20,0x01A54), `Delta (0); (0x01B05,0x01B33), `Delta (0); (0x01B45,0x01B4B), `Delta (0); (0x01B83,0x01BA0), `Delta (0); (0x01BAE,0x01BAF), `Delta (0); (0x01BBA,0x01BE5), `Delta (0); (0x01C00,0x01C23), `Delta (0); (0x01C4D,0x01C4F), `Delta (0); (0x01C5A,0x01C77), `Delta (0); (0x01CE9,0x01CEC), `Delta (0); (0x01CEE,0x01CF1), `Delta (0); (0x01CF5,0x01CF6), `Delta (0); (0x02135,0x02138), `Delta (0); (0x02D30,0x02D67), `Delta (0); (0x02D80,0x02D96), `Delta (0); (0x02DA0,0x02DA6), `Delta (0); (0x02DA8,0x02DAE), `Delta (0); (0x02DB0,0x02DB6), `Delta (0); (0x02DB8,0x02DBE), `Delta (0); (0x02DC0,0x02DC6), `Delta (0); (0x02DC8,0x02DCE), `Delta (0); (0x02DD0,0x02DD6), `Delta (0); (0x02DD8,0x02DDE), `Delta (0); (0x03006,0x03006), `Abs (0x03006); (0x0303C,0x0303C), `Abs (0x0303C); (0x03041,0x03096), `Delta (0); (0x0309F,0x0309F), `Abs (0x0309F); (0x030A1,0x030FA), `Delta (0); (0x030FF,0x030FF), `Abs (0x030FF); (0x03105,0x0312D), `Delta (0); (0x03131,0x0318E), `Delta (0); (0x031A0,0x031BA), `Delta (0); (0x031F0,0x031FF), `Delta (0); (0x03400,0x04DB5), `Delta (0); (0x04E00,0x09FD5), `Delta (0); (0x0A000,0x0A014), `Delta (0); (0x0A016,0x0A48C), `Delta (0); (0x0A4D0,0x0A4F7), `Delta (0); (0x0A500,0x0A60B), `Delta (0); (0x0A610,0x0A61F), `Delta (0); (0x0A62A,0x0A62B), `Delta (0); (0x0A66E,0x0A66E), `Abs (0x0A66E); (0x0A6A0,0x0A6E5), `Delta (0); (0x0A78F,0x0A78F), `Abs (0x0A78F); (0x0A7F7,0x0A7F7), `Abs (0x0A7F7); (0x0A7FB,0x0A801), `Delta (0); (0x0A803,0x0A805), `Delta (0); (0x0A807,0x0A80A), `Delta (0); (0x0A80C,0x0A822), `Delta (0); (0x0A840,0x0A873), `Delta (0); (0x0A882,0x0A8B3), `Delta (0); (0x0A8F2,0x0A8F7), `Delta (0); (0x0A8FB,0x0A8FB), `Abs (0x0A8FB); (0x0A8FD,0x0A8FD), `Abs (0x0A8FD); (0x0A90A,0x0A925), `Delta (0); (0x0A930,0x0A946), `Delta (0); (0x0A960,0x0A97C), `Delta (0); (0x0A984,0x0A9B2), `Delta (0); (0x0A9E0,0x0A9E4), `Delta (0); (0x0A9E7,0x0A9EF), `Delta (0); (0x0A9FA,0x0A9FE), `Delta (0); (0x0AA00,0x0AA28), `Delta (0); (0x0AA40,0x0AA42), `Delta (0); (0x0AA44,0x0AA4B), `Delta (0); (0x0AA60,0x0AA6F), `Delta (0); (0x0AA71,0x0AA76), `Delta (0); (0x0AA7A,0x0AA7A), `Abs (0x0AA7A); (0x0AA7E,0x0AAAF), `Delta (0); (0x0AAB1,0x0AAB1), `Abs (0x0AAB1); (0x0AAB5,0x0AAB6), `Delta (0); (0x0AAB9,0x0AABD), `Delta (0); (0x0AAC0,0x0AAC0), `Abs (0x0AAC0); (0x0AAC2,0x0AAC2), `Abs (0x0AAC2); (0x0AADB,0x0AADC), `Delta (0); (0x0AAE0,0x0AAEA), `Delta (0); (0x0AAF2,0x0AAF2), `Abs (0x0AAF2); (0x0AB01,0x0AB06), `Delta (0); (0x0AB09,0x0AB0E), `Delta (0); (0x0AB11,0x0AB16), `Delta (0); (0x0AB20,0x0AB26), `Delta (0); (0x0AB28,0x0AB2E), `Delta (0); (0x0ABC0,0x0ABE2), `Delta (0); (0x0AC00,0x0D7A3), `Delta (0); (0x0D7B0,0x0D7C6), 
`Delta (0); (0x0D7CB,0x0D7FB), `Delta (0); (0x0F900,0x0FA6D), `Delta (0); (0x0FA70,0x0FAD9), `Delta (0); (0x0FB1D,0x0FB1D), `Abs (0x0FB1D); (0x0FB1F,0x0FB28), `Delta (0); (0x0FB2A,0x0FB36), `Delta (0); (0x0FB38,0x0FB3C), `Delta (0); (0x0FB3E,0x0FB3E), `Abs (0x0FB3E); (0x0FB40,0x0FB41), `Delta (0); (0x0FB43,0x0FB44), `Delta (0); (0x0FB46,0x0FBB1), `Delta (0); (0x0FBD3,0x0FD3D), `Delta (0); (0x0FD50,0x0FD8F), `Delta (0); (0x0FD92,0x0FDC7), `Delta (0); (0x0FDF0,0x0FDFB), `Delta (0); (0x0FE70,0x0FE74), `Delta (0); (0x0FE76,0x0FEFC), `Delta (0); (0x0FF66,0x0FF6F), `Delta (0); (0x0FF71,0x0FF9D), `Delta (0); (0x0FFA0,0x0FFBE), `Delta (0); (0x0FFC2,0x0FFC7), `Delta (0); (0x0FFCA,0x0FFCF), `Delta (0); (0x0FFD2,0x0FFD7), `Delta (0); (0x0FFDA,0x0FFDC), `Delta (0); (0x10000,0x1000B), `Delta (0); (0x1000D,0x10026), `Delta (0); (0x10028,0x1003A), `Delta (0); (0x1003C,0x1003D), `Delta (0); (0x1003F,0x1004D), `Delta (0); (0x10050,0x1005D), `Delta (0); (0x10080,0x100FA), `Delta (0); (0x10280,0x1029C), `Delta (0); (0x102A0,0x102D0), `Delta (0); (0x10300,0x1031F), `Delta (0); (0x10330,0x10340), `Delta (0); (0x10342,0x10349), `Delta (0); (0x10350,0x10375), `Delta (0); (0x10380,0x1039D), `Delta (0); (0x103A0,0x103C3), `Delta (0); (0x103C8,0x103CF), `Delta (0); (0x10450,0x1049D), `Delta (0); (0x10500,0x10527), `Delta (0); (0x10530,0x10563), `Delta (0); (0x10600,0x10736), `Delta (0); (0x10740,0x10755), `Delta (0); (0x10760,0x10767), `Delta (0); (0x10800,0x10805), `Delta (0); (0x10808,0x10808), `Abs (0x10808); (0x1080A,0x10835), `Delta (0); (0x10837,0x10838), `Delta (0); (0x1083C,0x1083C), `Abs (0x1083C); (0x1083F,0x10855), `Delta (0); (0x10860,0x10876), `Delta (0); (0x10880,0x1089E), `Delta (0); (0x108E0,0x108F2), `Delta (0); (0x108F4,0x108F5), `Delta (0); (0x10900,0x10915), `Delta (0); (0x10920,0x10939), `Delta (0); (0x10980,0x109B7), `Delta (0); (0x109BE,0x109BF), `Delta (0); (0x10A00,0x10A00), `Abs (0x10A00); (0x10A10,0x10A13), `Delta (0); (0x10A15,0x10A17), `Delta (0); (0x10A19,0x10A33), `Delta (0); (0x10A60,0x10A7C), `Delta (0); (0x10A80,0x10A9C), `Delta (0); (0x10AC0,0x10AC7), `Delta (0); (0x10AC9,0x10AE4), `Delta (0); (0x10B00,0x10B35), `Delta (0); (0x10B40,0x10B55), `Delta (0); (0x10B60,0x10B72), `Delta (0); (0x10B80,0x10B91), `Delta (0); (0x10C00,0x10C48), `Delta (0); (0x11003,0x11037), `Delta (0); (0x11083,0x110AF), `Delta (0); (0x110D0,0x110E8), `Delta (0); (0x11103,0x11126), `Delta (0); (0x11150,0x11172), `Delta (0); (0x11176,0x11176), `Abs (0x11176); (0x11183,0x111B2), `Delta (0); (0x111C1,0x111C4), `Delta (0); (0x111DA,0x111DA), `Abs (0x111DA); (0x111DC,0x111DC), `Abs (0x111DC); (0x11200,0x11211), `Delta (0); (0x11213,0x1122B), `Delta (0); (0x11280,0x11286), `Delta (0); (0x11288,0x11288), `Abs (0x11288); (0x1128A,0x1128D), `Delta (0); (0x1128F,0x1129D), `Delta (0); (0x1129F,0x112A8), `Delta (0); (0x112B0,0x112DE), `Delta (0); (0x11305,0x1130C), `Delta (0); (0x1130F,0x11310), `Delta (0); (0x11313,0x11328), `Delta (0); (0x1132A,0x11330), `Delta (0); (0x11332,0x11333), `Delta (0); (0x11335,0x11339), `Delta (0); (0x1133D,0x1133D), `Abs (0x1133D); (0x11350,0x11350), `Abs (0x11350); (0x1135D,0x11361), `Delta (0); (0x11400,0x11434), `Delta (0); (0x11447,0x1144A), `Delta (0); (0x11480,0x114AF), `Delta (0); (0x114C4,0x114C5), `Delta (0); (0x114C7,0x114C7), `Abs (0x114C7); (0x11580,0x115AE), `Delta (0); (0x115D8,0x115DB), `Delta (0); (0x11600,0x1162F), `Delta (0); (0x11644,0x11644), `Abs (0x11644); (0x11680,0x116AA), `Delta (0); (0x11700,0x11719), `Delta (0); (0x118FF,0x118FF), `Abs (0x118FF); 
(0x11AC0,0x11AF8), `Delta (0); (0x11C00,0x11C08), `Delta (0); (0x11C0A,0x11C2E), `Delta (0); (0x11C40,0x11C40), `Abs (0x11C40); (0x11C72,0x11C8F), `Delta (0); (0x12000,0x12399), `Delta (0); (0x12480,0x12543), `Delta (0); (0x13000,0x1342E), `Delta (0); (0x14400,0x14646), `Delta (0); (0x16800,0x16A38), `Delta (0); (0x16A40,0x16A5E), `Delta (0); (0x16AD0,0x16AED), `Delta (0); (0x16B00,0x16B2F), `Delta (0); (0x16B63,0x16B77), `Delta (0); (0x16B7D,0x16B8F), `Delta (0); (0x16F00,0x16F44), `Delta (0); (0x16F50,0x16F50), `Abs (0x16F50); (0x17000,0x187EC), `Delta (0); (0x18800,0x18AF2), `Delta (0); (0x1B000,0x1B001), `Delta (0); (0x1BC00,0x1BC6A), `Delta (0); (0x1BC70,0x1BC7C), `Delta (0); (0x1BC80,0x1BC88), `Delta (0); (0x1BC90,0x1BC99), `Delta (0); (0x1E800,0x1E8C4), `Delta (0); (0x1EE00,0x1EE03), `Delta (0); (0x1EE05,0x1EE1F), `Delta (0); (0x1EE21,0x1EE22), `Delta (0); (0x1EE24,0x1EE24), `Abs (0x1EE24); (0x1EE27,0x1EE27), `Abs (0x1EE27); (0x1EE29,0x1EE32), `Delta (0); (0x1EE34,0x1EE37), `Delta (0); (0x1EE39,0x1EE39), `Abs (0x1EE39); (0x1EE3B,0x1EE3B), `Abs (0x1EE3B); (0x1EE42,0x1EE42), `Abs (0x1EE42); (0x1EE47,0x1EE47), `Abs (0x1EE47); (0x1EE49,0x1EE49), `Abs (0x1EE49); (0x1EE4B,0x1EE4B), `Abs (0x1EE4B); (0x1EE4D,0x1EE4F), `Delta (0); (0x1EE51,0x1EE52), `Delta (0); (0x1EE54,0x1EE54), `Abs (0x1EE54); (0x1EE57,0x1EE57), `Abs (0x1EE57); (0x1EE59,0x1EE59), `Abs (0x1EE59); (0x1EE5B,0x1EE5B), `Abs (0x1EE5B); (0x1EE5D,0x1EE5D), `Abs (0x1EE5D); (0x1EE5F,0x1EE5F), `Abs (0x1EE5F); (0x1EE61,0x1EE62), `Delta (0); (0x1EE64,0x1EE64), `Abs (0x1EE64); (0x1EE67,0x1EE6A), `Delta (0); (0x1EE6C,0x1EE72), `Delta (0); (0x1EE74,0x1EE77), `Delta (0); (0x1EE79,0x1EE7C), `Delta (0); (0x1EE7E,0x1EE7E), `Abs (0x1EE7E); (0x1EE80,0x1EE89), `Delta (0); (0x1EE8B,0x1EE9B), `Delta (0); (0x1EEA1,0x1EEA3), `Delta (0); (0x1EEA5,0x1EEA9), `Delta (0); (0x1EEAB,0x1EEBB), `Delta (0); (0x20000,0x2A6D6), `Delta (0); (0x2A700,0x2B734), `Delta (0); (0x2B740,0x2B81D), `Delta (0); (0x2B820,0x2CEA1), `Delta (0); (0x2F800,0x2FA1D), `Delta (0); (0x0005F,0x0005F), `Abs (0x0005F); (0x0203F,0x02040), `Delta (0); (0x02054,0x02054), `Abs (0x02054); (0x0FE33,0x0FE34), `Delta (0); (0x0FE4D,0x0FE4F), `Delta (0); (0x0FF3F,0x0FF3F), `Abs (0x0FF3F); (0x0002D,0x0002D), `Abs (0x0002D); (0x0058A,0x0058A), `Abs (0x0058A); (0x005BE,0x005BE), `Abs (0x005BE); (0x01400,0x01400), `Abs (0x01400); (0x01806,0x01806), `Abs (0x01806); (0x02010,0x02015), `Delta (0); (0x02E17,0x02E17), `Abs (0x02E17); (0x02E1A,0x02E1A), `Abs (0x02E1A); (0x02E3A,0x02E3B), `Delta (0); (0x02E40,0x02E40), `Abs (0x02E40); (0x0301C,0x0301C), `Abs (0x0301C); (0x03030,0x03030), `Abs (0x03030); (0x030A0,0x030A0), `Abs (0x030A0); (0x0FE31,0x0FE32), `Delta (0); (0x0FE58,0x0FE58), `Abs (0x0FE58); (0x0FE63,0x0FE63), `Abs (0x0FE63); (0x0FF0D,0x0FF0D), `Abs (0x0FF0D); (0x00028,0x00028), `Abs (0x00028); (0x0005B,0x0005B), `Abs (0x0005B); (0x0007B,0x0007B), `Abs (0x0007B); (0x00F3A,0x00F3A), `Abs (0x00F3A); (0x00F3C,0x00F3C), `Abs (0x00F3C); (0x0169B,0x0169B), `Abs (0x0169B); (0x0201A,0x0201A), `Abs (0x0201A); (0x0201E,0x0201E), `Abs (0x0201E); (0x02045,0x02045), `Abs (0x02045); (0x0207D,0x0207D), `Abs (0x0207D); (0x0208D,0x0208D), `Abs (0x0208D); (0x02308,0x02308), `Abs (0x02308); (0x0230A,0x0230A), `Abs (0x0230A); (0x02329,0x02329), `Abs (0x02329); (0x02768,0x02768), `Abs (0x02768); (0x0276A,0x0276A), `Abs (0x0276A); (0x0276C,0x0276C), `Abs (0x0276C); (0x0276E,0x0276E), `Abs (0x0276E); (0x02770,0x02770), `Abs (0x02770); (0x02772,0x02772), `Abs (0x02772); (0x02774,0x02774), `Abs (0x02774); 
(0x027C5,0x027C5), `Abs (0x027C5); (0x027E6,0x027E6), `Abs (0x027E6); (0x027E8,0x027E8), `Abs (0x027E8); (0x027EA,0x027EA), `Abs (0x027EA); (0x027EC,0x027EC), `Abs (0x027EC); (0x027EE,0x027EE), `Abs (0x027EE); (0x02983,0x02983), `Abs (0x02983); (0x02985,0x02985), `Abs (0x02985); (0x02987,0x02987), `Abs (0x02987); (0x02989,0x02989), `Abs (0x02989); (0x0298B,0x0298B), `Abs (0x0298B); (0x0298D,0x0298D), `Abs (0x0298D); (0x0298F,0x0298F), `Abs (0x0298F); (0x02991,0x02991), `Abs (0x02991); (0x02993,0x02993), `Abs (0x02993); (0x02995,0x02995), `Abs (0x02995); (0x02997,0x02997), `Abs (0x02997); (0x029D8,0x029D8), `Abs (0x029D8); (0x029DA,0x029DA), `Abs (0x029DA); (0x029FC,0x029FC), `Abs (0x029FC); (0x02E22,0x02E22), `Abs (0x02E22); (0x02E24,0x02E24), `Abs (0x02E24); (0x02E26,0x02E26), `Abs (0x02E26); (0x02E28,0x02E28), `Abs (0x02E28); (0x02E42,0x02E42), `Abs (0x02E42); (0x03008,0x03008), `Abs (0x03008); (0x0300A,0x0300A), `Abs (0x0300A); (0x0300C,0x0300C), `Abs (0x0300C); (0x0300E,0x0300E), `Abs (0x0300E); (0x03010,0x03010), `Abs (0x03010); (0x03014,0x03014), `Abs (0x03014); (0x03016,0x03016), `Abs (0x03016); (0x03018,0x03018), `Abs (0x03018); (0x0301A,0x0301A), `Abs (0x0301A); (0x0301D,0x0301D), `Abs (0x0301D); (0x0FD3F,0x0FD3F), `Abs (0x0FD3F); (0x0FE17,0x0FE17), `Abs (0x0FE17); (0x0FE35,0x0FE35), `Abs (0x0FE35); (0x0FE37,0x0FE37), `Abs (0x0FE37); (0x0FE39,0x0FE39), `Abs (0x0FE39); (0x0FE3B,0x0FE3B), `Abs (0x0FE3B); (0x0FE3D,0x0FE3D), `Abs (0x0FE3D); (0x0FE3F,0x0FE3F), `Abs (0x0FE3F); (0x0FE41,0x0FE41), `Abs (0x0FE41); (0x0FE43,0x0FE43), `Abs (0x0FE43); (0x0FE47,0x0FE47), `Abs (0x0FE47); (0x0FE59,0x0FE59), `Abs (0x0FE59); (0x0FE5B,0x0FE5B), `Abs (0x0FE5B); (0x0FE5D,0x0FE5D), `Abs (0x0FE5D); (0x0FF08,0x0FF08), `Abs (0x0FF08); (0x0FF3B,0x0FF3B), `Abs (0x0FF3B); (0x0FF5B,0x0FF5B), `Abs (0x0FF5B); (0x0FF5F,0x0FF5F), `Abs (0x0FF5F); (0x0FF62,0x0FF62), `Abs (0x0FF62); (0x00029,0x00029), `Abs (0x00029); (0x0005D,0x0005D), `Abs (0x0005D); (0x0007D,0x0007D), `Abs (0x0007D); (0x00F3B,0x00F3B), `Abs (0x00F3B); (0x00F3D,0x00F3D), `Abs (0x00F3D); (0x0169C,0x0169C), `Abs (0x0169C); (0x02046,0x02046), `Abs (0x02046); (0x0207E,0x0207E), `Abs (0x0207E); (0x0208E,0x0208E), `Abs (0x0208E); (0x02309,0x02309), `Abs (0x02309); (0x0230B,0x0230B), `Abs (0x0230B); (0x0232A,0x0232A), `Abs (0x0232A); (0x02769,0x02769), `Abs (0x02769); (0x0276B,0x0276B), `Abs (0x0276B); (0x0276D,0x0276D), `Abs (0x0276D); (0x0276F,0x0276F), `Abs (0x0276F); (0x02771,0x02771), `Abs (0x02771); (0x02773,0x02773), `Abs (0x02773); (0x02775,0x02775), `Abs (0x02775); (0x027C6,0x027C6), `Abs (0x027C6); (0x027E7,0x027E7), `Abs (0x027E7); (0x027E9,0x027E9), `Abs (0x027E9); (0x027EB,0x027EB), `Abs (0x027EB); (0x027ED,0x027ED), `Abs (0x027ED); (0x027EF,0x027EF), `Abs (0x027EF); (0x02984,0x02984), `Abs (0x02984); (0x02986,0x02986), `Abs (0x02986); (0x02988,0x02988), `Abs (0x02988); (0x0298A,0x0298A), `Abs (0x0298A); (0x0298C,0x0298C), `Abs (0x0298C); (0x0298E,0x0298E), `Abs (0x0298E); (0x02990,0x02990), `Abs (0x02990); (0x02992,0x02992), `Abs (0x02992); (0x02994,0x02994), `Abs (0x02994); (0x02996,0x02996), `Abs (0x02996); (0x02998,0x02998), `Abs (0x02998); (0x029D9,0x029D9), `Abs (0x029D9); (0x029DB,0x029DB), `Abs (0x029DB); (0x029FD,0x029FD), `Abs (0x029FD); (0x02E23,0x02E23), `Abs (0x02E23); (0x02E25,0x02E25), `Abs (0x02E25); (0x02E27,0x02E27), `Abs (0x02E27); (0x02E29,0x02E29), `Abs (0x02E29); (0x03009,0x03009), `Abs (0x03009); (0x0300B,0x0300B), `Abs (0x0300B); (0x0300D,0x0300D), `Abs (0x0300D); (0x0300F,0x0300F), `Abs (0x0300F); (0x03011,0x03011), 
`Abs (0x03011); (0x03015,0x03015), `Abs (0x03015); (0x03017,0x03017), `Abs (0x03017); (0x03019,0x03019), `Abs (0x03019); (0x0301B,0x0301B), `Abs (0x0301B); (0x0301E,0x0301F), `Delta (0); (0x0FD3E,0x0FD3E), `Abs (0x0FD3E); (0x0FE18,0x0FE18), `Abs (0x0FE18); (0x0FE36,0x0FE36), `Abs (0x0FE36); (0x0FE38,0x0FE38), `Abs (0x0FE38); (0x0FE3A,0x0FE3A), `Abs (0x0FE3A); (0x0FE3C,0x0FE3C), `Abs (0x0FE3C); (0x0FE3E,0x0FE3E), `Abs (0x0FE3E); (0x0FE40,0x0FE40), `Abs (0x0FE40); (0x0FE42,0x0FE42), `Abs (0x0FE42); (0x0FE44,0x0FE44), `Abs (0x0FE44); (0x0FE48,0x0FE48), `Abs (0x0FE48); (0x0FE5A,0x0FE5A), `Abs (0x0FE5A); (0x0FE5C,0x0FE5C), `Abs (0x0FE5C); (0x0FE5E,0x0FE5E), `Abs (0x0FE5E); (0x0FF09,0x0FF09), `Abs (0x0FF09); (0x0FF3D,0x0FF3D), `Abs (0x0FF3D); (0x0FF5D,0x0FF5D), `Abs (0x0FF5D); (0x0FF60,0x0FF60), `Abs (0x0FF60); (0x0FF63,0x0FF63), `Abs (0x0FF63); (0x000AB,0x000AB), `Abs (0x000AB); (0x02018,0x02018), `Abs (0x02018); (0x0201B,0x0201C), `Delta (0); (0x0201F,0x0201F), `Abs (0x0201F); (0x02039,0x02039), `Abs (0x02039); (0x02E02,0x02E02), `Abs (0x02E02); (0x02E04,0x02E04), `Abs (0x02E04); (0x02E09,0x02E09), `Abs (0x02E09); (0x02E0C,0x02E0C), `Abs (0x02E0C); (0x02E1C,0x02E1C), `Abs (0x02E1C); (0x02E20,0x02E20), `Abs (0x02E20); (0x000BB,0x000BB), `Abs (0x000BB); (0x02019,0x02019), `Abs (0x02019); (0x0201D,0x0201D), `Abs (0x0201D); (0x0203A,0x0203A), `Abs (0x0203A); (0x02E03,0x02E03), `Abs (0x02E03); (0x02E05,0x02E05), `Abs (0x02E05); (0x02E0A,0x02E0A), `Abs (0x02E0A); (0x02E0D,0x02E0D), `Abs (0x02E0D); (0x02E1D,0x02E1D), `Abs (0x02E1D); (0x02E21,0x02E21), `Abs (0x02E21); (0x00021,0x00023), `Delta (0); (0x00025,0x00027), `Delta (0); (0x0002A,0x0002A), `Abs (0x0002A); (0x0002C,0x0002C), `Abs (0x0002C); (0x0002E,0x0002F), `Delta (0); (0x0003A,0x0003B), `Delta (0); (0x0003F,0x00040), `Delta (0); (0x0005C,0x0005C), `Abs (0x0005C); (0x000A1,0x000A1), `Abs (0x000A1); (0x000A7,0x000A7), `Abs (0x000A7); (0x000B6,0x000B7), `Delta (0); (0x000BF,0x000BF), `Abs (0x000BF); (0x0037E,0x0037E), `Abs (0x0037E); (0x00387,0x00387), `Abs (0x00387); (0x0055A,0x0055F), `Delta (0); (0x00589,0x00589), `Abs (0x00589); (0x005C0,0x005C0), `Abs (0x005C0); (0x005C3,0x005C3), `Abs (0x005C3); (0x005C6,0x005C6), `Abs (0x005C6); (0x005F3,0x005F4), `Delta (0); (0x00609,0x0060A), `Delta (0); (0x0060C,0x0060D), `Delta (0); (0x0061B,0x0061B), `Abs (0x0061B); (0x0061E,0x0061F), `Delta (0); (0x0066A,0x0066D), `Delta (0); (0x006D4,0x006D4), `Abs (0x006D4); (0x00700,0x0070D), `Delta (0); (0x007F7,0x007F9), `Delta (0); (0x00830,0x0083E), `Delta (0); (0x0085E,0x0085E), `Abs (0x0085E); (0x00964,0x00965), `Delta (0); (0x00970,0x00970), `Abs (0x00970); (0x00AF0,0x00AF0), `Abs (0x00AF0); (0x00DF4,0x00DF4), `Abs (0x00DF4); (0x00E4F,0x00E4F), `Abs (0x00E4F); (0x00E5A,0x00E5B), `Delta (0); (0x00F04,0x00F12), `Delta (0); (0x00F14,0x00F14), `Abs (0x00F14); (0x00F85,0x00F85), `Abs (0x00F85); (0x00FD0,0x00FD4), `Delta (0); (0x00FD9,0x00FDA), `Delta (0); (0x0104A,0x0104F), `Delta (0); (0x010FB,0x010FB), `Abs (0x010FB); (0x01360,0x01368), `Delta (0); (0x0166D,0x0166E), `Delta (0); (0x016EB,0x016ED), `Delta (0); (0x01735,0x01736), `Delta (0); (0x017D4,0x017D6), `Delta (0); (0x017D8,0x017DA), `Delta (0); (0x01800,0x01805), `Delta (0); (0x01807,0x0180A), `Delta (0); (0x01944,0x01945), `Delta (0); (0x01A1E,0x01A1F), `Delta (0); (0x01AA0,0x01AA6), `Delta (0); (0x01AA8,0x01AAD), `Delta (0); (0x01B5A,0x01B60), `Delta (0); (0x01BFC,0x01BFF), `Delta (0); (0x01C3B,0x01C3F), `Delta (0); (0x01C7E,0x01C7F), `Delta (0); (0x01CC0,0x01CC7), `Delta (0); (0x01CD3,0x01CD3), `Abs 
(0x01CD3); (0x02016,0x02017), `Delta (0); (0x02020,0x02027), `Delta (0); (0x02030,0x02038), `Delta (0); (0x0203B,0x0203E), `Delta (0); (0x02041,0x02043), `Delta (0); (0x02047,0x02051), `Delta (0); (0x02053,0x02053), `Abs (0x02053); (0x02055,0x0205E), `Delta (0); (0x02CF9,0x02CFC), `Delta (0); (0x02CFE,0x02CFF), `Delta (0); (0x02D70,0x02D70), `Abs (0x02D70); (0x02E00,0x02E01), `Delta (0); (0x02E06,0x02E08), `Delta (0); (0x02E0B,0x02E0B), `Abs (0x02E0B); (0x02E0E,0x02E16), `Delta (0); (0x02E18,0x02E19), `Delta (0); (0x02E1B,0x02E1B), `Abs (0x02E1B); (0x02E1E,0x02E1F), `Delta (0); (0x02E2A,0x02E2E), `Delta (0); (0x02E30,0x02E39), `Delta (0); (0x02E3C,0x02E3F), `Delta (0); (0x02E41,0x02E41), `Abs (0x02E41); (0x02E43,0x02E44), `Delta (0); (0x03001,0x03003), `Delta (0); (0x0303D,0x0303D), `Abs (0x0303D); (0x030FB,0x030FB), `Abs (0x030FB); (0x0A4FE,0x0A4FF), `Delta (0); (0x0A60D,0x0A60F), `Delta (0); (0x0A673,0x0A673), `Abs (0x0A673); (0x0A67E,0x0A67E), `Abs (0x0A67E); (0x0A6F2,0x0A6F7), `Delta (0); (0x0A874,0x0A877), `Delta (0); (0x0A8CE,0x0A8CF), `Delta (0); (0x0A8F8,0x0A8FA), `Delta (0); (0x0A8FC,0x0A8FC), `Abs (0x0A8FC); (0x0A92E,0x0A92F), `Delta (0); (0x0A95F,0x0A95F), `Abs (0x0A95F); (0x0A9C1,0x0A9CD), `Delta (0); (0x0A9DE,0x0A9DF), `Delta (0); (0x0AA5C,0x0AA5F), `Delta (0); (0x0AADE,0x0AADF), `Delta (0); (0x0AAF0,0x0AAF1), `Delta (0); (0x0ABEB,0x0ABEB), `Abs (0x0ABEB); (0x0FE10,0x0FE16), `Delta (0); (0x0FE19,0x0FE19), `Abs (0x0FE19); (0x0FE30,0x0FE30), `Abs (0x0FE30); (0x0FE45,0x0FE46), `Delta (0); (0x0FE49,0x0FE4C), `Delta (0); (0x0FE50,0x0FE52), `Delta (0); (0x0FE54,0x0FE57), `Delta (0); (0x0FE5F,0x0FE61), `Delta (0); (0x0FE68,0x0FE68), `Abs (0x0FE68); (0x0FE6A,0x0FE6B), `Delta (0); (0x0FF01,0x0FF03), `Delta (0); (0x0FF05,0x0FF07), `Delta (0); (0x0FF0A,0x0FF0A), `Abs (0x0FF0A); (0x0FF0C,0x0FF0C), `Abs (0x0FF0C); (0x0FF0E,0x0FF0F), `Delta (0); (0x0FF1A,0x0FF1B), `Delta (0); (0x0FF1F,0x0FF20), `Delta (0); (0x0FF3C,0x0FF3C), `Abs (0x0FF3C); (0x0FF61,0x0FF61), `Abs (0x0FF61); (0x0FF64,0x0FF65), `Delta (0); (0x10100,0x10102), `Delta (0); (0x1039F,0x1039F), `Abs (0x1039F); (0x103D0,0x103D0), `Abs (0x103D0); (0x1056F,0x1056F), `Abs (0x1056F); (0x10857,0x10857), `Abs (0x10857); (0x1091F,0x1091F), `Abs (0x1091F); (0x1093F,0x1093F), `Abs (0x1093F); (0x10A50,0x10A58), `Delta (0); (0x10A7F,0x10A7F), `Abs (0x10A7F); (0x10AF0,0x10AF6), `Delta (0); (0x10B39,0x10B3F), `Delta (0); (0x10B99,0x10B9C), `Delta (0); (0x11047,0x1104D), `Delta (0); (0x110BB,0x110BC), `Delta (0); (0x110BE,0x110C1), `Delta (0); (0x11140,0x11143), `Delta (0); (0x11174,0x11175), `Delta (0); (0x111C5,0x111C9), `Delta (0); (0x111CD,0x111CD), `Abs (0x111CD); (0x111DB,0x111DB), `Abs (0x111DB); (0x111DD,0x111DF), `Delta (0); (0x11238,0x1123D), `Delta (0); (0x112A9,0x112A9), `Abs (0x112A9); (0x1144B,0x1144F), `Delta (0); (0x1145B,0x1145B), `Abs (0x1145B); (0x1145D,0x1145D), `Abs (0x1145D); (0x114C6,0x114C6), `Abs (0x114C6); (0x115C1,0x115D7), `Delta (0); (0x11641,0x11643), `Delta (0); (0x11660,0x1166C), `Delta (0); (0x1173C,0x1173E), `Delta (0); (0x11C41,0x11C45), `Delta (0); (0x11C70,0x11C71), `Delta (0); (0x12470,0x12474), `Delta (0); (0x16A6E,0x16A6F), `Delta (0); (0x16AF5,0x16AF5), `Abs (0x16AF5); (0x16B37,0x16B3B), `Delta (0); (0x16B44,0x16B44), `Abs (0x16B44); (0x1BC9F,0x1BC9F), `Abs (0x1BC9F); (0x1DA87,0x1DA8B), `Delta (0); (0x1E95E,0x1E95F), `Delta (0); (0x0002B,0x0002B), `Abs (0x0002B); (0x0003C,0x0003E), `Delta (0); (0x0007C,0x0007C), `Abs (0x0007C); (0x0007E,0x0007E), `Abs (0x0007E); (0x000AC,0x000AC), `Abs (0x000AC); 
(0x000B1,0x000B1), `Abs (0x000B1); (0x000D7,0x000D7), `Abs (0x000D7); (0x000F7,0x000F7), `Abs (0x000F7); (0x003F6,0x003F6), `Abs (0x003F6); (0x00606,0x00608), `Delta (0); (0x02044,0x02044), `Abs (0x02044); (0x02052,0x02052), `Abs (0x02052); (0x0207A,0x0207C), `Delta (0); (0x0208A,0x0208C), `Delta (0); (0x02118,0x02118), `Abs (0x02118); (0x02140,0x02144), `Delta (0); (0x0214B,0x0214B), `Abs (0x0214B); (0x02190,0x02194), `Delta (0); (0x0219A,0x0219B), `Delta (0); (0x021A0,0x021A0), `Abs (0x021A0); (0x021A3,0x021A3), `Abs (0x021A3); (0x021A6,0x021A6), `Abs (0x021A6); (0x021AE,0x021AE), `Abs (0x021AE); (0x021CE,0x021CF), `Delta (0); (0x021D2,0x021D2), `Abs (0x021D2); (0x021D4,0x021D4), `Abs (0x021D4); (0x021F4,0x022FF), `Delta (0); (0x02320,0x02321), `Delta (0); (0x0237C,0x0237C), `Abs (0x0237C); (0x0239B,0x023B3), `Delta (0); (0x023DC,0x023E1), `Delta (0); (0x025B7,0x025B7), `Abs (0x025B7); (0x025C1,0x025C1), `Abs (0x025C1); (0x025F8,0x025FF), `Delta (0); (0x0266F,0x0266F), `Abs (0x0266F); (0x027C0,0x027C4), `Delta (0); (0x027C7,0x027E5), `Delta (0); (0x027F0,0x027FF), `Delta (0); (0x02900,0x02982), `Delta (0); (0x02999,0x029D7), `Delta (0); (0x029DC,0x029FB), `Delta (0); (0x029FE,0x02AFF), `Delta (0); (0x02B30,0x02B44), `Delta (0); (0x02B47,0x02B4C), `Delta (0); (0x0FB29,0x0FB29), `Abs (0x0FB29); (0x0FE62,0x0FE62), `Abs (0x0FE62); (0x0FE64,0x0FE66), `Delta (0); (0x0FF0B,0x0FF0B), `Abs (0x0FF0B); (0x0FF1C,0x0FF1E), `Delta (0); (0x0FF5C,0x0FF5C), `Abs (0x0FF5C); (0x0FF5E,0x0FF5E), `Abs (0x0FF5E); (0x0FFE2,0x0FFE2), `Abs (0x0FFE2); (0x0FFE9,0x0FFEC), `Delta (0); (0x1D6C1,0x1D6C1), `Abs (0x1D6C1); (0x1D6DB,0x1D6DB), `Abs (0x1D6DB); (0x1D6FB,0x1D6FB), `Abs (0x1D6FB); (0x1D715,0x1D715), `Abs (0x1D715); (0x1D735,0x1D735), `Abs (0x1D735); (0x1D74F,0x1D74F), `Abs (0x1D74F); (0x1D76F,0x1D76F), `Abs (0x1D76F); (0x1D789,0x1D789), `Abs (0x1D789); (0x1D7A9,0x1D7A9), `Abs (0x1D7A9); (0x1D7C3,0x1D7C3), `Abs (0x1D7C3); (0x1EEF0,0x1EEF1), `Delta (0); (0x00024,0x00024), `Abs (0x00024); (0x000A2,0x000A5), `Delta (0); (0x0058F,0x0058F), `Abs (0x0058F); (0x0060B,0x0060B), `Abs (0x0060B); (0x009F2,0x009F3), `Delta (0); (0x009FB,0x009FB), `Abs (0x009FB); (0x00AF1,0x00AF1), `Abs (0x00AF1); (0x00BF9,0x00BF9), `Abs (0x00BF9); (0x00E3F,0x00E3F), `Abs (0x00E3F); (0x017DB,0x017DB), `Abs (0x017DB); (0x020A0,0x020BE), `Delta (0); (0x0A838,0x0A838), `Abs (0x0A838); (0x0FDFC,0x0FDFC), `Abs (0x0FDFC); (0x0FE69,0x0FE69), `Abs (0x0FE69); (0x0FF04,0x0FF04), `Abs (0x0FF04); (0x0FFE0,0x0FFE1), `Delta (0); (0x0FFE5,0x0FFE6), `Delta (0); (0x0005E,0x0005E), `Abs (0x0005E); (0x00060,0x00060), `Abs (0x00060); (0x000A8,0x000A8), `Abs (0x000A8); (0x000AF,0x000AF), `Abs (0x000AF); (0x000B4,0x000B4), `Abs (0x000B4); (0x000B8,0x000B8), `Abs (0x000B8); (0x002C2,0x002C5), `Delta (0); (0x002D2,0x002DF), `Delta (0); (0x002E5,0x002EB), `Delta (0); (0x002ED,0x002ED), `Abs (0x002ED); (0x002EF,0x002FF), `Delta (0); (0x00375,0x00375), `Abs (0x00375); (0x00384,0x00385), `Delta (0); (0x01FBD,0x01FBD), `Abs (0x01FBD); (0x01FBF,0x01FC1), `Delta (0); (0x01FCD,0x01FCF), `Delta (0); (0x01FDD,0x01FDF), `Delta (0); (0x01FED,0x01FEF), `Delta (0); (0x01FFD,0x01FFE), `Delta (0); (0x0309B,0x0309C), `Delta (0); (0x0A700,0x0A716), `Delta (0); (0x0A720,0x0A721), `Delta (0); (0x0A789,0x0A78A), `Delta (0); (0x0AB5B,0x0AB5B), `Abs (0x0AB5B); (0x0FBB2,0x0FBC1), `Delta (0); (0x0FF3E,0x0FF3E), `Abs (0x0FF3E); (0x0FF40,0x0FF40), `Abs (0x0FF40); (0x0FFE3,0x0FFE3), `Abs (0x0FFE3); (0x1F3FB,0x1F3FF), `Delta (0); (0x000A6,0x000A6), `Abs (0x000A6); (0x000A9,0x000A9), `Abs 
(0x000A9); (0x000AE,0x000AE), `Abs (0x000AE); (0x000B0,0x000B0), `Abs (0x000B0); (0x00482,0x00482), `Abs (0x00482); (0x0058D,0x0058E), `Delta (0); (0x0060E,0x0060F), `Delta (0); (0x006DE,0x006DE), `Abs (0x006DE); (0x006E9,0x006E9), `Abs (0x006E9); (0x006FD,0x006FE), `Delta (0); (0x007F6,0x007F6), `Abs (0x007F6); (0x009FA,0x009FA), `Abs (0x009FA); (0x00B70,0x00B70), `Abs (0x00B70); (0x00BF3,0x00BF8), `Delta (0); (0x00BFA,0x00BFA), `Abs (0x00BFA); (0x00C7F,0x00C7F), `Abs (0x00C7F); (0x00D4F,0x00D4F), `Abs (0x00D4F); (0x00D79,0x00D79), `Abs (0x00D79); (0x00F01,0x00F03), `Delta (0); (0x00F13,0x00F13), `Abs (0x00F13); (0x00F15,0x00F17), `Delta (0); (0x00F1A,0x00F1F), `Delta (0); (0x00F34,0x00F34), `Abs (0x00F34); (0x00F36,0x00F36), `Abs (0x00F36); (0x00F38,0x00F38), `Abs (0x00F38); (0x00FBE,0x00FC5), `Delta (0); (0x00FC7,0x00FCC), `Delta (0); (0x00FCE,0x00FCF), `Delta (0); (0x00FD5,0x00FD8), `Delta (0); (0x0109E,0x0109F), `Delta (0); (0x01390,0x01399), `Delta (0); (0x01940,0x01940), `Abs (0x01940); (0x019DE,0x019FF), `Delta (0); (0x01B61,0x01B6A), `Delta (0); (0x01B74,0x01B7C), `Delta (0); (0x02100,0x02101), `Delta (0); (0x02103,0x02106), `Delta (0); (0x02108,0x02109), `Delta (0); (0x02114,0x02114), `Abs (0x02114); (0x02116,0x02117), `Delta (0); (0x0211E,0x02123), `Delta (0); (0x02125,0x02125), `Abs (0x02125); (0x02127,0x02127), `Abs (0x02127); (0x02129,0x02129), `Abs (0x02129); (0x0212E,0x0212E), `Abs (0x0212E); (0x0213A,0x0213B), `Delta (0); (0x0214A,0x0214A), `Abs (0x0214A); (0x0214C,0x0214D), `Delta (0); (0x0214F,0x0214F), `Abs (0x0214F); (0x0218A,0x0218B), `Delta (0); (0x02195,0x02199), `Delta (0); (0x0219C,0x0219F), `Delta (0); (0x021A1,0x021A2), `Delta (0); (0x021A4,0x021A5), `Delta (0); (0x021A7,0x021AD), `Delta (0); (0x021AF,0x021CD), `Delta (0); (0x021D0,0x021D1), `Delta (0); (0x021D3,0x021D3), `Abs (0x021D3); (0x021D5,0x021F3), `Delta (0); (0x02300,0x02307), `Delta (0); (0x0230C,0x0231F), `Delta (0); (0x02322,0x02328), `Delta (0); (0x0232B,0x0237B), `Delta (0); (0x0237D,0x0239A), `Delta (0); (0x023B4,0x023DB), `Delta (0); (0x023E2,0x023FE), `Delta (0); (0x02400,0x02426), `Delta (0); (0x02440,0x0244A), `Delta (0); (0x0249C,0x024B5), `Delta (0); (0x024B6,0x024CF), `Delta (26); (0x024D0,0x024E9), `Delta (0); (0x02500,0x025B6), `Delta (0); (0x025B8,0x025C0), `Delta (0); (0x025C2,0x025F7), `Delta (0); (0x02600,0x0266E), `Delta (0); (0x02670,0x02767), `Delta (0); (0x02794,0x027BF), `Delta (0); (0x02800,0x028FF), `Delta (0); (0x02B00,0x02B2F), `Delta (0); (0x02B45,0x02B46), `Delta (0); (0x02B4D,0x02B73), `Delta (0); (0x02B76,0x02B95), `Delta (0); (0x02B98,0x02BB9), `Delta (0); (0x02BBD,0x02BC8), `Delta (0); (0x02BCA,0x02BD1), `Delta (0); (0x02BEC,0x02BEF), `Delta (0); (0x02CE5,0x02CEA), `Delta (0); (0x02E80,0x02E99), `Delta (0); (0x02E9B,0x02EF3), `Delta (0); (0x02F00,0x02FD5), `Delta (0); (0x02FF0,0x02FFB), `Delta (0); (0x03004,0x03004), `Abs (0x03004); (0x03012,0x03013), `Delta (0); (0x03020,0x03020), `Abs (0x03020); (0x03036,0x03037), `Delta (0); (0x0303E,0x0303F), `Delta (0); (0x03190,0x03191), `Delta (0); (0x03196,0x0319F), `Delta (0); (0x031C0,0x031E3), `Delta (0); (0x03200,0x0321E), `Delta (0); (0x0322A,0x03247), `Delta (0); (0x03250,0x03250), `Abs (0x03250); (0x03260,0x0327F), `Delta (0); (0x0328A,0x032B0), `Delta (0); (0x032C0,0x032FE), `Delta (0); (0x03300,0x033FF), `Delta (0); (0x04DC0,0x04DFF), `Delta (0); (0x0A490,0x0A4C6), `Delta (0); (0x0A828,0x0A82B), `Delta (0); (0x0A836,0x0A837), `Delta (0); (0x0A839,0x0A839), `Abs (0x0A839); (0x0AA77,0x0AA79), `Delta (0); 
(0x0FDFD,0x0FDFD), `Abs (0x0FDFD); (0x0FFE4,0x0FFE4), `Abs (0x0FFE4); (0x0FFE8,0x0FFE8), `Abs (0x0FFE8); (0x0FFED,0x0FFEE), `Delta (0); (0x0FFFC,0x0FFFD), `Delta (0); (0x10137,0x1013F), `Delta (0); (0x10179,0x10189), `Delta (0); (0x1018C,0x1018E), `Delta (0); (0x10190,0x1019B), `Delta (0); (0x101A0,0x101A0), `Abs (0x101A0); (0x101D0,0x101FC), `Delta (0); (0x10877,0x10878), `Delta (0); (0x10AC8,0x10AC8), `Abs (0x10AC8); (0x1173F,0x1173F), `Abs (0x1173F); (0x16B3C,0x16B3F), `Delta (0); (0x16B45,0x16B45), `Abs (0x16B45); (0x1BC9C,0x1BC9C), `Abs (0x1BC9C); (0x1D000,0x1D0F5), `Delta (0); (0x1D100,0x1D126), `Delta (0); (0x1D129,0x1D164), `Delta (0); (0x1D16A,0x1D16C), `Delta (0); (0x1D183,0x1D184), `Delta (0); (0x1D18C,0x1D1A9), `Delta (0); (0x1D1AE,0x1D1E8), `Delta (0); (0x1D200,0x1D241), `Delta (0); (0x1D245,0x1D245), `Abs (0x1D245); (0x1D300,0x1D356), `Delta (0); (0x1D800,0x1D9FF), `Delta (0); (0x1DA37,0x1DA3A), `Delta (0); (0x1DA6D,0x1DA74), `Delta (0); (0x1DA76,0x1DA83), `Delta (0); (0x1DA85,0x1DA86), `Delta (0); (0x1F000,0x1F02B), `Delta (0); (0x1F030,0x1F093), `Delta (0); (0x1F0A0,0x1F0AE), `Delta (0); (0x1F0B1,0x1F0BF), `Delta (0); (0x1F0C1,0x1F0CF), `Delta (0); (0x1F0D1,0x1F0F5), `Delta (0); (0x1F110,0x1F12E), `Delta (0); (0x1F130,0x1F16B), `Delta (0); (0x1F170,0x1F1AC), `Delta (0); (0x1F1E6,0x1F202), `Delta (0); (0x1F210,0x1F23B), `Delta (0); (0x1F240,0x1F248), `Delta (0); (0x1F250,0x1F251), `Delta (0); (0x1F300,0x1F3FA), `Delta (0); (0x1F400,0x1F6D2), `Delta (0); (0x1F6E0,0x1F6EC), `Delta (0); (0x1F6F0,0x1F6F6), `Delta (0); (0x1F700,0x1F773), `Delta (0); (0x1F780,0x1F7D4), `Delta (0); (0x1F800,0x1F80B), `Delta (0); (0x1F810,0x1F847), `Delta (0); (0x1F850,0x1F859), `Delta (0); (0x1F860,0x1F887), `Delta (0); (0x1F890,0x1F8AD), `Delta (0); (0x1F910,0x1F91E), `Delta (0); (0x1F920,0x1F927), `Delta (0); (0x1F930,0x1F930), `Abs (0x1F930); (0x1F933,0x1F93E), `Delta (0); (0x1F940,0x1F94B), `Delta (0); (0x1F950,0x1F95E), `Delta (0); (0x1F980,0x1F991), `Delta (0) ];; coq-8.15.0/clib/unionfind.ml000066400000000000000000000073301417001151100156150ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t (** Add (in place) an element in the partition, or do nothing if the element is already in the partition. *) val add : elt -> t -> unit (** Find the canonical representative of an element. Raise [not_found] if the element isn't known yet. *) val find : elt -> t -> elt (** Merge (in place) the equivalence classes of two elements. This will add the elements in the partition if necessary. *) val union : elt -> elt -> t -> unit (** Merge (in place) the equivalence classes of many elements. 
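(* Added sketch, not part of the Coq sources: the generated code-point table
   above (the long list that closes with "];;") pairs inclusive code-point
   ranges with a payload of the form `Delta (d) or `Abs (v).  The helper below
   shows one way such a table can be queried -- a plain scan that returns the
   payload of the first range containing a given code point.  How the payload
   itself is meant to be interpreted (an offset versus an absolute value) is
   left to the table's real consumer and is not assumed here. *)
type payload = [ `Delta of int | `Abs of int ]

let rec lookup (table : ((int * int) * payload) list) (cp : int) : payload option =
  match table with
  | [] -> None
  | ((lo, hi), pl) :: rest ->
    if lo <= cp && cp <= hi then Some pl else lookup rest cp

(* The sample entries are copied from the table above: circled capital letters
   carry a non-zero delta, while most other ranges carry `Delta (0). *)
let () =
  let sample = [ (0x024B6, 0x024CF), `Delta (26); (0x024D0, 0x024E9), `Delta (0) ] in
  match lookup sample 0x024B7 with
  | Some (`Delta d) -> Printf.printf "delta %d\n" d   (* prints "delta 26" *)
  | Some (`Abs v)   -> Printf.printf "abs U+%04X\n" v
  | None            -> print_endline "not covered"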
*) val union_set : set -> t -> unit (** Listing the different components of the partition *) val partition : t -> set list end module type SetS = sig type t type elt val singleton : elt -> t val union : t -> t -> t val choose : t -> elt val iter : (elt -> unit) -> t -> unit end module type MapS = sig type key type +'a t val empty : 'a t val find : key -> 'a t -> 'a val add : key -> 'a -> 'a t -> 'a t val mem : key -> 'a t -> bool val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b end module Make (S:SetS)(M:MapS with type key = S.elt) = struct type elt = S.elt type set = S.t type node = | Canon of set | Equiv of elt type t = node ref M.t ref let create () = ref (M.empty : node ref M.t) let fresh x p = let node = ref (Canon (S.singleton x)) in p := M.add x node !p; x, node let rec lookup x p = let node = M.find x !p in match !node with | Canon _ -> x, node | Equiv y -> let ((z,_) as res) = lookup y p in if not (z == y) then node := Equiv z; res let add x p = if not (M.mem x !p) then ignore (fresh x p) let find x p = fst (lookup x p) let canonical x p = try lookup x p with Not_found -> fresh x p let union x y p = let ((x,_) as xcan) = canonical x p in let ((y,_) as ycan) = canonical y p in if x = y then () else let xcan, ycan = if x < y then xcan, ycan else ycan, xcan in let x,xnode = xcan and y,ynode = ycan in match !xnode, !ynode with | Canon lx, Canon ly -> xnode := Canon (S.union lx ly); ynode := Equiv x; | _ -> assert false let union_set s p = try let x = S.choose s in S.iter (fun y -> union x y p) s with Not_found -> () let partition p = List.rev (M.fold (fun x node acc -> match !node with | Equiv _ -> acc | Canon lx -> lx::acc) !p []) end coq-8.15.0/clib/unionfind.mli000066400000000000000000000050651417001151100157710ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t (** Add (in place) an element in the partition, or do nothing if the element is already in the partition. *) val add : elt -> t -> unit (** Find the canonical representative of an element. Raise [not_found] if the element isn't known yet. *) val find : elt -> t -> elt (** Merge (in place) the equivalence classes of two elements. This will add the elements in the partition if necessary. *) val union : elt -> elt -> t -> unit (** Merge (in place) the equivalence classes of many elements. *) val union_set : set -> t -> unit (** Listing the different components of the partition *) val partition : t -> set list end module type SetS = sig type t type elt val singleton : elt -> t val union : t -> t -> t val choose : t -> elt val iter : (elt -> unit) -> t -> unit end (** Minimal interface for sets, subtype of stdlib's Set. *) module type MapS = sig type key type +'a t val empty : 'a t val find : key -> 'a t -> 'a val add : key -> 'a -> 'a t -> 'a t val mem : key -> 'a t -> bool val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b end (** Minimal interface for maps, subtype of stdlib's Map. 
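(* Added usage sketch, not part of this file: it instantiates the union-find
   functor defined above with the standard library's Set and Map, which satisfy
   the minimal SetS and MapS interfaces.  It assumes the module is linked under
   the name [Unionfind] and that [create] (defined in the implementation above)
   is part of the exported signature. *)
module IntOrd = struct type t = int let compare = compare end
module IntSet = Set.Make (IntOrd)
module IntMap = Map.Make (IntOrd)
module P = Unionfind.Make (IntSet) (IntMap)

let () =
  let p = P.create () in
  P.union 1 2 p;                      (* classes: {1,2}           *)
  P.union 3 4 p;                      (* classes: {1,2} {3,4}     *)
  P.union 2 3 p;                      (* classes: {1,2,3,4}       *)
  P.add 5 p;                          (* classes: {1,2,3,4} {5}   *)
  assert (P.find 4 p = P.find 1 p);   (* same canonical element   *)
  assert (P.find 5 p <> P.find 1 p);
  (* [partition] returns the equivalence classes as a list of sets *)
  List.iter
    (fun s -> Printf.printf "class of %d element(s)\n" (IntSet.cardinal s))
    (P.partition p)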
*) module Make : functor (S:SetS) -> functor (M:MapS with type key = S.elt) -> PartitionSig with type elt = S.elt and type set = S.t coq-8.15.0/config/000077500000000000000000000000001417001151100136235ustar00rootroot00000000000000coq-8.15.0/config/coq_config.mli000066400000000000000000000045411417001151100164410ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* [||] let () = Array.sort compare plugins let () =Array.iter (fun f -> let f' = "plugins/"^f in if Sys.is_directory f' && f.[0] <> '.' then print_endline f) plugins coq-8.15.0/configure000077500000000000000000000006201417001151100142630ustar00rootroot00000000000000#!/bin/sh ## This micro-configure shell script is here only to ## launch the real configuration via ocaml configure=./tools/configure/configure.exe ## Check that dune is available, provide an error message otherwise if ! command -v dune > /dev/null then 1>&2 echo "Dune could not be found, please ensure you have a working OCaml enviroment" exit 1 fi dune exec --root . -- $configure "$@" coq-8.15.0/coq-core.opam000066400000000000000000000033451417001151100147510ustar00rootroot00000000000000# This file is generated by dune, edit dune-project instead opam-version: "2.0" version: "dev" synopsis: "The Coq Proof Assistant -- Core Binaries and Tools" description: """ Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. Typical applications include the certification of properties of programming languages (e.g. the CompCert compiler certification project, or the Bedrock verified low-level programming library), the formalization of mathematics (e.g. the full formalization of the Feit-Thompson theorem or homotopy type theory) and teaching. This package includes the Coq core binaries, plugins, and tools, but not the vernacular standard library. Note that in this setup, Coq needs to be started with the -boot and -noinit options, as will otherwise fail to find the regular Coq prelude, now living in the coq-stdlib package.""" maintainer: ["The Coq development team "] authors: ["The Coq development team, INRIA, CNRS, and contributors"] license: "LGPL-2.1-only" homepage: "https://coq.inria.fr/" doc: "https://coq.github.io/doc/" bug-reports: "https://github.com/coq/coq/issues" depends: [ "dune" {>= "2.5"} "ocaml" {>= "4.05.0"} "ocamlfind" {>= "1.8.1"} "zarith" {>= "1.10"} "ounit2" {with-test} ] build: [ # Requires dune 2.8 due to https://github.com/ocaml/dune/issues/3219 # ["dune" "subst"] {pinned} [ "dune" "build" "-p" name "-j" jobs "@install" "@runtest" {with-test} "@doc" {with-doc} ] ] dev-repo: "git+https://github.com/coq/coq.git" build-env: [ [ COQ_CONFIGURE_PREFIX = "%{prefix}" ] ] coq-8.15.0/coq-core.opam.template000066400000000000000000000000701417001151100165530ustar00rootroot00000000000000build-env: [ [ COQ_CONFIGURE_PREFIX = "%{prefix}" ] ] coq-8.15.0/coq-doc.opam000066400000000000000000000021161417001151100145610ustar00rootroot00000000000000# This file is generated by dune, edit dune-project instead opam-version: "2.0" version: "dev" synopsis: "The Coq Proof Assistant --- Reference Manual" description: """ Coq is a formal proof management system. 
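(* Added sketch, not from the sources: a self-contained version of the
   plugin-listing fragment above ("Array.sort compare plugins ...").  The
   beginning of that fragment is missing from this dump, so the
   [Sys.readdir "plugins"] initialisation used here is an assumption about how
   the [plugins] array is obtained; the rest mirrors the visible code and
   prints the sorted sub-directories of plugins/ while skipping dot entries. *)
let () =
  let plugins = try Sys.readdir "plugins" with Sys_error _ -> [||] in
  Array.sort compare plugins;
  Array.iter
    (fun f ->
       let f' = "plugins/" ^ f in
       if Sys.is_directory f' && f.[0] <> '.' then print_endline f)
    plugins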
It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. This package provides the Coq Reference Manual.""" maintainer: ["The Coq development team "] authors: ["The Coq development team, INRIA, CNRS, and contributors"] license: "OPL-1.0" homepage: "https://coq.inria.fr/" doc: "https://coq.github.io/doc/" bug-reports: "https://github.com/coq/coq/issues" depends: [ "dune" {build & >= "2.5.0"} "conf-python-3" {build} "coq" {build & = version} ] build: [ # Requires dune 2.8 due to https://github.com/ocaml/dune/issues/3219 # ["dune" "subst"] {pinned} [ "dune" "build" "-p" name "-j" jobs "@install" "@runtest" {with-test} "@doc" {with-doc} ] ] dev-repo: "git+https://github.com/coq/coq.git" coq-8.15.0/coq-stdlib.opam000066400000000000000000000026711417001151100153030ustar00rootroot00000000000000# This file is generated by dune, edit dune-project instead opam-version: "2.0" version: "dev" synopsis: "The Coq Proof Assistant -- Standard Library" description: """ Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. Typical applications include the certification of properties of programming languages (e.g. the CompCert compiler certification project, or the Bedrock verified low-level programming library), the formalization of mathematics (e.g. the full formalization of the Feit-Thompson theorem or homotopy type theory) and teaching. This package includes the Coq Standard Library, that is to say, the set of modules usually bound to the Coq.* namespace.""" maintainer: ["The Coq development team "] authors: ["The Coq development team, INRIA, CNRS, and contributors"] license: "LGPL-2.1-only" homepage: "https://coq.inria.fr/" doc: "https://coq.github.io/doc/" bug-reports: "https://github.com/coq/coq/issues" depends: [ "dune" {>= "2.5"} "coq-core" {= version} ] build: [ # Requires dune 2.8 due to https://github.com/ocaml/dune/issues/3219 # ["dune" "subst"] {pinned} [ "dune" "build" "-p" name "-j" jobs "@install" "@runtest" {with-test} "@doc" {with-doc} ] ] dev-repo: "git+https://github.com/coq/coq.git" coq-8.15.0/coq.opam000066400000000000000000000023751417001151100140250ustar00rootroot00000000000000# This file is generated by dune, edit dune-project instead opam-version: "2.0" version: "dev" synopsis: "The Coq Proof Assistant" description: """ Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. Typical applications include the certification of properties of programming languages (e.g. the CompCert compiler certification project, or the Bedrock verified low-level programming library), the formalization of mathematics (e.g. 
the full formalization of the Feit-Thompson theorem or homotopy type theory) and teaching.""" maintainer: ["The Coq development team "] authors: ["The Coq development team, INRIA, CNRS, and contributors"] license: "LGPL-2.1-only" homepage: "https://coq.inria.fr/" doc: "https://coq.github.io/doc/" bug-reports: "https://github.com/coq/coq/issues" depends: [ "dune" {>= "2.5"} "coq-core" {= version} "coq-stdlib" {= version} ] build: [ ["dune" "subst"] {pinned} [ "dune" "build" "-p" name "-j" jobs "@install" "@runtest" {with-test} "@doc" {with-doc} ] ] dev-repo: "git+https://github.com/coq/coq.git" coq-8.15.0/coq.opam.docker000066400000000000000000000024571417001151100152740ustar00rootroot00000000000000synopsis: "The Coq Proof Assistant" description: """ Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. Typical applications include the certification of properties of programming languages (e.g. the CompCert compiler certification project, or the Bedrock verified low-level programming library), the formalization of mathematics (e.g. the full formalization of the Feit-Thompson theorem or homotopy type theory) and teaching. """ opam-version: "2.0" maintainer: "The Coq development team " authors: "The Coq development team, INRIA, CNRS, and contributors." homepage: "https://coq.inria.fr/" bug-reports: "https://github.com/coq/coq/issues" dev-repo: "git+https://github.com/coq/coq.git" license: "LGPL-2.1" version: "dev" depends: [ "ocaml" { >= "4.05.0" } "ocamlfind" { build } "zarith" { >= "1.10" } "conf-findutils" {build} ] depopts: [ "coq-native" ] build: [ [ "./configure" "-prefix" prefix "-coqide" "no" "-native-compiler" "yes" {coq-native:installed} "no" {!coq-native:installed} ] [make "-j%{jobs}%"] [make "-j%{jobs}%" "byte"] ] install: [ [make "install"] [make "install-byte"] ] coq-8.15.0/coqide-server.opam000066400000000000000000000023561417001151100160120ustar00rootroot00000000000000# This file is generated by dune, edit dune-project instead opam-version: "2.0" version: "dev" synopsis: "The Coq Proof Assistant, XML protocol server" description: """ Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. 
This package provides the `coqidetop` language server, an implementation of Coq's [XML protocol](https://github.com/coq/coq/blob/master/dev/doc/xml-protocol.md) which allows clients, such as CoqIDE, to interact with Coq in a structured way.""" maintainer: ["The Coq development team "] authors: ["The Coq development team, INRIA, CNRS, and contributors"] license: "LGPL-2.1-only" homepage: "https://coq.inria.fr/" doc: "https://coq.github.io/doc/" bug-reports: "https://github.com/coq/coq/issues" depends: [ "dune" {>= "2.5"} "coq-core" {= version} ] build: [ # Requires dune 2.8 due to https://github.com/ocaml/dune/issues/3219 # ["dune" "subst"] {pinned} [ "dune" "build" "-p" name "-j" jobs "@install" "@runtest" {with-test} "@doc" {with-doc} ] ] dev-repo: "git+https://github.com/coq/coq.git" coq-8.15.0/coqide.opam000066400000000000000000000021421417001151100144770ustar00rootroot00000000000000# This file is generated by dune, edit dune-project instead opam-version: "2.0" version: "dev" synopsis: "The Coq Proof Assistant --- GTK3 IDE" description: """ Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. This package provides the CoqIDE, a graphical user interface for the development of interactive proofs.""" maintainer: ["The Coq development team "] authors: ["The Coq development team, INRIA, CNRS, and contributors"] license: "LGPL-2.1-only" homepage: "https://coq.inria.fr/" doc: "https://coq.github.io/doc/" bug-reports: "https://github.com/coq/coq/issues" depends: [ "dune" {>= "2.5"} "coqide-server" {= version} ] build: [ # Requires dune 2.8 due to https://github.com/ocaml/dune/issues/3219 # ["dune" "subst"] {pinned} [ "dune" "build" "-p" name "-j" jobs "@install" "@runtest" {with-test} "@doc" {with-doc} ] ] dev-repo: "git+https://github.com/coq/coq.git" coq-8.15.0/coqpp/000077500000000000000000000000001417001151100135005ustar00rootroot00000000000000coq-8.15.0/coqpp/coqpp_ast.mli000066400000000000000000000073651417001151100162070ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Buffer.add_string ocaml_buf "{" | Extend -> ocaml_start_pos := lexeme_start_p lexbuf in incr num_braces let end_ocaml lexbuf = let () = decr num_braces in if !num_braces < 0 then lex_error lexbuf "Unexpected end of OCaml code" else if !num_braces = 0 then let s = Buffer.contents ocaml_buf in let () = Buffer.reset ocaml_buf in let loc = { Coqpp_ast.loc_start = !ocaml_start_pos; Coqpp_ast.loc_end = lexeme_end_p lexbuf } in Some (CODE { Coqpp_ast.code = s; loc }) else let () = Buffer.add_string ocaml_buf "}" in None } let letter = ['a'-'z' 'A'-'Z'] let letterlike = ['_' 'a'-'z' 'A'-'Z'] let alphanum = ['_' 'a'-'z' 'A'-'Z' '0'-'9' '\''] let ident = letterlike alphanum* let qualid = ident ('.' 
ident)* let space = [' ' '\t' '\r'] let number = [ '0'-'9' ] rule extend = parse | "(*" { start_comment (); comment lexbuf } | "{" { start_ocaml lexbuf; ocaml lexbuf } | "GRAMMAR" { GRAMMAR } | "VERNAC" { VERNAC } | "COMMAND" { COMMAND } | "TACTIC" { TACTIC } | "EXTEND" { EXTEND } | "DOC_GRAMMAR" { DOC_GRAMMAR } | "END" { END } | "DECLARE" { DECLARE } | "PLUGIN" { PLUGIN } | "DEPRECATED" { DEPRECATED } | "CLASSIFIED" { CLASSIFIED } | "STATE" { STATE } | "PRINTED" { PRINTED } | "TYPED" { TYPED } | "INTERPRETED" { INTERPRETED } | "GLOBALIZED" { GLOBALIZED } | "SUBSTITUTED" { SUBSTITUTED } | "ARGUMENT" { ARGUMENT } | "RAW_PRINTED" { RAW_PRINTED } | "GLOB_PRINTED" { GLOB_PRINTED } | "BY" { BY } | "AS" { AS } (** Camlp5 specific keywords *) | "GLOBAL" { GLOBAL } | "TOP" { TOP } | "FIRST" { FIRST } | "LAST" { LAST } | "BEFORE" { BEFORE } | "AFTER" { AFTER } | "LEVEL" { LEVEL } | "LEFTA" { LEFTA } | "RIGHTA" { RIGHTA } | "NONA" { NONA } (** Standard *) | ident { IDENT (Lexing.lexeme lexbuf) } | qualid { QUALID (Lexing.lexeme lexbuf) } | number { INT (int_of_string (Lexing.lexeme lexbuf)) } | space { extend lexbuf } | '\"' { string lexbuf } | '\n' { newline lexbuf; extend lexbuf } | "![" { BANGBRACKET } | "#[" { HASHBRACKET } | '[' { LBRACKET } | ']' { RBRACKET } | '|' { PIPE } | "->" { ARROW } | "=>" { FUN } | ',' { COMMA } | ':' { COLON } | ';' { SEMICOLON } | '(' { LPAREN } | ')' { RPAREN } | '=' { EQUAL } | '*' { STAR } | _ { lex_error lexbuf "syntax error" } | eof { EOF } and ocaml = parse | "{" { start_ocaml lexbuf; ocaml lexbuf } | "}" { match end_ocaml lexbuf with Some tk -> tk | None -> ocaml lexbuf } | '\n' { newline lexbuf; Buffer.add_char ocaml_buf '\n'; ocaml lexbuf } | '\"' { Buffer.add_char ocaml_buf '\"'; ocaml_string lexbuf } | (_ as c) { Buffer.add_char ocaml_buf c; ocaml lexbuf } | eof { lex_unexpected_eof lexbuf "OCaml code" } and comment = parse | "*)" { match end_comment lexbuf with Some _ -> extend lexbuf | None -> comment lexbuf } | "(*" { start_comment lexbuf; comment lexbuf } | '\n' { newline lexbuf; Buffer.add_char comment_buf '\n'; comment lexbuf } | (_ as c) { Buffer.add_char comment_buf c; comment lexbuf } | eof { lex_unexpected_eof lexbuf "comment" } and string = parse | '\"' { let s = Buffer.contents string_buf in let () = Buffer.reset string_buf in STRING s } | "\\\"" { Buffer.add_char string_buf '\"'; string lexbuf } | '\n' { newline lexbuf; Buffer.add_char string_buf '\n'; string lexbuf } | (_ as c) { Buffer.add_char string_buf c; string lexbuf } | eof { lex_unexpected_eof lexbuf "string" } and ocaml_string = parse | "\\\"" { Buffer.add_string ocaml_buf "\\\""; ocaml_string lexbuf } | '\"' { Buffer.add_char ocaml_buf '\"'; ocaml lexbuf } | (_ as c) { Buffer.add_char ocaml_buf c; ocaml_string lexbuf } | eof { lex_unexpected_eof lexbuf "OCaml string" } { let token lexbuf = match mode () with | OCaml -> ocaml lexbuf | Extend -> extend lexbuf } coq-8.15.0/coqpp/coqpp_main.ml000066400000000000000000000571161417001151100161720ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* %s@]@\n" code_insert module StringSet = Set.Make(String) let string_split s = let len = String.length s in let rec split n = try let pos = String.index_from s n '.' 
in let dir = String.sub s n (pos-n) in dir :: split (succ pos) with | Not_found -> [String.sub s n (len-n)] in if len == 0 then [] else split 0 let plugin_name = "__coq_plugin_name" let print_list fmt pr l = let rec prl fmt = function | [] -> () | [x] -> fprintf fmt "%a" pr x | x :: l -> fprintf fmt "%a;@ %a" pr x prl l in fprintf fmt "@[[%a]@]" prl l let rec print_binders fmt = function | [] -> () | ExtTerminal _ :: rem -> print_binders fmt rem | ExtNonTerminal (_, TokNone) :: rem -> fprintf fmt "_@ %a" print_binders rem | ExtNonTerminal (_, TokName id) :: rem -> fprintf fmt "%s@ %a" id print_binders rem let rec print_symbol fmt = function | Ulist1 s -> fprintf fmt "@[Extend.TUlist1 (%a)@]" print_symbol s | Ulist1sep (s, sep) -> fprintf fmt "@[Extend.TUlist1sep (%a, \"%s\")@]" print_symbol s sep | Ulist0 s -> fprintf fmt "@[Extend.TUlist0 (%a)@]" print_symbol s | Ulist0sep (s, sep) -> fprintf fmt "@[Extend.TUlist0sep (%a, \"%s\")@]" print_symbol s sep | Uopt s -> fprintf fmt "@[Extend.TUopt (%a)@]" print_symbol s | Uentry e -> fprintf fmt "@[Extend.TUentry (Genarg.get_arg_tag wit_%s)@]" e | Uentryl (e, l) -> assert (e = "tactic"); fprintf fmt "@[Extend.TUentryl (Genarg.get_arg_tag wit_%s, %i)@]" e l let print_string fmt s = fprintf fmt "\"%s\"" s let print_opt fmt pr = function | None -> fprintf fmt "None" | Some x -> fprintf fmt "Some@ @[(%a)@]" pr x module GramExt : sig val print_extrule : Format.formatter -> (symb list * string option list * code) -> unit val print_ast : Format.formatter -> grammar_ext -> unit end = struct let is_uident s = match s.[0] with | 'A'..'Z' -> true | _ -> false let is_qualified = is_uident let get_local_entries ext = let global = StringSet.of_list ext.gramext_globals in let map e = e.gentry_name in let entries = List.map map ext.gramext_entries in let local = List.filter (fun e -> not (is_qualified e || StringSet.mem e global)) entries in let rec uniquize seen = function | [] -> [] | id :: rem -> let rem = uniquize (StringSet.add id seen) rem in if StringSet.mem id seen then rem else id :: rem in uniquize StringSet.empty local let print_local fmt ext = let locals = get_local_entries ext in match locals with | [] -> () | e :: locals -> let mk_e fmt e = fprintf fmt "Pcoq.Entry.make \"%s\"" e in let () = fprintf fmt "@[let %s =@ @[%a@]@]@ " e mk_e e in let iter e = fprintf fmt "@[and %s =@ @[%a@]@]@ " e mk_e e in let () = List.iter iter locals in fprintf fmt "in@ " let print_position fmt pos = match pos with | First -> fprintf fmt "Gramlib.Gramext.First" | Last -> fprintf fmt "Gramlib.Gramext.Last" | Before s -> fprintf fmt "Gramlib.Gramext.Before@ \"%s\"" s | After s -> fprintf fmt "Gramlib.Gramext.After@ \"%s\"" s let print_assoc fmt = function | LeftA -> fprintf fmt "Gramlib.Gramext.LeftA" | RightA -> fprintf fmt "Gramlib.Gramext.RightA" | NonA -> fprintf fmt "Gramlib.Gramext.NonA" let is_token s = match string_split s with | [s] -> is_uident s | _ -> false let rec parse_tokens ?(in_anon=false) = let err_anon () = if in_anon then fatal (Printf.sprintf "'SELF' or 'NEXT' illegal in anonymous entry level") in function | [GSymbString s] -> SymbToken ("", Some s) | [GSymbQualid ("QUOTATION", None); GSymbString s] -> SymbToken ("QUOTATION", Some s) | [GSymbQualid ("SELF", None)] -> err_anon (); SymbSelf | [GSymbQualid ("NEXT", None)] -> err_anon (); SymbNext | [GSymbQualid ("LIST0", None); tkn] -> SymbList0 (parse_token ~in_anon tkn, None) | [GSymbQualid ("LIST1", None); tkn] -> SymbList1 (parse_token ~in_anon tkn, None) | [GSymbQualid ("LIST0", None); tkn; 
GSymbQualid ("SEP", None); tkn'] -> SymbList0 (parse_token ~in_anon tkn, Some (parse_token ~in_anon tkn')) | [GSymbQualid ("LIST1", None); tkn; GSymbQualid ("SEP", None); tkn'] -> SymbList1 (parse_token ~in_anon tkn, Some (parse_token ~in_anon tkn')) | [GSymbQualid ("OPT", None); tkn] -> SymbOpt (parse_token ~in_anon tkn) | [GSymbQualid (e, None)] when is_token e -> SymbToken (e, None) | [GSymbQualid (e, None); GSymbString s] when is_token e -> SymbToken (e, Some s) | [GSymbQualid (e, lvl)] when not (is_token e) -> SymbEntry (e, lvl) | [GSymbParen tkns] -> parse_tokens ~in_anon tkns | [GSymbProd prds] -> let map p = let map (pat, tkns) = (pat, parse_tokens ~in_anon:true tkns) in (List.map map p.gprod_symbs, p.gprod_body) in SymbRules (List.map map prds) | t -> let rec db_token = function | GSymbString s -> Printf.sprintf "\"%s\"" s | GSymbQualid (s, _) -> Printf.sprintf "%s" s | GSymbParen s -> Printf.sprintf "(%s)" (db_tokens s) | GSymbProd _ -> Printf.sprintf "[...]" and db_tokens tkns = let s = List.map db_token tkns in String.concat " " s in fatal (Printf.sprintf "Invalid token: %s" (db_tokens t)) and parse_token ~in_anon tkn = parse_tokens ~in_anon [tkn] let print_fun fmt (vars, body) = let vars = List.rev vars in let iter = function | None -> fprintf fmt "_@ " | Some id -> fprintf fmt "%s@ " id in let () = fprintf fmt "fun@ " in let () = List.iter iter vars in let () = fprintf fmt "loc ->@ @[%a@]" print_code body in () (** Meta-program instead of calling Tok.of_pattern here because otherwise violates value restriction *) let print_tok fmt = let print_pat fmt = print_opt fmt print_string in function | "", Some s -> fprintf fmt "Tok.PKEYWORD (%a)" print_string s | "IDENT", s -> fprintf fmt "Tok.PIDENT (%a)" print_pat s | "PATTERNIDENT", s -> fprintf fmt "Tok.PPATTERNIDENT (%a)" print_pat s | "FIELD", s -> fprintf fmt "Tok.PFIELD (%a)" print_pat s | "NUMBER", None -> fprintf fmt "Tok.PNUMBER None" | "NUMBER", Some s -> fprintf fmt "Tok.PNUMBER (Some (NumTok.Unsigned.of_string %a))" print_string s | "STRING", s -> fprintf fmt "Tok.PSTRING (%a)" print_pat s | "LEFTQMARK", None -> fprintf fmt "Tok.PLEFTQMARK" | "BULLET", s -> fprintf fmt "Tok.PBULLET (%a)" print_pat s | "QUOTATION", Some s -> fprintf fmt "Tok.PQUOTATION %a" print_string s | "EOI", None -> fprintf fmt "Tok.PEOI" | _ -> failwith "Tok.of_pattern: not a constructor" let rec print_prod fmt p = let (vars, tkns) = List.split p.gprod_symbs in let tkn = List.map parse_tokens tkns in print_extrule fmt (tkn, vars, p.gprod_body) and print_extrule fmt (tkn, vars, body) = let tkn = List.rev tkn in fprintf fmt "@[Pcoq.Production.make@ @[(%a)@]@ @[(%a)@]@]" (print_symbols ~norec:false) tkn print_fun (vars, body) and print_symbols ~norec fmt = function | [] -> fprintf fmt "Pcoq.Rule.stop" | tkn :: tkns -> let c = if norec then "Pcoq.Rule.next_norec" else "Pcoq.Rule.next" in fprintf fmt "%s @[(%a)@ (%a)@]" c (print_symbols ~norec) tkns print_symbol tkn and print_symbol fmt tkn = match tkn with | SymbToken (t, s) -> fprintf fmt "(Pcoq.Symbol.token (%a))" print_tok (t, s) | SymbEntry (e, None) -> fprintf fmt "(Pcoq.Symbol.nterm %s)" e | SymbEntry (e, Some l) -> fprintf fmt "(Pcoq.Symbol.nterml %s (%a))" e print_string l | SymbSelf -> fprintf fmt "Pcoq.Symbol.self" | SymbNext -> fprintf fmt "Pcoq.Symbol.next" | SymbList0 (s, None) -> fprintf fmt "(Pcoq.Symbol.list0 %a)" print_symbol s | SymbList0 (s, Some sep) -> fprintf fmt "(Pcoq.Symbol.list0sep (%a) (%a) false)" print_symbol s print_anonymized_symbol sep | SymbList1 (s, None) -> fprintf fmt 
"(Pcoq.Symbol.list1 (%a))" print_symbol s | SymbList1 (s, Some sep) -> fprintf fmt "(Pcoq.Symbol.list1sep (%a) (%a) false)" print_symbol s print_anonymized_symbol sep | SymbOpt s -> fprintf fmt "(Pcoq.Symbol.opt %a)" print_symbol s | SymbRules rules -> let pr fmt (r, body) = let (vars, tkn) = List.split r in let tkn = List.rev tkn in fprintf fmt "Pcoq.Rules.make @[(%a)@ (%a)@]" (print_symbols ~norec:true) tkn print_fun (vars, body) in let pr fmt rules = print_list fmt pr rules in fprintf fmt "(Pcoq.Symbol.rules %a)" pr (List.rev rules) | SymbQuote c -> fprintf fmt "(%s)" c and print_anonymized_symbol fmt tkn = match tkn with | SymbToken (t, s) -> fprintf fmt "(Pcoq.Symbol.tokens [Pcoq.TPattern (%a)])" print_tok (t, s) | _ -> print_symbol fmt (SymbRules [[None, tkn], mk_code "()"]) let print_rule fmt r = let pr_lvl fmt lvl = print_opt fmt print_string lvl in let pr_asc fmt asc = print_opt fmt print_assoc asc in let pr_prd fmt prd = print_list fmt print_prod prd in fprintf fmt "@[(%a,@ %a,@ %a)@]" pr_lvl r.grule_label pr_asc r.grule_assoc pr_prd (List.rev r.grule_prods) let print_entry fmt e = match e.gentry_rules with | GDataReuse (pos, r) -> let rules = List.rev r in let pr_pos fmt pos = print_opt fmt print_string pos in let pr_prd fmt prd = print_list fmt print_prod prd in fprintf fmt "let () =@ @[Pcoq.grammar_extend@ %s@ @[(Pcoq.Reuse (%a, %a))@]@]@ in@ " e.gentry_name pr_pos pos pr_prd rules | GDataFresh (pos, rules) -> let print_rules fmt rules = print_list fmt print_rule rules in let pr_check fmt () = match pos with | None -> fprintf fmt "let () =@ @[assert@ (Pcoq.Entry.is_empty@ %s)@]@ in@\n" e.gentry_name | Some _ -> fprintf fmt "" in let pos = match pos with None -> First | Some pos -> pos in fprintf fmt "%alet () =@ @[Pcoq.grammar_extend@ %s@ @[(Pcoq.Fresh@ (%a, %a))@]@]@ in@ " pr_check () e.gentry_name print_position pos print_rules rules let print_ast fmt ext = let () = fprintf fmt "let _ = @[" in let () = fprintf fmt "@[%a@]" print_local ext in let () = List.iter (fun e -> print_entry fmt e) ext.gramext_entries in let () = fprintf fmt "()@]@\n" in () end module VernacExt : sig val print_ast : Format.formatter -> vernac_ext -> unit end = struct let print_rule_classifier fmt r = match r.vernac_class with | None -> fprintf fmt "None" | Some f -> let no_binder = function ExtTerminal _ -> true | ExtNonTerminal _ -> false in if List.for_all no_binder r.vernac_toks then fprintf fmt "Some @[%a@]" print_code f else fprintf fmt "Some @[(fun %a-> %a)@]" print_binders r.vernac_toks print_code f (* let print_atts fmt = function *) (* | None -> fprintf fmt "@[let () = Attributes.unsupported_attributes atts in@] " *) (* | Some atts -> *) (* let rec print_left fmt = function *) (* | [] -> assert false *) (* | [x,_] -> fprintf fmt "%s" x *) (* | (x,_) :: rem -> fprintf fmt "(%s, %a)" x print_left rem *) (* in *) (* let rec print_right fmt = function *) (* | [] -> assert false *) (* | [_,y] -> fprintf fmt "%s" y *) (* | (_,y) :: rem -> fprintf fmt "(%s ++ %a)" y print_right rem *) (* in *) (* let nota = match atts with [_] -> "" | _ -> "Attributes.Notations." 
in *) (* fprintf fmt "@[let %a = Attributes.parse %s(%a) atts in@] " *) (* print_left atts nota print_right atts *) let print_atts_left fmt = function | None -> fprintf fmt "()" | Some atts -> let rec aux fmt = function | [] -> assert false | [x,_] -> fprintf fmt "%s" x | (x,_) :: rem -> fprintf fmt "(%s, %a)" x aux rem in aux fmt atts let print_atts_right fmt = function | None -> fprintf fmt "(Attributes.unsupported_attributes atts)" | Some atts -> let rec aux fmt = function | [] -> assert false | [_,y] -> fprintf fmt "%s" y | (_,y) :: rem -> fprintf fmt "(%s ++ %a)" y aux rem in let nota = match atts with [_] -> "" | _ -> "Attributes.Notations." in fprintf fmt "(Attributes.parse %s%a atts)" nota aux atts let understand_state = function | "close_proof" -> "vtcloseproof", false | "open_proof" -> "vtopenproof", true | "proof" -> "vtmodifyproof", false | "proof_opt_query" -> "vtreadproofopt", false | "proof_query" -> "vtreadproof", false | "read_program" -> "vtreadprogram", false | "program" -> "vtmodifyprogram", false | "declare_program" -> "vtdeclareprogram", false | "program_interactive" -> "vtopenproofprogram", false | s -> fatal ("unsupported state specifier: " ^ s) let print_body_state state fmt r = let state = match r.vernac_state with Some _ as s -> s | None -> state in match state with | None -> fprintf fmt "Vernacextend.vtdefault (fun () -> %a)" print_code r.vernac_body | Some "CUSTOM" -> print_code fmt r.vernac_body | Some state -> let state, unit_wrap = understand_state state in fprintf fmt "Vernacextend.%s (%s%a)" state (if unit_wrap then "fun () ->" else "") print_code r.vernac_body let print_body_fun state fmt r = fprintf fmt "let coqpp_body %a%a = @[%a@] in " print_binders r.vernac_toks print_atts_left r.vernac_atts (print_body_state state) r let print_body state fmt r = fprintf fmt "@[(%afun %a?loc ~atts ()@ -> coqpp_body %a%a)@]" (print_body_fun state) r print_binders r.vernac_toks print_binders r.vernac_toks print_atts_right r.vernac_atts let rec print_sig fmt = function | [] -> fprintf fmt "@[Vernacextend.TyNil@]" | ExtTerminal s :: rem -> fprintf fmt "@[Vernacextend.TyTerminal (\"%s\", %a)@]" s print_sig rem | ExtNonTerminal (symb, _) :: rem -> fprintf fmt "@[Vernacextend.TyNonTerminal (%a, %a)@]" print_symbol symb print_sig rem let print_rule state fmt r = fprintf fmt "Vernacextend.TyML (%b, %a, %a, %a)" r.vernac_depr print_sig r.vernac_toks (print_body state) r print_rule_classifier r let print_rules state fmt rules = print_list fmt (fun fmt r -> fprintf fmt "(%a)" (print_rule state) r) rules let print_classifier fmt = function | ClassifDefault -> fprintf fmt "" | ClassifName "QUERY" -> fprintf fmt "~classifier:(fun _ -> Vernacextend.classify_as_query)" | ClassifName "SIDEFF" -> fprintf fmt "~classifier:(fun _ -> Vernacextend.classify_as_sideeff)" | ClassifName s -> fatal (Printf.sprintf "Unknown classifier %s" s) | ClassifCode c -> fprintf fmt "~classifier:(%s)" c.code let print_entry fmt = function | None -> fprintf fmt "None" | Some e -> fprintf fmt "(Some (%s))" e.code let print_ast fmt ext = let pr fmt () = fprintf fmt "Vernacextend.vernac_extend ~command:\"%s\" %a ?entry:%a %a" ext.vernacext_name print_classifier ext.vernacext_class print_entry ext.vernacext_entry (print_rules ext.vernacext_state) ext.vernacext_rules in let () = fprintf fmt "let () = @[%a@]@\n" pr () in () end module TacticExt : sig val print_ast : Format.formatter -> tactic_ext -> unit end = struct let rec print_clause fmt = function | [] -> fprintf fmt "@[Tacentries.TyNil@]" | ExtTerminal s :: 
cl -> fprintf fmt "@[Tacentries.TyIdent (\"%s\", %a)@]" s print_clause cl | ExtNonTerminal (g, _) :: cl -> fprintf fmt "@[Tacentries.TyArg (%a, %a)@]" print_symbol g print_clause cl let print_rule fmt r = fprintf fmt "@[Tacentries.TyML (%a, @[(fun %aist@ -> %a)@])@]" print_clause r.tac_toks print_binders r.tac_toks print_code r.tac_body let print_rules fmt rules = print_list fmt (fun fmt r -> fprintf fmt "(%a)" print_rule r) rules let print_ast fmt ext = let deprecation fmt = function | None -> () | Some { code } -> fprintf fmt "~deprecation:(%s) " code in let pr fmt () = let level = match ext.tacext_level with None -> 0 | Some i -> i in fprintf fmt "Tacentries.tactic_extend %s \"%s\" ~level:%i %a%a" plugin_name ext.tacext_name level deprecation ext.tacext_deprecated print_rules ext.tacext_rules in let () = fprintf fmt "let () = @[%a@]\n" pr () in () end module VernacArgumentExt : sig val print_ast : Format.formatter -> vernac_argument_ext -> unit val print_rules : Format.formatter -> string * tactic_rule list -> unit end = struct let terminal s = let p = if s <> "" && s.[0] >= '0' && s.[0] <= '9' then "CLexer.terminal_number" else "CLexer.terminal" in let c = Printf.sprintf "Pcoq.Symbol.token (%s \"%s\")" p s in SymbQuote c let rec parse_symb self = function | Ulist1 s -> SymbList1 (parse_symb self s, None) | Ulist1sep (s, sep) -> SymbList1 (parse_symb self s, Some (terminal sep)) | Ulist0 s -> SymbList0 (parse_symb self s, None) | Ulist0sep (s, sep) -> SymbList0 (parse_symb self s, Some (terminal sep)) | Uopt s -> SymbOpt (parse_symb self s) | Uentry e -> if e = self then SymbSelf else SymbEntry (e, None) | Uentryl (e, l) -> assert (e = "tactic"); if l = 5 then SymbEntry ("Pltac.binder_tactic", None) else SymbEntry ("Pltac.ltac_expr", Some (string_of_int l)) let parse_token self = function | ExtTerminal s -> (terminal s, None) | ExtNonTerminal (e, TokNone) -> (parse_symb self e, None) | ExtNonTerminal (e, TokName s) -> (parse_symb self e, Some s) let parse_rule self r = let symbs = List.map (fun t -> parse_token self t) r.tac_toks in let symbs, vars = List.split symbs in (symbs, vars, r.tac_body) let print_rules fmt (name, rules) = (* Rules are reversed. *) let rules = List.rev rules in let rules = List.map (fun r -> parse_rule name r) rules in let pr fmt l = print_list fmt (fun fmt r -> fprintf fmt "(%a)" GramExt.print_extrule r) l in match rules with | [([SymbEntry (e, None)], [Some s], { code = c } )] when String.trim c = s -> (* This is a horrible hack to work around limitations of camlp5 regarding factorization of parsing rules. It allows to recognize rules of the form [ entry(x) ] -> [ x ] so as not to generate a proxy entry and reuse the same entry directly. 
*) fprintf fmt "@[Vernacextend.Arg_alias (%s)@]" e | _ -> fprintf fmt "@[Vernacextend.Arg_rules (%a)@]" pr rules let print_printer fmt = function | None -> fprintf fmt "@[fun _ -> Pp.str \"missing printer\"@]" | Some f -> print_code fmt f let print_ast fmt arg = let name = arg.vernacargext_name in let pr fmt () = fprintf fmt "Vernacextend.vernac_argument_extend ~name:%a @[{@\n\ Vernacextend.arg_parsing = %a;@\n\ Vernacextend.arg_printer = fun env sigma -> %a;@\n}@]" print_string name print_rules (name, arg.vernacargext_rules) print_printer arg.vernacargext_printer in fprintf fmt "let (wit_%s, %s) = @[%a@]@\nlet _ = (wit_%s, %s)@\n" name name pr () name name end module ArgumentExt : sig val print_ast : Format.formatter -> argument_ext -> unit end = struct let rec print_argtype fmt = function | ExtraArgType s -> fprintf fmt "Geninterp.val_tag (Genarg.topwit wit_%s)" s | PairArgType (arg1, arg2) -> fprintf fmt "Geninterp.Val.Pair (@[(%a)@], @[(%a)@])" print_argtype arg1 print_argtype arg2 | ListArgType arg -> fprintf fmt "Geninterp.Val.List @[(%a)@]" print_argtype arg | OptArgType arg -> fprintf fmt "Geninterp.Val.Opt @[(%a)@]" print_argtype arg let rec print_wit fmt = function | ExtraArgType s -> fprintf fmt "wit_%s" s | PairArgType (arg1, arg2) -> fprintf fmt "Genarg.PairArg (@[(%a)@], @[(%a)@])" print_wit arg1 print_wit arg2 | ListArgType arg -> fprintf fmt "Genarg.ListArg @[(%a)@]" print_wit arg | OptArgType arg -> fprintf fmt "Genarg.OptArg @[(%a)@]" print_wit arg let print_ast fmt arg = let name = arg.argext_name in let pr_tag fmt t = print_opt fmt print_argtype t in let intern fmt () = match arg.argext_glob, arg.argext_type with | Some f, (None | Some _) -> fprintf fmt "@[Tacentries.ArgInternFun ((fun f ist v -> (ist, f ist v)) (%a))@]" print_code f | None, Some t -> fprintf fmt "@[Tacentries.ArgInternWit (%a)@]" print_wit t | None, None -> fprintf fmt "@[Tacentries.ArgInternFun (fun ist v -> (ist, v))@]" in let subst fmt () = match arg.argext_subst, arg.argext_type with | Some f, (None | Some _) -> fprintf fmt "@[Tacentries.ArgSubstFun (%a)@]" print_code f | None, Some t -> fprintf fmt "@[Tacentries.ArgSubstWit (%a)@]" print_wit t | None, None -> fprintf fmt "@[Tacentries.ArgSubstFun (fun s v -> v)@]" in let interp fmt () = match arg.argext_interp, arg.argext_type with | Some (None, f), (None | Some _) -> fprintf fmt "@[Tacentries.ArgInterpSimple (%a)@]" print_code f | Some (Some "legacy", f), (None | Some _) -> fprintf fmt "@[Tacentries.ArgInterpLegacy (%a)@]" print_code f | Some (Some kind, f), (None | Some _) -> fatal (Printf.sprintf "Unknown kind %s of interpretation function" kind) | None, Some t -> fprintf fmt "@[Tacentries.ArgInterpWit (%a)@]" print_wit t | None, None -> fprintf fmt "@[Tacentries.ArgInterpRet@]" in let default_printer = mk_code "fun _ _ _ _ -> Pp.str \"missing printer\"" in let rpr = match arg.argext_rprinter, arg.argext_tprinter with | Some f, (None | Some _) -> f | None, Some f -> f | None, None -> default_printer in let gpr = match arg.argext_gprinter, arg.argext_tprinter with | Some f, (None | Some _) -> f | None, Some f -> f | None, None -> default_printer in let tpr = match arg.argext_tprinter with | Some f -> f | None -> default_printer in let pr fmt () = fprintf fmt "Tacentries.argument_extend ~name:%a @[{@\n\ Tacentries.arg_parsing = %a;@\n\ Tacentries.arg_tag = @[%a@];@\n\ Tacentries.arg_intern = @[%a@];@\n\ Tacentries.arg_subst = @[%a@];@\n\ Tacentries.arg_interp = @[%a@];@\n\ Tacentries.arg_printer = @[((fun env sigma -> %a), (fun env sigma -> %a), 
(fun env sigma -> %a))@];@\n}@]" print_string name VernacArgumentExt.print_rules (name, arg.argext_rules) pr_tag arg.argext_type intern () subst () interp () print_code rpr print_code gpr print_code tpr in fprintf fmt "let (wit_%s, %s) = @[%a@]@\nlet _ = (wit_%s, %s)@\n" name name pr () name name end let declare_plugin fmt name = fprintf fmt "let %s = \"%s\"@\n" plugin_name name; fprintf fmt "let _ = Mltop.add_known_module %s@\n" plugin_name let pr_ast fmt = function | Code s -> fprintf fmt "%a@\n" print_code s | Comment s -> fprintf fmt "%s@\n" s | DeclarePlugin name -> declare_plugin fmt name | GramExt gram -> fprintf fmt "%a@\n" GramExt.print_ast gram | VernacExt vernac -> fprintf fmt "%a@\n" VernacExt.print_ast vernac | VernacArgumentExt arg -> fprintf fmt "%a@\n" VernacArgumentExt.print_ast arg | TacticExt tac -> fprintf fmt "%a@\n" TacticExt.print_ast tac | ArgumentExt arg -> fprintf fmt "%a@\n" ArgumentExt.print_ast arg let help () = Format.eprintf "Usage: coqpp file.mlg@\n%!"; exit 1 let parse () = let () = if Array.length Sys.argv <> 2 then help () in match Sys.argv.(1) with | "-help" | "--help" -> help () | file -> file let output_name file = try Filename.chop_extension file ^ ".ml" with | Invalid_argument _ -> fatal "Input file must have an extension for coqpp [input.ext -> input.ml]" let () = let file = parse () in let output = output_name file in let ast = parse_file file in let chan = open_out output in let fmt = formatter_of_out_channel chan in let iter ast = Format.fprintf fmt "@[%a@]%!"pr_ast ast in let () = List.iter iter ast in let () = close_out chan in exit 0 coq-8.15.0/coqpp/coqpp_parse.mly000066400000000000000000000234101417001151100165370ustar00rootroot00000000000000/************************************************************************/ /* * The Coq Proof Assistant / The Coq Development Team */ /* v * Copyright INRIA, CNRS and contributors */ /* None | Some s -> ends s pat2 let without_sep k sep r = if sep <> "" then raise Parsing.Parse_error else k r let parse_user_entry s sep = let table = [ "ne_", "_list", without_sep (fun r -> Ulist1 r); "ne_", "_list_sep", (fun sep r -> Ulist1sep (r, sep)); "", "_list", without_sep (fun r -> Ulist0 r); "", "_list_sep", (fun sep r -> Ulist0sep (r, sep)); "", "_opt", without_sep (fun r -> Uopt r); ] in let rec parse s sep = function | [] -> let () = without_sep ignore sep () in begin match starts s "tactic" with | Some ("0"|"1"|"2"|"3"|"4"|"5" as s) -> Uentryl ("tactic", int_of_string s) | Some _ | None -> Uentry s end | (pat1, pat2, k) :: rem -> match between s pat1 pat2 with | None -> parse s sep rem | Some s -> let r = parse s "" table in k sep r in parse s sep table let no_code = { code = ""; loc = { loc_start=Lexing.dummy_pos; loc_end=Lexing.dummy_pos} } %} %token CODE %token COMMENT %token IDENT QUALID %token STRING %token INT %token VERNAC TACTIC GRAMMAR DOC_GRAMMAR EXTEND END DECLARE PLUGIN DEPRECATED ARGUMENT %token RAW_PRINTED GLOB_PRINTED %token COMMAND CLASSIFIED STATE PRINTED TYPED INTERPRETED GLOBALIZED SUBSTITUTED BY AS %token BANGBRACKET HASHBRACKET LBRACKET RBRACKET PIPE ARROW FUN COMMA EQUAL STAR %token LPAREN RPAREN COLON SEMICOLON %token GLOBAL TOP FIRST LAST BEFORE AFTER LEVEL LEFTA RIGHTA NONA %token EOF %type file %start file %% file: | nodes EOF { $1 } ; nodes: | { [] } | node nodes { $1 :: $2 } ; node: | CODE { Code $1 } | COMMENT { Comment $1 } | declare_plugin { $1 } | grammar_extend { $1 } | vernac_extend { $1 } | tactic_extend { $1 } | argument_extend { $1 } | doc_gram { $1 } ; declare_plugin: | 
DECLARE PLUGIN STRING { DeclarePlugin $3 } ; grammar_extend: | GRAMMAR EXTEND qualid_or_ident globals gram_entries END { GramExt { gramext_name = $3; gramext_globals = $4; gramext_entries = $5 } } ; argument_extend: | ARGUMENT EXTEND IDENT typed_opt printed_opt interpreted_opt globalized_opt substituted_opt raw_printed_opt glob_printed_opt tactic_rules END { ArgumentExt { argext_name = $3; argext_rules = $11; argext_rprinter = $9; argext_gprinter = $10; argext_tprinter = $5; argext_interp = $6; argext_glob = $7; argext_subst = $8; argext_type = $4; } } | VERNAC ARGUMENT EXTEND IDENT printed_opt tactic_rules END { VernacArgumentExt { vernacargext_name = $4; vernacargext_printer = $5; vernacargext_rules = $6; } } ; printed_opt: | { None } | PRINTED BY CODE { Some $3 } ; raw_printed_opt: | { None } | RAW_PRINTED BY CODE { Some $3 } ; glob_printed_opt: | { None } | GLOB_PRINTED BY CODE { Some $3 } ; interpreted_modifier_opt: | { None } | LBRACKET IDENT RBRACKET { Some $2 } ; interpreted_opt: | { None } | INTERPRETED interpreted_modifier_opt BY CODE { Some ($2,$4) } ; globalized_opt: | { None } | GLOBALIZED BY CODE { Some $3 } ; substituted_opt: | { None } | SUBSTITUTED BY CODE { Some $3 } ; typed_opt: | { None } | TYPED AS argtype { Some $3 } ; argtype: | IDENT { ExtraArgType $1 } | argtype IDENT { match $2 with | "list" -> ListArgType $1 | "option" -> OptArgType $1 | _ -> raise Parsing.Parse_error } | LPAREN argtype STAR argtype RPAREN { PairArgType ($2, $4) } ; vernac_extend: | VERNAC vernac_entry EXTEND IDENT vernac_classifier vernac_state vernac_rules END { VernacExt { vernacext_name = $4; vernacext_entry = $2; vernacext_class = $5; vernacext_state = $6; vernacext_rules = $7; } } ; vernac_entry: | COMMAND { None } | CODE { Some $1 } ; vernac_classifier: | { ClassifDefault } | CLASSIFIED BY CODE { ClassifCode $3 } | CLASSIFIED AS IDENT { ClassifName $3 } ; vernac_state: | { None } | STATE IDENT { Some $2 } ; vernac_rules: | vernac_rule { [$1] } | vernac_rule vernac_rules { $1 :: $2 } ; vernac_rule: | PIPE vernac_attributes_opt rule_state LBRACKET ext_tokens RBRACKET rule_deprecation rule_classifier ARROW CODE { { vernac_atts = $2; vernac_state = $3; vernac_toks = $5; vernac_depr = $7; vernac_class= $8; vernac_body = $10; } } ; rule_state: | { None } | BANGBRACKET IDENT RBRACKET { Some $2 } ; vernac_attributes_opt: | { None } | HASHBRACKET vernac_attributes RBRACKET { Some $2 } ; vernac_attributes: | vernac_attribute { [$1] } | vernac_attribute SEMICOLON { [$1] } | vernac_attribute SEMICOLON vernac_attributes { $1 :: $3 } ; vernac_attribute: | qualid_or_ident EQUAL qualid_or_ident { ($1, $3) } | qualid_or_ident { ($1, $1) } ; rule_deprecation: | { false } | DEPRECATED { true } ; rule_classifier: | { None } | FUN CODE { Some $2 } ; tactic_extend: | TACTIC EXTEND IDENT tactic_deprecated tactic_level tactic_rules END { TacticExt { tacext_name = $3; tacext_deprecated = $4; tacext_level = $5; tacext_rules = $6 } } ; tactic_deprecated: | { None } | DEPRECATED CODE { Some $2 } ; tactic_level: | { None } | LEVEL INT { Some $2 } ; tactic_rules: | { [] } | tactic_rule tactic_rules { $1 :: $2 } ; tactic_rule: | PIPE LBRACKET ext_tokens RBRACKET ARROW CODE { { tac_toks = $3; tac_body = $6 } } ; ext_tokens: | { [] } | ext_token ext_tokens { $1 :: $2 } ; ext_token: | STRING { ExtTerminal $1 } | IDENT { let e = parse_user_entry $1 "" in ExtNonTerminal (e, TokNone) } | IDENT LPAREN IDENT RPAREN { let e = parse_user_entry $1 "" in ExtNonTerminal (e, TokName $3) } | IDENT LPAREN IDENT COMMA STRING RPAREN { 
let e = parse_user_entry $1 $5 in ExtNonTerminal (e, TokName $3) } ; qualid_or_ident: | QUALID { $1 } | IDENT { $1 } ; globals: | { [] } | GLOBAL COLON idents SEMICOLON { $3 } ; idents: | { [] } | qualid_or_ident idents { $1 :: $2 } ; gram_entries: | { [] } | gram_entry gram_entries { $1 :: $2 } ; gram_entry: | qualid_or_ident COLON reuse LBRACKET LBRACKET rules_opt RBRACKET RBRACKET SEMICOLON { { gentry_name = $1; gentry_rules = GDataReuse ($3, $6); } } | qualid_or_ident COLON position_opt LBRACKET levels RBRACKET SEMICOLON { { gentry_name = $1; gentry_rules = GDataFresh ($3, $5); } } ; reuse: | TOP { None } | LEVEL STRING { Some $2 } ; position_opt: | { None } | position { Some $1 } ; position: | FIRST { First } | LAST { Last } | BEFORE STRING { Before $2 } | AFTER STRING { After $2 } ; string_opt: | { None } | STRING { Some $1 } ; assoc_opt: | { None } | assoc { Some $1 } ; assoc: | LEFTA { LeftA } | RIGHTA { RightA } | NONA { NonA } ; levels: | level { [$1] } | level PIPE levels { $1 :: $3 } ; level: | string_opt assoc_opt LBRACKET rules_opt RBRACKET { { grule_label = $1; grule_assoc = $2; grule_prods = $4; } } ; rules_opt: | { [] } | rules { $1 } ; rules: | rule { [$1] } | rule PIPE rules { $1 :: $3 } ; rule: | symbols_opt ARROW CODE { { gprod_symbs = $1; gprod_body = $3; } } ; symbols_opt: | { [] } | symbols { $1 } ; symbols: | symbol { [$1] } | symbol SEMICOLON symbols { $1 :: $3 } ; symbol: | IDENT EQUAL gram_tokens { (Some $1, $3) } | gram_tokens { (None, $1) } ; gram_token: | qualid_or_ident { GSymbQualid ($1, None) } | qualid_or_ident LEVEL STRING { GSymbQualid ($1, Some $3) } | LPAREN gram_tokens RPAREN { GSymbParen $2 } | LBRACKET rules RBRACKET { GSymbProd $2 } | STRING { GSymbString $1 } ; gram_tokens: | gram_token { [$1] } | gram_token gram_tokens { $1 :: $2 } ; doc_gram: | DOC_GRAMMAR doc_gram_entries { GramExt { gramext_name = ""; gramext_globals=[]; gramext_entries = $2 } } doc_gram_entries: | { [] } | doc_gram_entry doc_gram_entries { $1 :: $2 } ; doc_gram_entry: | qualid_or_ident COLON LBRACKET PIPE doc_gram_rules RBRACKET { { gentry_name = $1; gentry_rules = GDataFresh (None, [{ grule_label = None; grule_assoc = None; grule_prods = $5; }]) } } | qualid_or_ident COLON LBRACKET RBRACKET { { gentry_name = $1; gentry_rules = GDataFresh (None, [{ grule_label = None; grule_assoc = None; grule_prods = []; }]) } } ; doc_gram_rules: | doc_gram_rule { [$1] } | doc_gram_rule PIPE doc_gram_rules { $1 :: $3 } ; doc_gram_rule: | doc_gram_symbols_opt { { gprod_symbs = $1; gprod_body = no_code; } } ; doc_gram_symbols_opt: | { [] } | doc_gram_symbols { $1 } | doc_gram_symbols SEMICOLON { $1 } ; doc_gram_symbols: | doc_gram_symbol { [$1] } | doc_gram_symbols SEMICOLON doc_gram_symbol { $1 @ [$3] } ; doc_gram_symbol: | IDENT EQUAL doc_gram_gram_tokens { (Some $1, $3) } | doc_gram_gram_tokens { (None, $1) } ; doc_gram_gram_tokens: | doc_gram_gram_token { [$1] } | doc_gram_gram_token doc_gram_gram_tokens { $1 :: $2 } ; doc_gram_gram_token: | qualid_or_ident { GSymbQualid ($1, None) } | LPAREN doc_gram_gram_tokens RPAREN { GSymbParen $2 } | LBRACKET doc_gram_rules RBRACKET { GSymbProd $2 } | STRING { GSymbString $1 } ; coq-8.15.0/coqpp/coqpp_parser.ml000066400000000000000000000032171417001151100165330ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* let () = close_in chan in let () = Printf.eprintf "%s\n%!" 
(pr_loc loc) in fatal msg | Parsing.Parse_error -> let () = close_in chan in let loc = Coqpp_lex.loc lexbuf in let () = Printf.eprintf "%s\n%!" (pr_loc loc) in fatal "syntax error" in let () = close_in chan in ans coq-8.15.0/coqpp/coqpp_parser.mli000066400000000000000000000014151417001151100167020ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string val fatal : string -> unit val parse_file : string -> Coqpp_ast.t coq-8.15.0/coqpp/dune000066400000000000000000000004561417001151100143630ustar00rootroot00000000000000(ocamllex coqpp_lex) (ocamlyacc coqpp_parse) (library (name coqpp) (wrapped false) (modules coqpp_ast coqpp_lex coqpp_parse coqpp_parser) (modules_without_implementation coqpp_ast)) (executable (name coqpp_main) (public_name coqpp) (package coq-core) (libraries coqpp) (modules coqpp_main)) coq-8.15.0/default.nix000066400000000000000000000102731417001151100145250ustar00rootroot00000000000000# How to use? # If you have Nix installed, you can get in an environment with everything # needed to compile Coq and CoqIDE by running: # $ nix-shell # at the root of the Coq repository. # How to tweak default arguments? # nix-shell supports the --arg option (see Nix doc) that allows you for # instance to do this: # $ nix-shell --arg ocamlPackages "(import {}).ocaml-ng.ocamlPackages_4_05" --arg buildIde false # You can also compile Coq and "install" it by running: # $ make clean # (only needed if you have left-over compilation files) # $ nix-build # at the root of the Coq repository. # nix-build also supports the --arg option, so you will be able to do: # $ nix-build --arg doInstallCheck false # if you want to speed up things by not running the test-suite. # Once the build is finished, you will find, in the current directory, # a symlink to where Coq was installed. { pkgs ? import ./dev/nixpkgs.nix {} , ocamlPackages ? pkgs.ocaml-ng.ocamlPackages_4_09 , buildIde ? true , buildDoc ? true , doInstallCheck ? true , shell ? false # We don't use lib.inNixShell because that would also apply # when in a nix-shell of some package depending on this one. , coq-version ? "8.14-git" }: with pkgs; with pkgs.lib; stdenv.mkDerivation rec { name = "coq"; buildInputs = [ hostname python3 time flock dune_2 # coq-makefile timing tools ] ++ optionals buildIde [ ocamlPackages.lablgtk3-sourceview3 glib gnome3.defaultIconTheme wrapGAppsHook ] ++ optionals buildDoc [ # Sphinx doc dependencies pkgconfig (python3.withPackages (ps: [ ps.sphinx ps.sphinx_rtd_theme ps.pexpect ps.beautifulsoup4 ps.antlr4-python3-runtime ps.sphinxcontrib-bibtex ])) antlr4 ocamlPackages.odoc ] ++ optionals doInstallCheck ( # Test-suite dependencies # ncurses is required to build an OCaml REPL optional (!versionAtLeast ocaml.version "4.07") ncurses ++ [ ocamlPackages.ounit rsync which ] ) ++ optionals shell ( [ jq curl gitFull gnupg ] # Dependencies of the merging script ++ (with ocamlPackages; [ merlin ocp-indent ocp-index utop ocamlformat ]) # Dev tools ++ [ graphviz ] # Useful for STM debugging ); # OCaml and findlib are needed so that native_compute works # This follows a similar change in the nixpkgs repo (cf. NixOS/nixpkgs#101058) # ocamlfind looks for zarith when building plugins # This follows a similar change in the nixpkgs repo (cf. 
NixOS/nixpkgs#94230) propagatedBuildInputs = with ocamlPackages; [ ocaml findlib zarith ]; propagatedUserEnvPkgs = with ocamlPackages; [ ocaml findlib ]; src = if shell then null else with builtins; filterSource (path: _: !elem (baseNameOf path) [".git" "result" "bin" "_build" "_build_ci" "_build_vo" "nix"]) ./.; preConfigure = '' patchShebangs dev/tools/ doc/stdlib ''; prefixKey = "-prefix "; enableParallelBuilding = true; buildFlags = [ "world" "byte" ] ++ optional buildDoc "doc-html"; installTargets = [ "install" "install-byte" ] ++ optional buildDoc "install-doc-html"; createFindlibDestdir = !shell; postInstall = "ln -s $out/lib/coq-core $OCAMLFIND_DESTDIR/coq-core"; inherit doInstallCheck; preInstallCheck = '' patchShebangs tools/ patchShebangs test-suite/ export OCAMLPATH=$OCAMLFIND_DESTDIR:$OCAMLPATH ''; installCheckTarget = [ "check" ]; passthru = { inherit coq-version ocamlPackages; dontFilter = true; # Useful to use mkCoqPackages from }; setupHook = writeText "setupHook.sh" " addCoqPath () { if test -d \"$1/lib/coq/${coq-version}/user-contrib\"; then export COQPATH=\"\${COQPATH-}\${COQPATH:+:}$1/lib/coq/${coq-version}/user-contrib/\" fi } addEnvHooks \"$targetOffset\" addCoqPath "; meta = { description = "Coq proof assistant"; longDescription = '' Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. ''; homepage = http://coq.inria.fr; license = licenses.lgpl21; platforms = platforms.unix; }; } coq-8.15.0/dev/000077500000000000000000000000001417001151100131345ustar00rootroot00000000000000coq-8.15.0/dev/Bugzilla_Coq_autolink.user.js000066400000000000000000000011511417001151100207260ustar00rootroot00000000000000// ==UserScript== // @name Bugzilla Coq autolink // @namespace CoqScript // @include https://coq.inria.fr/bugs/* // @description Makes #XXXX into links to Github Coq PRs // @version 1 // @grant none // ==/UserScript== var regex = /#(\d+)/g; var substr = '$&'; function doNode(node) { node.innerHTML = node.innerHTML.replace(regex,substr); } var comments = document.getElementsByClassName("bz_comment_table")[0]; var pars = comments.getElementsByClassName("bz_comment_text"); for(var j=0; j 1) { var range = document.createRange(); var start = content.search(regex); var end = start + matches[0].length; range.setStart(node, start); range.setEnd(node, end); var linkNode = document.createElement("a"); linkNode.href = "https://coq.inria.fr/bugs/show_bug.cgi?id=" + matches[1]; range.surroundContents(linkNode); //handle multiple matches in one text node doNode(linkNode.parentNode); } } } for(var i=0; i Nametab.shortest_qualid_of_global ?loc Id.Set.empty r);; let go () = Coqloop.(loop ~opts:Option.(get !drop_args) ~state:Option.(get !drop_last_doc)) let _ = print_string ("\n\tOcaml toplevel with Coq printers and utilities (use go();; to exit)\n\n"); flush_all() (* Local Variables: *) (* mode: tuareg *) (* End: *) coq-8.15.0/dev/bench/000077500000000000000000000000001417001151100142135ustar00rootroot00000000000000coq-8.15.0/dev/bench/bench.sh000077500000000000000000000540621417001151100156400ustar00rootroot00000000000000#! /usr/bin/env bash # ASSUMPTIONS: # - the OPAM packages, specified by the user, are topologically sorted wrt. to the dependency relationship. # - all the variables below are set. 
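# (Here "topologically sorted" means that every package appears after the
#  packages it depends on: e.g. coq-mathcomp-fingroup depends on
#  coq-mathcomp-ssreflect, so it must be listed after it, as in the default
#  coq_opam_packages value below.)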
set -e BENCH_DEBUG= r='\033[0m' # reset (all attributes off) b='\033[1m' # bold u='\033[4m' # underline nl=$'\n' bt='`' # backtick start_code_block='```' end_code_block='```' number_of_processors=$(cat /proc/cpuinfo | grep '^processor *' | wc -l) program_name="$0" program_path=$(readlink -f "${program_name%/*}") coqbot_url_prefix="https://coqbot.herokuapp.com/pendulum/" # Check that the required arguments are provided check_variable () { if [ ! -v "$1" ] then echo "Variable $1 should be set" exit 1 fi } echo $PWD #check_variable "BUILD_ID" #check_variable "BUILD_URL" #check_variable "JOB_NAME" #check_variable "JENKINS_URL" #check_variable "CI_JOB_URL" : "${coq_pr_number:=}" : "${coq_pr_comment_id:=}" : "${new_ocaml_switch:=ocaml-base-compiler.4.07.1}" : "${old_ocaml_switch:=ocaml-base-compiler.4.07.1}" : "${new_coq_repository:=https://gitlab.com/coq/coq.git}" : "${old_coq_repository:=https://gitlab.com/coq/coq.git}" : "${new_coq_opam_archive_git_uri:=https://github.com/coq/opam-coq-archive.git}" : "${old_coq_opam_archive_git_uri:=https://github.com/coq/opam-coq-archive.git}" : "${new_coq_opam_archive_git_branch:=master}" : "${old_coq_opam_archive_git_branch:=master}" : "${num_of_iterations:=1}" : "${timeout:=3h}" : "${coq_opam_packages:=coq-bignums coq-hott coq-performance-tests-lite coq-engine-bench-lite coq-mathcomp-ssreflect coq-mathcomp-fingroup coq-mathcomp-algebra coq-mathcomp-solvable coq-mathcomp-field coq-mathcomp-character coq-mathcomp-odd-order coq-math-classes coq-corn coq-flocq3 coq-compcert coq-geocoq coq-color coq-coqprime coq-coqutil coq-bedrock2 coq-rewriter coq-fiat-core coq-fiat-parsers coq-fiat-crypto coq-unimath coq-coquelicot coq-iris-examples coq-verdi coq-verdi-raft coq-fourcolor coq-rewriter-perf-SuperFast coq-perennial coq-vst}" new_coq_commit=$(git rev-parse HEAD^2) old_coq_commit=$(git merge-base HEAD^1 $new_coq_commit) if echo "$num_of_iterations" | grep '^[1-9][0-9]*$' 2> /dev/null > /dev/null; then : else echo echo "ERROR: num_of_iterations \"$num_of_iterations\" is not a positive integer." > /dev/stderr print_man_page_hint exit 1 fi bench_dirname="_bench" mkdir -p "${bench_dirname}" working_dir="$PWD/${bench_dirname}" log_dir=$working_dir/logs mkdir "$log_dir" export COQ_LOG_DIR=$log_dir echo "DEBUG: ocaml -version = $(ocaml -version)" echo "DEBUG: working_dir = $working_dir" echo "DEBUG: new_ocaml_switch = $new_ocaml_switch" echo "DEBUG: new_coq_repository = $new_coq_repository" echo "DEBUG: new_coq_commit = $new_coq_commit" echo "DEBUG: new_coq_opam_archive_git_uri = $new_coq_opam_archive_git_uri" echo "DEBUG: new_coq_opam_archive_git_branch = $new_coq_opam_archive_git_branch" echo "DEBUG: old_ocaml_switch = $old_ocaml_switch" echo "DEBUG: old_coq_repository = $old_coq_repository" echo "DEBUG: old_coq_commit = $old_coq_commit" echo "DEBUG: old_coq_opam_archive_git_uri = $old_coq_opam_archive_git_uri" echo "DEBUG: old_coq_opam_archive_git_branch = $old_coq_opam_archive_git_branch" echo "DEBUG: num_of_iterations = $num_of_iterations" echo "DEBUG: coq_opam_packages = $coq_opam_packages" echo "DEBUG: coq_pr_number = $coq_pr_number" echo "DEBUG: coq_pr_comment_id = $coq_pr_comment_id" # -------------------------------------------------------------------------------- # Some sanity checks of command-line arguments provided by the user that can be done right now. if which perf > /dev/null; then echo -n else echo > /dev/stderr echo "ERROR: \"perf\" program is not available." 
> /dev/stderr echo > /dev/stderr exit 1 fi if which curl > /dev/null; then : else echo > /dev/stderr echo "ERROR: \"curl\" program is not available." > /dev/stderr echo > /dev/stderr exit 1 fi if which du > /dev/null; then : else echo > /dev/stderr echo "ERROR: \"du\" program is not available." > /dev/stderr echo > /dev/stderr exit 1 fi if [ ! -e "$working_dir" ]; then echo > /dev/stderr echo "ERROR: \"$working_dir\" does not exist." > /dev/stderr echo > /dev/stderr exit 1 fi if [ ! -d "$working_dir" ]; then echo > /dev/stderr echo "ERROR: \"$working_dir\" is not a directory." > /dev/stderr echo > /dev/stderr exit 1 fi if [ ! -w "$working_dir" ]; then echo > /dev/stderr echo "ERROR: \"$working_dir\" is not writable." > /dev/stderr echo > /dev/stderr exit 1 fi coq_opam_packages_on_separate_lines=$(echo "$coq_opam_packages" | sed 's/ /\n/g') if [ $(echo "$coq_opam_packages_on_separate_lines" | wc -l) != $(echo "$coq_opam_packages_on_separate_lines" | sort | uniq | wc -l) ]; then echo "ERROR: The provided set of OPAM packages contains duplicates." exit 1 fi # -------------------------------------------------------------------------------- # Tell coqbot to update the initial comment, if we know which one to update function coqbot_update_comment() { is_done="$1" comment_body="$2" uninstallable_packages="$3" if [ ! -z "${coq_pr_number}" ]; then comment_text="" artifact_text="" if [ -z "${is_done}" ]; then comment_text="in progress, " artifact_text="eventually " else comment_text="" artifact_text="" fi comment_text="Benchmarking ${comment_text}log available [here](${CI_JOB_URL}) ([raw log here](${CI_JOB_URL}/raw)), artifacts ${artifact_text}available for [download](${CI_JOB_URL}/artifacts/download) and [browsing](${CI_JOB_URL}/artifacts/browse)" if [ ! -z "${comment_body}" ]; then comment_text="${comment_text}${nl}${start_code_block}${nl}${comment_body}${nl}${end_code_block}" fi if [ ! -z "${uninstallable_packages}" ]; then comment_text="${comment_text}${nl}The following packages failed to install: ${uninstallable_packages}" fi comment_text="${comment_text}${nl}${nl}
Old Coq version ${old_coq_commit}" comment_text="${comment_text}${nl}${nl}${start_code_block}${nl}$(git log -n 1 "${old_coq_commit}")${nl}${end_code_block}${nl}
" comment_text="${comment_text}${nl}${nl}
New Coq version ${new_coq_commit}" comment_text="${comment_text}${nl}${nl}${start_code_block}${nl}$(git log -n 1 "${new_coq_commit}")${nl}${end_code_block}${nl}
" comment_text="${comment_text}${nl}${nl}[Diff: ${bt}${old_coq_commit}..${new_coq_commit}${bt}](https://github.com/coq/coq/compare/${old_coq_commit}..${new_coq_commit})" # if there's a comment id, we update the comment while we're # in progress; otherwise, we wait until the end to post a new # comment if [ ! -z "${coq_pr_comment_id}" ]; then # Tell coqbot to update the in-progress comment curl -X POST --data-binary "${coq_pr_number}${nl}${coq_pr_comment_id}${nl}${comment_text}" "${coqbot_url_prefix}/update-comment" elif [ ! -z "${is_done}" ]; then # Tell coqbot to post a new comment that we're done benchmarking curl -X POST --data-binary "${coq_pr_number}${nl}${comment_text}" "${coqbot_url_prefix}/new-comment" fi if [ ! -z "${is_done}" ]; then # Tell coqbot to remove the `needs: benchmarking` label curl -X POST --data-binary "${coq_pr_number}" "${coqbot_url_prefix}/benchmarking-done" fi fi } # initial update to the comment, to say that we're in progress coqbot_update_comment "" "" "" # -------------------------------------------------------------------------------- zulip_post="" if [[ $ZULIP_BENCH_BOT ]]; then pr_full=$(git log -n 1 --pretty=%s) pr_full=${pr_full#"[CI merge] PR #"} pr_num=${pr_full%%:*} pr_msg=${pr_full#*:} zulip_header="Bench at $CI_JOB_URL Testing [$pr_msg](https://github.com/coq/coq/pull/$pr_num) On packages $coq_opam_packages " # 24008 is the "github notifications" stream resp=$(curl -sSX POST https://coq.zulipchat.com/api/v1/messages \ -u "$ZULIP_BENCH_BOT" \ --data-urlencode type=stream \ --data-urlencode to='240008' \ --data-urlencode subject='Bench Notifications' \ --data-urlencode content="$zulip_header") zulip_post=$(echo "$resp" | jq .id) case "$zulip_post" in ''|*[!0-9]*) # not an int echo "Failed to post to zulip: $resp" zulip_post="" ;; esac fi zulip_edit() { if ! [[ $zulip_post ]]; then return; fi ending=$1 if [[ $rendered_results ]]; then msg="$zulip_header ~~~ $rendered_results ~~~ $ending " else msg="$zulip_header $ending " fi curl -sSX PATCH https://coq.zulipchat.com/api/v1/messages/"$zulip_post" \ -u "$ZULIP_BENCH_BOT" \ --data-urlencode content="$msg" >/dev/null } zulip_autofail() { code=$? com=$BASH_COMMAND zulip_edit "Failed '$com' with exit code $code." } if [[ $zulip_post ]]; then trap zulip_autofail ERR; fi # Clone the indicated git-repository. 
coq_dir="$working_dir/coq" git clone -q "$new_coq_repository" "$coq_dir" cd "$coq_dir" git remote rename origin new_coq_repository git remote add old_coq_repository "$old_coq_repository" git fetch -q "$old_coq_repository" git checkout -q $new_coq_commit coq_opam_version=dev # -------------------------------------------------------------------------------- new_opam_root="$working_dir/opam.NEW" old_opam_root="$working_dir/opam.OLD" # -------------------------------------------------------------------------------- old_coq_opam_archive_dir="$working_dir/old_coq_opam_archive" git clone -q --depth 1 -b "$old_coq_opam_archive_git_branch" "$old_coq_opam_archive_git_uri" "$old_coq_opam_archive_dir" new_coq_opam_archive_dir="$working_dir/new_coq_opam_archive" git clone -q --depth 1 -b "$new_coq_opam_archive_git_branch" "$new_coq_opam_archive_git_uri" "$new_coq_opam_archive_dir" initial_opam_packages="num ocamlfind dune" # Create an opam root and install Coq # $1 = root_name {ex: NEW / OLD} # $2 = compiler name # $3 = git hash of Coq to be installed # $4 = directory of coq opam archive create_opam() { local RUNNER="$1" local OPAM_DIR="$working_dir/opam.$RUNNER" local OPAM_COMP="$2" local COQ_HASH="$3" local OPAM_COQ_DIR="$4" export OPAMROOT="$OPAM_DIR" export COQ_RUNNER="$RUNNER" opam init --disable-sandboxing -qn -j$number_of_processors --bare # Allow beta compiler switches opam repo add -q --set-default beta https://github.com/ocaml/ocaml-beta-repository.git # Allow experimental compiler switches opam repo add -q --set-default ocaml-pr https://github.com/ejgallego/ocaml-pr-repository.git # Rest of default switches opam repo add -q --set-default iris-dev "https://gitlab.mpi-sws.org/FP/opam-dev.git" opam switch create -qy -j$number_of_processors "$OPAM_COMP" eval $(opam env) # For some reason opam guesses an incorrect upper bound on the # number of jobs available on Travis, so we set it here manually: opam config set-global jobs $number_of_processors if [ ! -z "$BENCH_DEBUG" ]; then opam config list; fi opam repo add -q --this-switch coq-extra-dev "$OPAM_COQ_DIR/extra-dev" opam repo add -q --this-switch coq-released "$OPAM_COQ_DIR/released" # Pinning for packages that are not in a repository opam pin add -ynq coq-perennial.dev git+https://github.com/mit-pdos/perennial#coq/tested opam install -qy -j$number_of_processors $initial_opam_packages if [ ! -z "$BENCH_DEBUG" ]; then opam repo list; fi cd "$coq_dir" echo "$1_coq_commit = $COQ_HASH" echo "wrap-build-commands: [\"$program_path/wrapper.sh\"]" >> "$OPAM_DIR/config" git checkout -q $COQ_HASH COQ_HASH_LONG=$(git log --pretty=%H | head -n 1) echo "$1_coq_commit_long = $COQ_HASH_LONG" for package in coq-core coq-stdlib coq; do export COQ_OPAM_PACKAGE=$package export COQ_ITERATION=1 _RES=0 opam pin add -y -b -j "$number_of_processors" --kind=path $package.dev . \ 3>$log_dir/$package.$RUNNER.opam_install.1.stdout.log 1>&3 \ 4>$log_dir/$package.$RUNNER.opam_install.1.stderr.log 2>&4 || \ _RES=$? if [ $_RES = 0 ]; then echo "$package ($RUNNER) installed successfully" else echo "ERROR: \"opam install $package.$coq_opam_version\" has failed (for the $RUNNER commit = $COQ_HASH_LONG)." zulip_edit "Bench failed: could not install $package ($RUNNER)." 
exit 1 fi # we don't multi compile coq for now (TODO some other time) # the render needs all the files so copy them around for it in $(seq 2 $num_of_iterations); do cp "$log_dir/$package.$RUNNER.1.time" "$log_dir/$package.$RUNNER.$it.time" cp "$log_dir/$package.$RUNNER.1.perf" "$log_dir/$package.$RUNNER.$it.perf" done done } # Create an OPAM-root to which we will install the NEW version of Coq. create_opam "NEW" "$new_ocaml_switch" "$new_coq_commit" "$new_coq_opam_archive_dir" new_coq_commit_long="$COQ_HASH_LONG" # Create an OPAM-root to which we will install the OLD version of Coq. create_opam "OLD" "$old_ocaml_switch" "$old_coq_commit" "$old_coq_opam_archive_dir" old_coq_commit_long="$COQ_HASH_LONG" # Packages which appear in the rendered table # Deliberately don't include the dummy "coq" package installable_coq_opam_packages="coq-core coq-stdlib" echo "DEBUG: $program_path/render_results $log_dir $num_of_iterations $new_coq_commit_long $old_coq_commit_long 0 user_time_pdiff $installable_coq_opam_packages" rendered_results="$($program_path/render_results "$log_dir" $num_of_iterations $new_coq_commit_long $old_coq_commit_long 0 user_time_pdiff $installable_coq_opam_packages)" echo "${rendered_results}" zulip_edit "Benching continues..." # -------------------------------------------------------------------------------- # Measure the compilation times of the specified OPAM packages in both switches # Sort the opam packages sorted_coq_opam_packages=$("${program_path}/sort-by-deps.sh" ${coq_opam_packages}) echo "sorted_coq_opam_packages = ${sorted_coq_opam_packages}" # Generate per line timing info in devs that use coq_makefile export TIMING=1 for coq_opam_package in $sorted_coq_opam_packages; do export COQ_OPAM_PACKAGE=$coq_opam_package if [ ! -z "$BENCH_DEBUG" ]; then opam list opam show $coq_opam_package || continue 2 else # cause to skip with error if unknown package opam show $coq_opam_package >/dev/null || continue 2 fi echo "coq_opam_package = $coq_opam_package" for RUNNER in NEW OLD; do export COQ_RUNNER=$RUNNER # perform measurements for the NEW/OLD commit (provided by the user) if [ $RUNNER = "NEW" ]; then export OPAMROOT="$new_opam_root" echo "Testing NEW commit: $(date)" else export OPAMROOT="$old_opam_root" echo "Testing OLD commit: $(date)" fi eval $(opam env) # If a given OPAM-package was already installed (as a # dependency of some OPAM-package that we have benchmarked # before), remove it. opam uninstall -q $coq_opam_package >/dev/null 2>&1 # OPAM 2.0 likes to ignore the -j when it feels like :S so we # workaround that here. opam config set-global jobs $number_of_processors opam install $coq_opam_package -v -b -j$number_of_processors --deps-only -y \ 3>$log_dir/$coq_opam_package.$RUNNER.opam_install.deps_only.stdout.log 1>&3 \ 4>$log_dir/$coq_opam_package.$RUNNER.opam_install.deps_only.stderr.log 2>&4 || continue 2 opam config set-global jobs 1 if [ ! -z "$BENCH_DEBUG" ]; then ls -l $working_dir; fi for iteration in $(seq $num_of_iterations); do export COQ_ITERATION=$iteration _RES=0 timeout "$timeout" opam install -v -b -j1 $coq_opam_package \ 3>$log_dir/$coq_opam_package.$RUNNER.opam_install.$iteration.stdout.log 1>&3 \ 4>$log_dir/$coq_opam_package.$RUNNER.opam_install.$iteration.stderr.log 2>&4 || \ _RES=$? if [ $_RES = 0 ]; then echo $_RES > $log_dir/$coq_opam_package.$RUNNER.opam_install.$iteration.exit_status # "opam install" was successful. 
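# ------------------------------------------------------------------------------
# Note on the redirections used for the "opam install" calls above and below
# (illustrative sketch only; the command and file names are examples): the
# pattern
#
#   _RES=0
#   some_command \
#     3>cmd.stdout.log 1>&3 \
#     4>cmd.stderr.log 2>&4 || _RES=$?
#
# opens descriptor 3 on the stdout log and duplicates stdout onto it, and
# likewise descriptor 4 / stderr, so the two streams end up in separate log
# files while "|| _RES=$?" still records the command's exit status without
# aborting the script.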
# Remove the benchmarked OPAM-package, unless this is the # very last iteration (we want to keep this OPAM-package # because other OPAM-packages we will benchmark later # might depend on it --- it would be a waste of time to # remove it now just to install it later) if [ $iteration != $num_of_iterations ]; then opam uninstall -q $coq_opam_package fi else # "opam install" failed. echo $_RES > $log_dir/$coq_opam_package.$RUNNER.opam_install.$iteration.exit_status continue 3 fi done done installable_coq_opam_packages="$installable_coq_opam_packages $coq_opam_package" # -------------------------------------------------------------- cat $log_dir/$coq_opam_package.$RUNNER.1.*.time || true cat $log_dir/$coq_opam_package.$RUNNER.1.*.perf || true # Print the intermediate results after we finish benchmarking each OPAM package if [ "$coq_opam_package" = "$(echo $sorted_coq_opam_packages | sed 's/ /\n/g' | tail -n 1)" ]; then # It does not make sense to print the intermediate results when # we finished bechmarking the very last OPAM package because the # next thing will do is that we will print the final results. # It would look lame to print the same table twice. : else echo "DEBUG: $program_path/render_results "$log_dir" $num_of_iterations $new_coq_commit_long $old_coq_commit_long 0 user_time_pdiff $installable_coq_opam_packages" rendered_results="$($program_path/render_results "$log_dir" $num_of_iterations $new_coq_commit_long $old_coq_commit_long 0 user_time_pdiff $installable_coq_opam_packages)" echo "${rendered_results}" # update the comment coqbot_update_comment "" "${rendered_results}" "" zulip_edit "Benching continues..." fi # Generate HTML report for LAST run # N.B. Not all packages end in .dev, e.g., coq-lambda-rust uses .dev.timestamp. # So we use a wildcard to catch such packages. This will have to be updated if # ever there is a package that uses some different naming scheme. new_base_path=$new_opam_root/$new_ocaml_switch/.opam-switch/build/$coq_opam_package.dev*/ old_base_path=$old_opam_root/$old_ocaml_switch/.opam-switch/build/$coq_opam_package.dev*/ for vo in $(cd $new_base_path/; find . -name '*.vo'); do if [ -e $old_base_path/$vo ]; then echo "$coq_opam_package/$vo $(stat -c%s $old_base_path/$vo) $(stat -c%s $new_base_path/$vo)" >> "$log_dir/vosize.log" fi if [ -e $old_base_path/${vo%%o}.timing ] && \ [ -e $new_base_path/${vo%%o}.timing ]; then mkdir -p $working_dir/html/$coq_opam_package/$(dirname $vo)/ # NB: sometimes randomly fails $program_path/timelog2html $new_base_path/${vo%%o} \ $old_base_path/${vo%%o}.timing \ $new_base_path/${vo%%o}.timing > \ $working_dir/html/$coq_opam_package/${vo%%o}.html || echo "Failed (code $?):" $program_path/timelog2html $new_base_path/${vo%%o} \ $old_base_path/${vo%%o}.timing \ $new_base_path/${vo%%o}.timing fi done done # Since we do not upload all files, store a list of the files # available so that if we at some point want to tweak which files we # upload, we'll know which ones are available for upload du -ha "$working_dir" > "$working_dir/files.listing" # The following directories in $working_dir are no longer used: # # - coq, opam.OLD, opam.NEW # Measured data for each `$coq_opam_package`, `$iteration`, `status \in {NEW,OLD}`: # # - $working_dir/$coq_opam_package.$status.$iteration.time # => output of /usr/bin/time --format="%U" ... # # - $working_dir/$coq_opam_package.NEW.$iteration.perf # => output of perf stat -e instructions:u,cycles:u ... # # The next script processes all these files and prints results in a table. 
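# ------------------------------------------------------------------------------
# For reference (illustrative only; the real files are produced via
# dev/bench/wrapper.sh during the timed "opam install" runs above): a single
# data point of the shape described in the previous comment can be reproduced
# by hand roughly as
#
#   /usr/bin/time -o example.time --format="%U %M %F" \
#     perf stat -e instructions:u,cycles:u -o example.perf \
#     coqc some_file.v
#
# where example.time then holds "user_time max_resident_mem major_page_faults"
# on a single line and example.perf holds the perf summary from which
# render_results extracts the "instructions:u" and "cycles:u" counters.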
echo "INFO: workspace = ${CI_JOB_URL}/artifacts/browse/${bench_dirname}" # Print the final results. if [ -z "$installable_coq_opam_packages" ]; then # Tell the user that none of the OPAM-package(s) the user provided # /are installable. printf "\n\nINFO: failed to install: $sorted_coq_opam_packages" coqbot_update_comment "done" "" "$sorted_coq_opam_packages" exit 1 fi echo "DEBUG: $program_path/render_results $log_dir $num_of_iterations $new_coq_commit_long $old_coq_commit_long 0 user_time_pdiff $installable_coq_opam_packages" rendered_results="$($program_path/render_results "$log_dir" $num_of_iterations $new_coq_commit_long $old_coq_commit_long 0 user_time_pdiff $installable_coq_opam_packages)" echo "${rendered_results}" echo "INFO: per line timing: ${CI_JOB_URL}/artifacts/browse/${bench_dirname}/html/" cd "$coq_dir" echo INFO: Old Coq version git log -n 1 "$old_coq_commit" echo INFO: New Coq version git log -n 1 "$new_coq_commit" not_installable_coq_opam_packages=$(comm -23 <(echo $sorted_coq_opam_packages | sed 's/ /\n/g' | sort | uniq) <(echo $installable_coq_opam_packages | sed 's/ /\n/g' | sort | uniq) | sed 's/\t//g') coqbot_update_comment "done" "${rendered_results}" "${not_installable_coq_opam_packages}" if [ -n "$not_installable_coq_opam_packages" ]; then # Tell the user that some of the provided OPAM-package(s) # is/are not installable. printf '\n\nINFO: failed to install %s\n' "$not_installable_coq_opam_packages" zulip_edit "Bench complete, failed to install packages: $not_installable_coq_opam_packages" exit 1 fi zulip_edit "Bench complete: all packages successfully installed." coq-8.15.0/dev/bench/gitlab-bench.yml000066400000000000000000000010071417001151100172530ustar00rootroot00000000000000 bench: stage: build needs: [] when: manual before_script: - printenv -0 | sort -z | tr '\0' '\n' script: dev/bench/bench.sh tags: - timing variables: GIT_DEPTH: "" artifacts: name: "$CI_JOB_NAME" paths: - _bench/html/**/*.v.html - _bench/logs - _bench/files.listing - _bench/opam.NEW/**/*.log - _bench/opam.NEW/**/*.timing - _bench/opam.OLD/**/*.log - _bench/opam.OLD/**/*.timing when: always expire_in: 1 year environment: bench coq-8.15.0/dev/bench/render_results000077500000000000000000000355461417001151100172160ustar00rootroot00000000000000#! /usr/bin/env ocaml (* ASSUMPTIONS: - the 1-st command line argument (working directory): - designates an existing readable directory - which contains *.time and *.perf files produced by bench.sh script - the 2-nd command line argument (number of iterations): - is a positive integer - the 3-rd command line argument (minimal user time): - is a positive floating point number - the 4-th command line argument determines the name of the column according to which the resulting table will be sorted. 
Valid values are: - package_name - user_time_pdiff - the rest of the command line-arguments - are names of benchamarked Coq OPAM packages for which bench.sh script generated *.time and *.perf files *) #use "topfind";; #require "unix";; #print_depth 100000000;; #print_length 100000000;; open Printf open Unix ;; let _ = Printexc.record_backtrace true ;; type ('a,'b) pkg_timings = { user_time : 'a; num_instr : 'b; num_cycles : 'b; num_mem : 'b; num_faults : 'b; } ;; let reduce_pkg_timings (m_f : 'a list -> 'c) (m_a : 'b list -> 'd) (t : ('a,'b) pkg_timings list) : ('c,'d) pkg_timings = { user_time = m_f @@ List.map (fun x -> x.user_time) t ; num_instr = m_a @@ List.map (fun x -> x.num_instr) t ; num_cycles = m_a @@ List.map (fun x -> x.num_cycles) t ; num_mem = m_a @@ List.map (fun x -> x.num_mem) t ; num_faults = m_a @@ List.map (fun x -> x.num_faults) t } ;; (******************************************************************************) (* BEGIN Copied from batteries, to remove *) (******************************************************************************) let run_and_read cmd = (* This code is before the open of BatInnerIO to avoid using batteries' wrapped IOs *) let string_of_file fn = let buff_size = 1024 in let buff = Buffer.create buff_size in let ic = open_in fn in let line_buff = Bytes.create buff_size in begin let was_read = ref (input ic line_buff 0 buff_size) in while !was_read <> 0 do Buffer.add_subbytes buff line_buff 0 !was_read; was_read := input ic line_buff 0 buff_size; done; close_in ic; end; Buffer.contents buff in let tmp_fn = Filename.temp_file "" "" in let cmd_to_run = cmd ^ " > " ^ tmp_fn in let status = Unix.system cmd_to_run in let output = string_of_file tmp_fn in Unix.unlink tmp_fn; (status, output) ;; let ( %> ) f g x = g (f x) let run = run_and_read %> snd module Float = struct let nan = Pervasives.nan end module List = struct include List let rec init_tailrec_aux acc i n f = if i >= n then acc else init_tailrec_aux (f i :: acc) (i+1) n f let rec init_aux i n f = if i >= n then [] else let r = f i in r :: init_aux (i+1) n f let rev_init_threshold = match Sys.backend_type with | Sys.Native | Sys.Bytecode -> 10_000 (* We don't known the size of the stack, better be safe and assume it's small. 
*) | Sys.Other _ -> 50 let init len f = if len < 0 then invalid_arg "List.init" else if len > rev_init_threshold then rev (init_tailrec_aux [] 0 len f) else init_aux 0 len f let rec drop n = function | _ :: l when n > 0 -> drop (n-1) l | l -> l let reduce f = function | [] -> invalid_arg "List.reduce: Empty List" | h :: t -> fold_left f h t let min l = reduce Pervasives.min l let max l = reduce Pervasives.max l end ;; module String = struct include String let rchop ?(n = 1) s = if n < 0 then invalid_arg "String.rchop: number of characters to chop is negative" else let slen = length s in if slen <= n then "" else sub s 0 (slen - n) end ;; module Table : sig type header = string type row = string list list val print : header list -> row -> row list -> string end = struct type header = string type row = string list list let val_padding = 2 (* Padding between data in the same row *) let row_padding = 1 (* Padding between rows *) let homogeneous b = if b then () else failwith "Heterogeneous data" let vert_split (ls : 'a list list) = let split l = match l with | [] -> failwith "vert_split" | x :: l -> (x, l) in let ls = List.map split ls in List.split ls let justify n s = let len = String.length s in let () = assert (len <= n) in let lft = (n - len) / 2 in let rgt = n - lft - len in String.make lft ' ' ^ s ^ String.make rgt ' ' let justify_row layout data = let map n s = let len = String.length s in let () = assert (len <= n) in (* Right align *) let pad = n - len in String.make pad ' ' ^ s in let data = List.map2 map layout data in String.concat (String.make val_padding ' ') data let angle hkind vkind = match hkind, vkind with | `Lft, `Top -> "┌" | `Rgt, `Top -> "┐" | `Mid, `Top -> "┬" | `Lft, `Mid -> "├" | `Rgt, `Mid -> "┤" | `Mid, `Mid -> "┼" | `Lft, `Bot -> "└" | `Rgt, `Bot -> "┘" | `Mid, `Bot -> "┴" let print_separator vkind col_size = let rec dashes n = if n = 0 then "" else "─" ^ dashes (n - 1) in let len = List.length col_size in let pad = dashes row_padding in let () = assert (0 < len) in let map n = dashes n in angle `Lft vkind ^ pad ^ String.concat (pad ^ angle `Mid vkind ^ pad) (List.map map col_size) ^ pad ^ angle `Rgt vkind let print_blank col_size = let len = List.length col_size in let () = assert (0 < len) in let pad = String.make row_padding ' ' in let map n = String.make n ' ' in "│" ^ pad ^ String.concat (pad ^ "│" ^ pad) (List.map map col_size) ^ pad ^ "│" let print_row row = let len = List.length row in let () = assert (0 < len) in let pad = String.make row_padding ' ' in "│" ^ pad ^ String.concat (pad ^ "│" ^ pad) row ^ pad ^ "│" (* Invariant : all rows must have the same shape *) let print (headers : header list) (top : row) (rows : row list) = (* Sanitize input *) let ncolums = List.length headers in let shape = ref None in let check row = let () = homogeneous (List.length row = ncolums) in let rshape : int list = List.map (fun data -> List.length data) row in match !shape with | None -> shape := Some rshape | Some s -> homogeneous (rshape = s) in let () = List.iter check rows in (* Compute layout *) let rec layout n (rows : row list) = if n = 0 then [] else let (col, rows) = vert_split rows in let ans = layout (n - 1) rows in let data = ref None in let iter args = let size = List.map String.length args in match !data with | None -> data := Some size | Some s -> data := Some (List.map2 (fun len1 len2 -> max len1 len2) s size) in let () = List.iter iter col in let data = match !data with None -> [] | Some s -> s in data :: ans in let layout = layout ncolums (top::rows) in 
let map hd shape = let data_size = match shape with | [] -> 0 | n :: shape -> List.fold_left (fun accu n -> accu + n + val_padding) n shape in max (String.length hd) data_size in let col_size = List.map2 map headers layout in (* Justify the data *) let headers = List.map2 justify col_size headers in let top = List.map2 justify col_size (List.map2 justify_row layout top) in let rows = List.map (fun row -> List.map2 justify col_size (List.map2 justify_row layout row)) rows in (* Print the table *) let lines = print_separator `Top col_size :: print_row headers :: print_blank col_size :: print_row top :: print_separator `Mid col_size :: List.map print_row rows @ print_separator `Bot col_size :: [] in String.concat "\n" lines end (******************************************************************************) (* END Copied from batteries, to remove *) (******************************************************************************) let add_timings a b = { user_time = a.user_time +. b.user_time; num_instr = a.num_instr + b.num_instr; num_cycles = a.num_cycles + b.num_cycles; num_mem = a.num_mem + b.num_mem; num_faults = a.num_faults + b.num_faults; } let mk_pkg_timings work_dir pkg_name suffix iteration = let command_prefix = "cat " ^ work_dir ^ "/" ^ pkg_name ^ suffix ^ string_of_int iteration in let ncoms = command_prefix ^ ".ncoms" |> run |> String.rchop ~n:1 |> int_of_string in let timings = List.init ncoms (fun ncom -> let command_prefix = command_prefix ^ "." ^ string_of_int (ncom+1) in let time_command_output = command_prefix ^ ".time" |> run |> String.rchop ~n:1 |> String.split_on_char ' ' in let nth x i = List.nth i x in { user_time = time_command_output |> nth 0 |> float_of_string (* Perf can indeed be not supported in some systems, so we must fail gracefully *) ; num_instr = (try command_prefix ^ ".perf | grep instructions:u | awk '{print $1}' | sed 's/,//g'" |> run |> String.rchop ~n:1 |> int_of_string with Failure _ -> 0) ; num_cycles = (try command_prefix ^ ".perf | grep cycles:u | awk '{print $1}' | sed 's/,//g'" |> run |> String.rchop ~n:1 |> int_of_string with Failure _ -> 0) ; num_mem = time_command_output |> nth 1 |> int_of_string ; num_faults = time_command_output |> nth 2 |> int_of_string }) in match timings with | [] -> assert false | timing :: rest -> List.fold_left add_timings timing rest ;; (* process command line paramters *) assert (Array.length Sys.argv > 5); let work_dir = Sys.argv.(1) in let num_of_iterations = int_of_string Sys.argv.(2) in let new_coq_version = Sys.argv.(3) in let old_coq_version = Sys.argv.(4) in let minimal_user_time = float_of_string Sys.argv.(5) in let sorting_column = Sys.argv.(6) in let coq_opam_packages = Sys.argv |> Array.to_list |> List.drop 7 in (* ASSUMPTIONS: "working_dir" contains all the files produced by the following command: two_points_on_the_same_branch.sh $working_directory $coq_repository $coq_branch[:$new:$old] $num_of_iterations coq_opam_package_1 coq_opam_package_2 ... coq_opam_package_N -sf *) (* Run a given bash command; wait until it termines; check if its exit status is 0; return its whole stdout as a string. *) let proportional_difference_of_integers new_value old_value = if old_value = 0 then Float.nan else float_of_int (new_value - old_value) /. float_of_int old_value *. 
100.0 in (* parse the *.time and *.perf files *) coq_opam_packages |> List.map (fun package_name -> package_name,(* compilation_results_for_NEW : (float * int * int * int) list *) List.init num_of_iterations succ |> List.map (mk_pkg_timings work_dir package_name ".NEW."), List.init num_of_iterations succ |> List.map (mk_pkg_timings work_dir package_name ".OLD.")) (* from the list of measured values, select just the minimal ones *) |> List.map (fun ((package_name : string), (new_measurements : (float, int) pkg_timings list), (old_measurements : (float, int) pkg_timings list)) -> let f_min : float list -> float = List.min in let i_min : int list -> int = List.min in package_name, reduce_pkg_timings f_min i_min new_measurements, reduce_pkg_timings f_min i_min old_measurements ) (* compute the "proportional differences in % of the NEW measurement and the OLD measurement" of all measured values *) |> List.map (fun (package_name, new_t, old_t) -> package_name, new_t, old_t, { user_time = (new_t.user_time -. old_t.user_time) /. old_t.user_time *. 100.0 ; num_instr = proportional_difference_of_integers new_t.num_instr old_t.num_instr ; num_cycles = proportional_difference_of_integers new_t.num_cycles old_t.num_cycles ; num_mem = proportional_difference_of_integers new_t.num_mem old_t.num_mem ; num_faults = proportional_difference_of_integers new_t.num_faults old_t.num_faults }) (* sort the table with results *) |> List.sort (match sorting_column with | "user_time_pdiff" -> fun (_,_,_,perf1) (_,_,_,perf2) -> compare perf1.user_time perf2.user_time | "package_name" -> fun (n1,_,_,_) (n2,_,_,_) -> compare n1 n2 | _ -> assert false ) (* Keep only measurements that took at least "minimal_user_time" (in seconds). *) |> List.filter (fun (_, new_t, old_t, _) -> minimal_user_time <= new_t.user_time && minimal_user_time <= old_t.user_time) (* Below we take the measurements and format them to stdout. *) |> List.map begin fun (package_name, new_t, old_t, perc) -> let precision = 2 in let prf f = Printf.sprintf "%.*f" precision f in let pri n = Printf.sprintf "%d" n in [ [ package_name ]; [ prf new_t.user_time; prf old_t.user_time; prf perc.user_time ]; [ pri new_t.num_cycles; pri old_t.num_cycles; prf perc.num_cycles ]; [ pri new_t.num_instr; pri old_t.num_instr; prf perc.num_instr ]; [ pri new_t.num_mem; pri old_t.num_mem; prf perc.num_mem ]; [ pri new_t.num_faults; pri old_t.num_faults; prf perc.num_faults ]; ] end |> fun measurements -> let headers = [ ""; "user time [s]"; "CPU cycles"; "CPU instructions"; "max resident mem [KB]"; "mem faults"; ] in let descr = ["NEW"; "OLD"; "PDIFF"] in let top = [ [ "package_name" ]; descr; descr; descr; descr; descr ] in printf "%s%!" (Table.print headers top measurements) ; (* ejgallego: disable this as it is very verbose and brings up little info in the log. *) if false then begin printf " PDIFF = proportional difference between measurements done for the NEW and the OLD Coq version = (NEW_measurement - OLD_measurement) / OLD_measurement * 100%% NEW = %s OLD = %s Columns: 1. user time [s] Total number of CPU-seconds that the process used directly (in user mode), in seconds. (In other words, \"%%U\" quantity provided by the \"/usr/bin/time\" command.) 2. CPU cycles Total number of CPU-cycles that the process used directly (in user mode). (In other words, \"cycles:u\" quantity provided by the \"/usr/bin/perf\" command.) 3. CPU instructions Total number of CPU-instructions that the process used directly (in user mode). 
(In other words, \"instructions:u\" quantity provided by the \"/usr/bin/perf\" command.) 4. max resident mem [KB] Maximum resident set size of the process during its lifetime, in Kilobytes. (In other words, \"%%M\" quantity provided by the \"/usr/bin/time\" command.) 5. mem faults Number of major, or I/O-requiring, page faults that occurred while the process was running. These are faults where the page has actually migrated out of primary memory. (In other words, \"%%F\" quantity provided by the \"/usr/bin/time\" command.) " new_coq_version old_coq_version; end coq-8.15.0/dev/bench/sort-by-deps000066400000000000000000000020421417001151100164640ustar00rootroot00000000000000#!/usr/bin/env ocaml let get_pkg_name arg = List.nth (String.split_on_char ':' arg) 0 let get_pkg_deps arg = String.split_on_char ',' (List.nth (String.split_on_char ':' arg) 1) let split_pkg arg = get_pkg_name arg, get_pkg_deps arg let depends_on arg1 arg2 = let pkg1, deps1 = split_pkg arg1 in let pkg2, deps2 = split_pkg arg2 in pkg1 != pkg2 && List.mem pkg2 deps1 let rec sort = function | [], [] -> [] | [], deferred -> sort (List.rev deferred, []) | arg :: rest, deferred -> (* check if any remaining package reverse-depends on this one *) if List.exists (fun other_arg -> depends_on arg other_arg) rest then (* defer this package *) sort (rest, arg :: deferred) else (* emit this package, and then try again with any deferred packages *) arg :: sort (List.rev deferred @ rest, []) let main () = let args = Array.to_list Sys.argv in let pkgs = List.tl args in let sorted_pkgs = sort (pkgs, []) in Printf.printf "%s\n%!" (String.concat " " (List.map get_pkg_name sorted_pkgs)) let () = main () coq-8.15.0/dev/bench/sort-by-deps.sh000077500000000000000000000011711417001151100171020ustar00rootroot00000000000000#!/usr/bin/env bash program_name="$0" program_path=$(readlink -f "${program_name%/*}") # We add || true (which may not be needed without set -e) to be # explicit about the fact that this script does not fail even if `opam # install --show-actions` does, e.g., because of a non-existent # package # # TODO: Figure out how to use the OPAM API # (https://opam.ocaml.org/doc/api/) to call this from OCaml. for i in "$@"; do echo -n "$i:"; ((echo -n "$(opam install --show-actions "$i" | grep -o '∗\s*install\s*[^ ]*' | sed 's/∗\s*install\s*//g')" | tr '\n' ',') || true); echo done | xargs ocaml "${program_path}/sort-by-deps" coq-8.15.0/dev/bench/timelog2html000077500000000000000000000060411417001151100165510ustar00rootroot00000000000000#!/usr/bin/env lua5.1 args = {...} vfile = assert(args[1], "arg1 missing: .v file") table.remove(args,1) assert(#args > 0, "arg missing: at lease one aux file") data_files = args source = assert(io.open(vfile), "unable to open "..vfile):read("*a") function htmlescape(s) return (s:gsub("&","&"):gsub("<","<"):gsub(">",">")) end colors = { '#F08080', '#EEE8AA', '#98FB98' } assert(#data_files <= #colors, "only ".. #colors .." data files are supported") vname = vfile:match("([^/]+.v)$") print([[ ]]..vname..[[

Timings for ]]..vname..[[

    ]]) for i,data_file in ipairs(data_files) do print('
  1. ' .. data_file .. "
  2. ") end print("
") all_data = {} for _, data_file in ipairs(data_files) do local data = {} local last_end = -1 local lines = 1 for l in io.lines(data_file) do local b,e,t = l:match('^Chars ([%d]+) %- ([%d]+) %S+ ([%d%.]+) secs') if b then if tonumber(b) > last_end + 1 then local text = string.sub(source,last_end+1,b-1) if not text:match('^%s+$') then local _, n = text:gsub('\n','') data[#data+1] = { start = last_end+1; stop = b-1; time = 0; text = text; lines = lines } lines = lines + n last_end = b end end local text = string.sub(source,last_end+1,e) local _, n = text:gsub('\n','') local _, eoln = text:match('^[%s\n]*'):gsub('\n','') data[#data+1] = { start = b; stop = e; time = tonumber(t); text = text; lines = lines } lines = lines + n last_end = tonumber(e) end end if last_end + 1 <= string.len(source) then local text = string.sub(source,last_end+1,string.len(source)) data[#data+1] = { start = last_end+1; stop = string.len(source); time = 0; text = text; lines = lines+1 } end all_data[#all_data+1] = data end max = 0; for _, data in ipairs(all_data) do for _,d in ipairs(data) do max = math.max(max,d.time) end end data = all_data[1] for j,d in ipairs(data) do print('
') for k=1,#all_data do print('
') end if d.text == '\n' then print('
\n\n
') elseif d.text:match('\n$') then print('
'..htmlescape(d.text)..'\n
') else print('
'..htmlescape(d.text)..'
') end print("
") end print [[ ]] -- vim: set ts=4: --for i = 1,#data do -- io.stderr:write(data[i].text) --end coq-8.15.0/dev/bench/wrapper.sh000077500000000000000000000013501417001151100162310ustar00rootroot00000000000000#!/bin/sh log_dir=$COQ_LOG_DIR runner=$COQ_RUNNER package=$COQ_OPAM_PACKAGE iteration=$COQ_ITERATION echo "wrap[$package.$runner.$iteration|$OPAM_PACKAGE_NAME]" "$@" >> "$log_dir/wraplog.txt" echo >> "$log_dir/wraplog.txt" # we could be running commands for a dependency if [ "$package" ] && [ "$OPAM_PACKAGE_NAME" = "$package" ] ; then prefix=$log_dir/$package.$runner.$iteration if [ -e "$prefix.ncoms" ]; then ncoms=$(cat "$prefix.ncoms") ncoms=$((ncoms+1)) else ncoms=1 fi echo $ncoms > "$prefix.ncoms" exec /usr/bin/time \ -o "$prefix.$ncoms.time" --format="%U %M %F" \ perf stat -e instructions:u,cycles:u -o "$prefix.$ncoms.perf" \ "$@" else exec "$@" fi coq-8.15.0/dev/bugzilla2github_stripped.csv000066400000000000000000000116231417001151100206640ustar00rootroot000000000000002, 1156 3, 1157 4, 1158 7, 1160 8, 1161 10, 1163 12, 1164 13, 1165 14, 1169 16, 1171 17, 1184 18, 1190 19, 1191 20, 1193 21, 1200 23, 1201 24, 1203 25, 1208 26, 1210 27, 1212 28, 1216 30, 1217 31, 1223 34, 1227 35, 1232 36, 1235 38, 1238 39, 1244 40, 1245 41, 1246 42, 1247 44, 1248 45, 1249 46, 1250 47, 1252 48, 1253 49, 1254 50, 1256 52, 1262 54, 1263 55, 1264 56, 1265 59, 1266 60, 1267 61, 1268 63, 1270 64, 1272 65, 1274 66, 1275 69, 1276 70, 1279 71, 1283 72, 1284 73, 1285 74, 1286 75, 1287 78, 1288 79, 1291 80, 1292 82, 1293 83, 1295 84, 1296 85, 1297 86, 1299 88, 1301 89, 1303 90, 1304 91, 1305 92, 1307 93, 1308 94, 1310 95, 1312 96, 1313 97, 1314 98, 1316 99, 1318 100, 1319 101, 1320 102, 1321 103, 1323 105, 1324 106, 1327 107, 1328 108, 1330 109, 1334 112, 1335 115, 1336 119, 1337 121, 1341 123, 1342 124, 1343 125, 1344 126, 1345 127, 1346 128, 1348 129, 1349 134, 1350 135, 1351 136, 1352 137, 1353 138, 1354 139, 1355 140, 1356 142, 1357 143, 1358 144, 1359 145, 1360 147, 1361 148, 1362 149, 1363 150, 1365 152, 1366 154, 1368 155, 1369 160, 1370 161, 1371 162, 1372 164, 1373 165, 1374 166, 1376 167, 1377 169, 1378 170, 1380 178, 1382 179, 1383 180, 1384 181, 1385 182, 1386 183, 1387 184, 1390 185, 1391 186, 1392 187, 1393 189, 1394 190, 1398 191, 1401 192, 1402 194, 1403 195, 1404 196, 1405 197, 1407 198, 1409 199, 1410 202, 1412 204, 1413 205, 1421 207, 1422 209, 1423 210, 1426 212, 1427 213, 1428 214, 1429 215, 1433 216, 1435 219, 1436 220, 1437 221, 1440 222, 1444 224, 1445 225, 1450 228, 1452 229, 1453 235, 1457 236, 1458 238, 1459 239, 1460 240, 1462 242, 1465 243, 1466 244, 1470 245, 1471 248, 1472 250, 1473 253, 1474 254, 1475 259, 1476 261, 1478 262, 1479 263, 1480 264, 1481 265, 1484 266, 1485 267, 1486 268, 1488 269, 1489 270, 1490 271, 1492 272, 1493 273, 1494 274, 1498 275, 1500 277, 1503 278, 1504 279, 1505 282, 1506 283, 1511 289, 1513 290, 1514 291, 1516 292, 1517 294, 1520 295, 1521 299, 1523 301, 1524 302, 1525 303, 1527 305, 1529 311, 1530 315, 1531 316, 1532 317, 1534 320, 1535 322, 1539 324, 1541 328, 1542 329, 1543 330, 1544 331, 1545 333, 1546 335, 1547 336, 1548 338, 1549 343, 1550 348, 1551 350, 1552 351, 1553 352, 1554 353, 1555 356, 1556 363, 1557 368, 1558 371, 1559 372, 1560 413, 1561 418, 1562 420, 1563 426, 1564 431, 1565 444, 1566 447, 1567 452, 1569 459, 1570 462, 1571 463, 1573 468, 1574 472, 1575 473, 1577 509, 1578 519, 1579 529, 1580 540, 1581 541, 1583 545, 1584 546, 1585 547, 1589 550, 1590 552, 1591 553, 1592 554, 1593 574, 1594 592, 1595 602, 1597 603, 1598 606, 1599 
607, 1600 667, 1601 668, 1602 686, 1603 690, 1605 699, 1606 705, 1607 708, 1609 711, 1610 728, 1611 739, 1612 742, 1613 743, 1615 774, 1617 775, 1619 776, 1623 777, 1624 778, 1625 779, 1627 780, 1628 781, 1629 782, 1630 783, 1631 784, 1632 785, 1633 786, 1636 787, 1637 788, 1638 789, 1639 790, 1640 793, 1641 794, 1642 795, 1644 797, 1645 798, 1646 803, 1647 804, 1649 805, 1650 808, 1652 813, 1653 815, 1655 816, 1656 818, 1657 820, 1658 821, 1659 822, 1660 823, 1661 826, 1662 828, 1663 829, 1664 830, 1665 831, 1666 832, 1667 834, 1668 835, 1669 836, 1670 837, 5689 839, 5791 840, 5792 841, 5793 842, 5794 843, 5795 844, 5796 846, 5797 849, 5798 850, 5799 854, 5800 855, 5801 856, 5802 857, 5803 860, 5804 861, 5805 862, 5806 863, 5807 864, 5808 865, 5809 867, 5810 868, 5811 869, 5812 870, 5813 871, 5814 872, 5815 874, 5816 875, 5817 878, 5818 879, 5819 881, 5820 883, 5821 884, 5822 885, 5823 886, 5824 888, 5825 889, 5826 890, 5827 891, 5828 892, 5829 893, 5830 894, 5831 896, 5832 898, 5833 901, 5834 903, 5835 905, 5836 906, 5837 909, 5838 914, 5839 915, 5840 922, 5841 923, 5842 925, 5843 927, 5844 931, 5845 932, 5846 934, 5847 935, 5848 936, 5849 937, 5850 938, 5851 939, 5852 940, 5853 941, 5854 945, 5855 946, 5856 947, 5857 949, 5858 950, 5859 951, 5860 952, 5861 953, 5862 954, 5863 957, 5864 960, 5865 963, 5866 965, 5867 967, 5868 968, 5869 969, 5870 972, 5871 973, 5872 974, 5873 975, 5874 976, 5875 977, 5876 979, 5877 983, 5878 984, 5879 985, 5880 986, 5881 987, 5882 988, 5883 990, 5884 991, 5885 993, 5886 996, 5887 997, 5888 1000, 5889 1001, 5890 1002, 5891 1003, 5892 1004, 5893 1005, 5894 1006, 5895 1007, 5896 1010, 5897 1012, 5898 1013, 5899 1014, 5900 1015, 5901 1016, 5902 1017, 5903 1018, 5904 1025, 5905 1028, 5906 1029, 5907 1030, 5908 1031, 5909 1033, 5910 1035, 5911 1036, 5912 1037, 5913 1039, 5914 1041, 5915 1042, 5916 1044, 5917 1045, 5918 1052, 5919 1053, 5920 1054, 5921 1055, 5922 1056, 5923 1060, 5924 1064, 5925 1067, 5926 1070, 5927 1072, 5928 1075, 5929 1076, 5930 1085, 5931 1086, 5932 1087, 5933 1089, 5934 1091, 5935 1096, 5936 1097, 5937 1098, 5938 1099, 5939 1100, 5940 1101, 5941 1102, 5942 1104, 5943 1107, 5944 1108, 5945 1111, 5946 1113, 5947 1114, 5948 1115, 5949 1116, 5950 1118, 5951 1119, 5952 1120, 5953 1122, 5954 1123, 5955 1124, 5956 1128, 5957 1129, 5958 1132, 5959 1136, 5960 1137, 5961 1138, 5962 1140, 5963 1141, 5964 1142, 5965 1144, 5966 1145, 5967 1149, 5968 1151, 5969 1153, 5970 coq-8.15.0/dev/ci/000077500000000000000000000000001417001151100135275ustar00rootroot00000000000000coq-8.15.0/dev/ci/README-developers.md000066400000000000000000000175631417001151100171700ustar00rootroot00000000000000Information for developers about the CI system ---------------------------------------------- When you submit a pull request (PR) on the Coq GitHub repository, this will automatically launch a battery of CI tests. The PR will not be integrated unless these tests pass. We are currently running tests on the following platforms: - GitLab CI is the main CI platform. It tests the compilation of Coq, of the documentation, and of CoqIDE on Linux with several versions of OCaml and with warnings as errors; it runs the test-suite and tests the compilation of several external developments. It also runs a linter that checks whitespace discipline. A [pre-commit hook](../tools/pre-commit) is automatically installed by `./configure`. It should allow complying with this discipline without pain. - Github Actions are used to test the compilation of Coq on Windows and macOS. 
For Windows, the Coq platform script is used, producing an installer that can be used to test Coq. You can anticipate the results of most of these tests prior to submitting your PR by running GitLab CI on your private branches. To do so follow these steps: 1. Log into GitLab CI (the easiest way is to sign in with your GitHub account). 2. Click on "New Project". 3. Choose "CI / CD for external repository" then click on "GitHub". 4. Find your fork of the Coq repository and click on "Connect". 5. If GitLab did not do so automatically, [enable the Container Registry](https://docs.gitlab.com/ee/user/project/container_registry.html#enable-the-container-registry-for-your-project). 6. You are encouraged to go to the CI / CD general settings and increase the timeout from 1h to 2h for better reliability. Now every time you push (including force-push unless you changed the default GitLab setting) to your fork on GitHub, it will be synchronized on GitLab and CI will be run. You will receive an e-mail with a report of the failures if there are some. You can also run one CI target locally (using `make ci-somedev`). See also [`test-suite/README.md`](../../test-suite/README.md) for information about adding new tests to the test-suite. ### Breaking changes When your PR breaks an external project we test in our CI, you must prepare a patch (or ask someone—possibly the project author—to prepare a patch) to fix the project. There is experimental support for an improved workflow, see [the next section](#experimental-automatic-overlay-creation-and-building), below are the steps to manually prepare a patch: 1. Fork the external project, create a new branch, push a commit adapting the project to your changes. 2. Test your pull request with your adapted version of the external project by adding an overlay file to your pull request (cf. [`dev/ci/user-overlays/README.md`](user-overlays/README.md)). 3. Fixes to external libraries (pure Coq projects) *must* be backward compatible (i.e. they should also work with the development version of Coq, and the latest stable version). This will allow you to open a PR on the external project repository to have your changes merged *before* your PR on Coq can be integrated. On the other hand, patches to plugins (projects linking to the Coq ML API) can very rarely be made backward compatible and plugins we test will generally have a dedicated branch per Coq version. You can still open a pull request but the merging will be requested by the developer who merges the PR on Coq. There are plans to improve this, cf. [#6724](https://github.com/coq/coq/issues/6724). Moreover your PR must absolutely update the [`CHANGES.md`](../../CHANGES.md) file. ### Experimental automatic overlay creation and building If you break external projects that are hosted on GitHub, you can use the `create_overlays.sh` script to automatically perform most of the above steps. In order to do so: - determine the list of failing projects: IDs can be found as ci-XXX1 ci-XXX2 ci-XXX3 in the list of GitLab CI failures; - for each project XXXi, look in [ci-basic-overlay.sh](https://github.com/coq/coq/blob/master/dev/ci/ci-basic-overlay.sh) to see if the corresponding `XXXi_CI_GITURL` is hosted on GitHub; - log on GitHub and fork all the XXXi projects hosted there; - call the script as: ``` ./dev/tools/create_overlays.sh ejgallego 9873 XXX1 XXX2 XXX3 ``` replacing `ejgallego` by your GitHub nickname, `9873` by the actual PR number, and selecting the XXXi hosted on GitHub. 
The script will: + checkout the contributions and prepare the branch/remote so you can just commit the fixes and push, + add the corresponding overlay file in `dev/ci/user-overlays`; - go to `_build_ci/XXXi` to prepare your overlay (you can test your modifications by using `make -C ../.. ci-XXXi`) and push using `git push ejgallego` (replacing `ejgallego` by your GitHub nickname); - finally push the `dev/ci/user-overlays/9873-elgallego-YYY.sh` file on your Coq fork (replacing `9873` by the actual PR number, and `ejgallego` by your GitHub nickname). For problems related to ML-plugins, if you use `dune build` to build Coq, it will actually be aware of the broken contributions and perform a global build. This is very convenient when using `merlin` as you will get a coherent view of all the broken plugins, with full incremental cross-project rebuild. Advanced GitLab CI information ------------------------------ GitLab CI is set up to use the "build artifact" feature to avoid rebuilding Coq. In one job, Coq is built with `./configure -prefix _install_ci` and `make install` is run, then the `_install_ci` directory persists to and is used by the next jobs. ### Artifacts Build artifacts from GitLab can be linked / downloaded in a systematic way, see [GitLab's documentation](https://docs.gitlab.com/ce/user/project/pipelines/job_artifacts.html#downloading-the-latest-job-artifacts) for more information. For example, to access the documentation of the `master` branch, you can do: https://gitlab.com/coq/coq/-/jobs/artifacts/master/file/_install_ci/share/doc/coq/sphinx/html/index.html?job=doc:refman Browsing artifacts is also possible: https://gitlab.com/coq/coq/-/jobs/artifacts/master/browse/_install_ci/?job=build:base Above, you can replace `master` and `job` by the desired GitLab branch and job name. Currently available artifacts are: - the Coq executables and stdlib, in four copies varying in architecture and OCaml version used to build Coq: https://gitlab.com/coq/coq/-/jobs/artifacts/master/browse/_install_ci/?job=build:base Additionally, an experimental Dune build is provided: https://gitlab.com/coq/coq/-/jobs/artifacts/master/browse/_build/?job=build:edge:dune:dev - the Coq documentation, built in the `doc:*` jobs. When submitting a documentation PR, this can help reviewers checking the rendered result. **@coqbot** will automatically post links to these artifacts in the PR checks section. Furthermore, these artifacts are automatically deployed at: + Coq's Reference Manual [master branch]: + Coq's Standard Library Documentation [master branch]: + Coq's ML API Documentation [master branch]: ### GitLab and Docker System and opam packages are installed in a Docker image. The image is automatically built and uploaded to your GitLab registry, and is loaded by subsequent jobs. **IMPORTANT**: When updating Coq's CI docker image, you must modify the `CACHEKEY` variable in [`.gitlab-ci.yml`](../../.gitlab-ci.yml) (see comment near it for details). The Docker building job reuses the uploaded image if it is available, but if you wish to save more time you can skip the job by setting `SKIP_DOCKER` to `true`. In the case of the main Coq repository, this variable is set to true by default, but coqbot will set it to `false` anytime a PR modifies a path matching `dev/ci/docker/.*Dockerfile.*`. See also [`docker/README.md`](docker/README.md). 
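As a concrete illustration of two of the workflows above (testing an external development locally and retrieving CI artifacts), the following commands can be used; the target name `ci-bignums` and the job name `doc:refman` are only examples, and the `download?job=...` endpoint is GitLab's standard artifact-download URL as described in the GitLab documentation linked above:

```
# Run a single external-development target locally, after building Coq:
make ci-bignums

# Download and unpack the artifacts of a given job on a given branch:
curl -L -o artifacts.zip \
  "https://gitlab.com/coq/coq/-/jobs/artifacts/master/download?job=doc:refman"
unzip artifacts.zip -d artifacts
```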
coq-8.15.0/dev/ci/README-users.md000066400000000000000000000115051417001151100161470ustar00rootroot00000000000000Information for external library / Coq plugin authors ----------------------------------------------------- You are encouraged to consider submitting your project for addition to Coq's CI. This means that: - Any time that a proposed change is breaking your project, Coq developers and contributors will send you patches to adapt it or will explain how to adapt it and work with you to ensure that you manage to do it. On the condition that: - At the time of the submission, your project works with Coq's `master` branch. - Your project is publicly available in a git repository and we can easily send patches to you (e.g. through pull / merge requests). - You react in a timely manner to discuss / integrate those patches. When seeking your help for preparing such patches, we will accept that it takes longer than when we are just requesting to integrate a simple (and already fully prepared) patch. - You do not push, to the branches that we test, commits that haven't been first tested to compile with the corresponding branch(es) of Coq. For that, we recommend setting a CI system for you project, see [supported CI images for Coq](#supported-ci-images-for-coq) below. - You maintain a reasonable build time for your project, or you provide a "lite" target that we can use. In case you forget to comply with these last three conditions, we would reach out to you and give you a 30-day grace period during which your project would be moved into our "allow failure" category. At the end of the grace period, in the absence of progress, the project would be removed from our CI. ### Timely merging of overlays A pitfall of the current CI setup is that when a breaking change is merged in Coq upstream, CI for your contrib will be broken until you merge the corresponding pull request with the fix for your contribution. As of today, you have to worry about synchronizing with Coq upstream every once in a while; we hope we will improve this in the future by using [coqbot](https://github.com/coq/bot); meanwhile, a workaround is to give merge permissions to someone from the Coq team as to help with these kind of merges. ### OCaml and plugin-specific considerations Projects that link against Coq's OCaml API [most of them are known as "plugins"] do have some special requirements: - Coq's OCaml API is not stable. We hope to improve this in the future but as of today you should expect to have to merge a fair amount of "overlays", usually in the form of Pull Requests from Coq developers in order to keep your plugin compatible with Coq master. In order to alleviate the load, you can delegate the merging of such compatibility pull requests to Coq developers themselves, by granting access to the plugin repository or by using `bots` such as [Bors](https://github.com/apps/bors) that allow for automatic management of Pull Requests. - Plugins in the CI should compile with the OCaml flags that Coq uses. In particular, warnings that are considered fatal by the Coq developers _must_ be also fatal for plugin CI code. ### Add your project by submitting a pull request Add a new `ci-mydev.sh` script to [`dev/ci`](.); set the corresponding variables in [`ci-basic-overlay.sh`](ci-basic-overlay.sh); add the corresponding target to [`Makefile.ci`](../../Makefile.ci) and a new job to [`.gitlab-ci.yml`](../../.gitlab-ci.yml) so that this new target is run. Have a look at [#7656](https://github.com/coq/coq/pull/7656/files) for an example. 
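For a pure Coq library built with `make`, the `ci-mydev.sh` script is usually only a few lines long. The sketch below follows the pattern of the existing scripts in this directory; `mydev` is a placeholder for your project name and must match the `project mydev ...` entry you add to [`ci-basic-overlay.sh`](ci-basic-overlay.sh):

```
#!/usr/bin/env bash

set -e

ci_dir="$(dirname "$0")"
. "${ci_dir}/ci-common.sh"

git_download mydev

if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi

( cd "${CI_BUILD_DIR}/mydev"
  make
  make install
)
```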
**Do not hesitate to submit an incomplete pull request if you need help to finish it.** You may also be interested in having your project tested in our performance benchmark. Currently this is done by providing an OPAM package in https://github.com/coq/opam-coq-archive and opening an issue at https://github.com/coq/coq-bench/issues. ### Recommended branching policy. It is sometimes the case that you will need to maintain a branch of your project for particular Coq versions. This is in fact very likely if your project includes a Coq ML plugin. For such projects, we recommend a branching convention that mirrors Coq's branching policy. Then, you would have a `master` branch that follows Coq's `master`, a `v8.8` branch that works with Coq's `v8.8` branch and so on. This convention will be supported by tools in the future to make some developer commands work more seamlessly. ### Supported CI images for Coq The Coq developers and contributors provide official Docker and Nix images for testing against Coq master. Using these images is highly recommended: - For Docker, see: https://github.com/coq-community/docker-coq The https://github.com/coq-community/docker-coq/wiki/CI-setup wiki page contains additional information and templates to help setting Docker-based CI up for your Coq project - For Nix, see the setup at https://github.com/coq-community/manifesto/wiki/Continuous-Integration-with-Nix coq-8.15.0/dev/ci/README.md000066400000000000000000000016651417001151100150160ustar00rootroot00000000000000Continuous Integration for the Coq Proof Assistant ================================================== Changes to Coq are systematically tested for regression and compatibility breakage on our Continuous Integration (CI) platforms *before* integration, so as to ensure better robustness and catch problems as early as possible. These tests include the compilation of several external libraries / plugins. This README is split into two specific documents: - [README-users.md](./README-users.md) which contains information for authors of external libraries and plugins who might be interested in having their development tested in our CI system. - [README-developers.md](./README-developers.md) for Coq developers / contributors, who must ensure that they don't break these external developments accidentally. *Remark:* the CI policy outlined in these documents is susceptible to evolve and specific accommodations are of course possible. coq-8.15.0/dev/ci/ci-aac_tactics.sh000066400000000000000000000003211417001151100167060ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download aac_tactics if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/aac_tactics" make make install ) coq-8.15.0/dev/ci/ci-argosy.sh000066400000000000000000000003121417001151100157540ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" WITH_SUBMODULES=1 git_download argosy if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/argosy" make ) coq-8.15.0/dev/ci/ci-autosubst.sh000066400000000000000000000003151417001151100165040ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . 
"${ci_dir}/ci-common.sh" git_download autosubst if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/autosubst" make make install ) coq-8.15.0/dev/ci/ci-basic-overlay.sh000066400000000000000000000367261417001151100172320ustar00rootroot00000000000000#!/usr/bin/env bash # This is the list of repositories used by the CI scripts, unless overridden # by a call to the "overlay" function in ci-common declare -a projects # the list of project repos that can be be overlayed # checks if the given argument is a known project function is_in_projects { for x in "${projects[@]}"; do if [ "$1" = "$x" ]; then return 0; fi; done return 1 } # project [] # [] defaults to /archive on github.com # and /-/archive on gitlab function project { local var_ref=${1}_CI_REF local var_giturl=${1}_CI_GITURL local var_archiveurl=${1}_CI_ARCHIVEURL local giturl=$2 local ref=$3 local archiveurl=$4 case $giturl in *github.com*) archiveurl=${archiveurl:-$giturl/archive} ;; *gitlab*) archiveurl=${archiveurl:-$giturl/-/archive} ;; esac # register the project in the list of projects projects[${#projects[*]}]=$1 # bash idiom for setting a variable if not already set : "${!var_ref:=$ref}" : "${!var_giturl:=$giturl}" : "${!var_archiveurl:=$archiveurl}" } ######################################################################## # MathComp ######################################################################## project mathcomp 'https://github.com/math-comp/math-comp' '4efe95e4b39cc058e63e9161d76f0b50af493c94' project fourcolor 'https://github.com/math-comp/fourcolor' '4581901330cce906c3c58ce5c24e5d061ea3ab39' project oddorder 'https://github.com/math-comp/odd-order' '91e3b9a75d12d142568985566e652988189a2ff4' project mczify 'https://github.com/math-comp/mczify' '8481446a34ee53cf24639210877ed9f7dd1dcf92' ######################################################################## # UniMath ######################################################################## project unimath 'https://github.com/UniMath/UniMath' '95d88e3c0b1c5ff05ce36e0deea02a5823ed404f' ######################################################################## # Unicoq + Mtac2 ######################################################################## project unicoq 'https://github.com/unicoq/unicoq' '2095dff58b630dc69fa9b6d707e843e193fe18a8' project mtac2 'https://github.com/Mtac2/Mtac2' 'd8332d81189f3763dfaccd9de31bc9f459fad6bb' ######################################################################## # Mathclasses + Corn ######################################################################## project math_classes 'https://github.com/coq-community/math-classes' '5da1f4142c2db714d7943152e6522e56c95f434d' project corn 'https://github.com/coq-community/corn' 'd6cbe5a1c5106078899b63a05bf0390c1f868337' ######################################################################## # Iris ######################################################################## # NB: stdpp and Iris refs are gotten from the opam files in the Iris # and lambdaRust repos respectively. 
project stdpp "https://gitlab.mpi-sws.org/iris/stdpp" "" project iris "https://gitlab.mpi-sws.org/iris/iris" "" project autosubst 'https://github.com/coq-community/autosubst' 'c71b2dac46049d549fa002f59503091b2a0b99a8' project iris_examples 'https://gitlab.mpi-sws.org/iris/examples' 'bb946806c5aa9bcb8184a8cb3bc1befecc0322a0' ######################################################################## # HoTT ######################################################################## project hott 'https://github.com/HoTT/HoTT' 'd79c4bb3c0a10ceb8d8ff2acd4ffc4c645ffd5c9' ######################################################################## # CoqHammer ######################################################################## project coqhammer 'https://github.com/lukaszcz/coqhammer' 'f8bd70509365f6e2c8038b773332eef935ca87ee' ######################################################################## # GeoCoq ######################################################################## project geocoq 'https://github.com/GeoCoq/GeoCoq' '25917f56a3b46843690457b2bfd83168bed1321c' ######################################################################## # Flocq ######################################################################## project flocq 'https://gitlab.inria.fr/flocq/flocq' 'e68658cb2e6ed16c79fc4b01d578e79052f45bf8' ######################################################################## # coq-performance-tests ######################################################################## project coq_performance_tests 'https://github.com/coq-community/coq-performance-tests' '5cc9a158e3aa32bc39716100e575f5f30cc72008' ######################################################################## # coq-tools ######################################################################## project coq_tools 'https://github.com/JasonGross/coq-tools' 'd87641a8f0b19399c6a709796b62ae303dc11ac1' ######################################################################## # Coquelicot ######################################################################## project coquelicot 'https://gitlab.inria.fr/coquelicot/coquelicot' '09eb630f233c6887bef8f6764c7145bc45f24951' ######################################################################## # Gappa stand alone tool ######################################################################## project gappa_tool 'https://gitlab.inria.fr/gappa/gappa' '6c97a36257369d89ff32c9877c0e83681bfd3df9' ######################################################################## # Gappa plugin ######################################################################## project gappa 'https://gitlab.inria.fr/gappa/coq' '45c464f726a7361201f4902bfcbb0aaf46b15e3d' ######################################################################## # CompCert ######################################################################## project compcert 'https://github.com/AbsInt/CompCert' '2198a280b1150a61be1e514f044da03e69a66af9' ######################################################################## # VST ######################################################################## project vst 'https://github.com/PrincetonUniversity/VST' '7f0d5bb642b886783f69add45dad9446c8ef4cd1' ######################################################################## # cross-crypto ######################################################################## project cross_crypto 'https://github.com/mit-plv/cross-crypto' 'b0e32790d17ec2836780e8e8f3c38f67366dda63' ######################################################################## # rewriter 
######################################################################## project rewriter 'https://github.com/mit-plv/rewriter' 'dbc67ab8ca64fcc0e4c67d91afc9c68c7b9e096a' ######################################################################## # fiat_parsers ######################################################################## project fiat_parsers 'https://github.com/mit-plv/fiat' 'cb85ee76ee3410f04333633f6644663ebb525ac8' ######################################################################## # fiat_crypto ######################################################################## project fiat_crypto 'https://github.com/mit-plv/fiat-crypto' '79e021e7e38ed796138fab7231fedeef64691771' ######################################################################## # fiat_crypto_legacy ######################################################################## project fiat_crypto_legacy 'https://github.com/mit-plv/fiat-crypto' '9f540adc1b265732845fe0785efe89b3f8b3937f' ######################################################################## # coq_dpdgraph ######################################################################## project coq_dpdgraph 'https://github.com/Karmaki/coq-dpdgraph' 'f1445af644340c1ca9b51dee98fddf4431c23635' ######################################################################## # CoLoR ######################################################################## project color 'https://github.com/fblanqui/color' 'd6d57c65f5b898de7b93a916375a3b3cd0078914' ######################################################################## # TLC ######################################################################## project tlc 'https://github.com/charguer/tlc' 'f5390a5ec9106b15b8f8a4434958d4959b15a295' ######################################################################## # Bignums ######################################################################## project bignums 'https://github.com/coq/bignums' 'da002e1e296fff03dc6555a9687e7cc164547315' ######################################################################## # coqprime ######################################################################## project coqprime 'https://github.com/thery/coqprime' '2ad3850655131aafb5b0e625b82069bc1573acaf' ######################################################################## # bbv ######################################################################## project bbv 'https://github.com/mit-plv/bbv' 'd5ab9c04db85eb85688816dc687d118000a65736' ######################################################################## # bedrock2 ######################################################################## project bedrock2 'https://github.com/mit-plv/bedrock2' 'ac173a934e8b64916d6e55868b57878d5b041b6f' ######################################################################## # Equations ######################################################################## project equations 'https://github.com/mattam82/Coq-Equations' '722cccfe0fa4305c91bcdcce5ddc1fb43aa09cfe' ######################################################################## # Elpi + Hierarchy Builder ######################################################################## project elpi 'https://github.com/LPCIC/coq-elpi' 'd1fca91261ff5dad68680b6bce14e3c2f13f31aa' project hierarchy_builder 'https://github.com/math-comp/hierarchy-builder' 'd3896f8ed5679a38509d9923a0ccf560d211fac3' ######################################################################## # Engine-Bench ######################################################################## project engine_bench 
'https://github.com/mit-plv/engine-bench' 'd2cf8602d479b627c1f5d3d03044e8570af827ea' ######################################################################## # fcsl-pcm ######################################################################## project fcsl_pcm 'https://github.com/imdea-software/fcsl-pcm' 'd68bef532d2c8ba6ff1edc80815f9ba9cd3f2d8c' ######################################################################## # ext-lib ######################################################################## project ext_lib 'https://github.com/coq-community/coq-ext-lib' '8f0f0228332007a1b73abb01fb9bf828023007fa' ######################################################################## # simple-io ######################################################################## project simple_io 'https://github.com/Lysxia/coq-simple-io' 'd0198042ac27a16ebf91aafc0b9bc163ac799a25' ######################################################################## # quickchick ######################################################################## project quickchick 'https://github.com/QuickChick/QuickChick' 'e49fbf916fdc7b5949753928f689fa9334e11b1b' ######################################################################## # reduction-effects ######################################################################## project reduction_effects 'https://github.com/coq-community/reduction-effects' '98b0279362f6e830588c2010bc6d150284cd6e3b' ######################################################################## # menhirlib ######################################################################## # Note: menhirlib is now in subfolder coq-menhirlib of menhir project menhirlib "https://gitlab.inria.fr/fpottier/menhir" "20210310" ######################################################################## # aac_tactics ######################################################################## project aac_tactics 'https://github.com/coq-community/aac-tactics' '70ab69806b188a742112e2c218fc40eeb44b5eeb' ######################################################################## # paramcoq ######################################################################## project paramcoq 'https://github.com/coq-community/paramcoq' '46bc4afb4e681d9942f3bae771b3e89923a251f0' ######################################################################## # relation_algebra ######################################################################## project relation_algebra 'https://github.com/damien-pous/relation-algebra' 'c951ffc65bc0d8c2d1e218b1888c7156d4535ad7' ######################################################################## # StructTact + InfSeqExt + Cheerios + Verdi + Verdi Raft ######################################################################## project struct_tact 'https://github.com/uwplse/StructTact' '179bd5312e9d8b63fc3f4071c628cddfc496d741' project inf_seq_ext 'https://github.com/DistributedComponents/InfSeqExt' 'd7a56b2ba532ac0a78a3e3ad0080d223186838de' project cheerios 'https://github.com/uwplse/cheerios' '9c7f66e57b91f706d70afa8ed99d64ed98ab367d' project verdi 'https://github.com/uwplse/verdi' '064cc4fb2347453bf695776ed820ffb5fbc1d804' project verdi_raft 'https://github.com/uwplse/verdi-raft' 'ea99a7453c30a0c11b904b36a3b4862fad28abe1' ######################################################################## # stdlib2 ######################################################################## project stdlib2 'https://github.com/coq/stdlib2' '54e057ea7023d058e57169a3bceaab708a5f7d26' 
######################################################################## # argosy ######################################################################## project argosy 'https://github.com/mit-pdos/argosy' '9e8a6341b20f56855d829d6c0542258bb2c10037' ######################################################################## # perennial ######################################################################## project perennial 'https://github.com/mit-pdos/perennial' 'fb179c5f5ddc8e7a16efe5d65c826112732525db' ######################################################################## # metacoq ######################################################################## project metacoq 'https://github.com/MetaCoq/metacoq' 'b2e9f58336520d5c286af93941b6d603c21553ef' ######################################################################## # SF suite ######################################################################## project sf 'https://github.com/DeepSpec/sf' 'd5fd1887ae7b23edea8f98cdf0b8c2db0ba874df' ######################################################################## # Coqtail ######################################################################## project coqtail 'https://github.com/whonore/Coqtail' 'f38e086135c5d9b69c9a5f70f82072419d952c29' ######################################################################## # Deriving ######################################################################## project deriving 'https://github.com/arthuraa/deriving' 'bc994b5950cc58f4eeb5a3d0fca60e7b6da9ab42' ######################################################################## # VsCoq ######################################################################## project vscoq 'https://github.com/maximedenes/vscoq' 'fe84907aaae2e7b1b9776e2876dd072bf8fd1320' ######################################################################## # category-theory ######################################################################## project category_theory 'https://github.com/jwiegley/category-theory' '1bd49317e9f2e27c3cca108768e57b2dc205d595' ######################################################################## # itauto ######################################################################## project itauto 'https://gitlab.inria.fr/fbesson/itauto' '4a53bf249a10278e22c918136399eb0e66a6e173' coq-8.15.0/dev/ci/ci-bbv.sh000066400000000000000000000003011417001151100152170ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download bbv if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/bbv" make make install ) coq-8.15.0/dev/ci/ci-bedrock2.sh000066400000000000000000000004621417001151100161510ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" WITH_SUBMODULES=1 git_download bedrock2 if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COQEXTRAFLAGS='-native-compiler no' ( cd "${CI_BUILD_DIR}/bedrock2" COQMF_ARGS='-arg "-async-proofs-tac-j 1"' make make install ) coq-8.15.0/dev/ci/ci-bignums.sh000066400000000000000000000003331417001151100161170ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download bignums if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/bignums" make make install cd tests make ) coq-8.15.0/dev/ci/ci-category_theory.sh000066400000000000000000000004041417001151100176610ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . 
"${ci_dir}/ci-common.sh" git_download category_theory if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COQEXTRAFLAGS='-native-compiler no' ( cd "${CI_BUILD_DIR}/category_theory" make make install ) coq-8.15.0/dev/ci/ci-color.sh000066400000000000000000000003321417001151100155700ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download color if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ulimit -s ulimit -s 65536 ulimit -s ( cd "${CI_BUILD_DIR}/color" make ) coq-8.15.0/dev/ci/ci-common.sh000066400000000000000000000133671417001151100157560ustar00rootroot00000000000000#!/usr/bin/env bash set -xe # default value for NJOBS : "${NJOBS:=1}" export NJOBS # We add $PWD/_install_ci/lib unconditionally due to a hack in the # ci-menhir script, which will install some OCaml libraries outside # our docker-opam / Nix setup; we have to do this for all the 3 cases # below; would we fix ci-menhir, then we just do this for the first # branch [ci case] export OCAMLPATH="$PWD/_install_ci/lib:$OCAMLPATH" export PATH="$PWD/_install_ci/bin:$PATH" # We can remove setting COQLIB and COQCORELIB from here, but better to # wait until we have merged the coq.boot patch so we can do this in a # more controlled way. if [ -n "${GITLAB_CI}" ]; then # Gitlab build, Coq installed into `_install_ci` export COQBIN="$PWD/_install_ci/bin" export CI_BRANCH="$CI_COMMIT_REF_NAME" if [[ ${CI_BRANCH#pr-} =~ ^[0-9]*$ ]] then export CI_PULL_REQUEST="${CI_BRANCH#pr-}" fi elif [ -d "$PWD/_build_vo/" ] && [ -z "$CI_PURE_DUNE" ] then # Dune Ocaml build, vo build using make export OCAMLPATH="$PWD/_build_vo/default/lib/:$OCAMLPATH" export COQBIN="$PWD/_build_vo/default/bin" export COQLIB="$PWD/_build_vo/default/lib/coq" export COQCORELIB="$PWD/_build_vo/default/lib/coq-core" CI_BRANCH="$(git rev-parse --abbrev-ref HEAD)" export CI_BRANCH elif [ -d "$PWD/_build/install/default/" ]; then # Full Dune build, we basically do what `dune exec --` does export OCAMLPATH="$PWD/_build/install/default/lib/:$OCAMLPATH" export COQBIN="$PWD/_build/install/default/bin" export COQLIB="$PWD/_build/install/default/lib/coq" export COQCORELIB="$PWD/_build/install/default/lib/coq-core" CI_BRANCH="$(git rev-parse --abbrev-ref HEAD)" export CI_BRANCH fi export PATH="$COQBIN:$PATH" # Coq's tools need an ending slash :S, we should fix them. export COQBIN="$COQBIN/" ls -l "$COQBIN" # Where we download and build external developments CI_BUILD_DIR="$PWD/_build_ci" # Where we install external binaries and ocaml libraries CI_INSTALL_DIR="$PWD/_install_ci" ls -l "$CI_BUILD_DIR" || true declare -A overlays # overlay [] # creates an overlay for project using a given url and branch which is # active for prnumber or prbranch. prbranch defaults to ref. function overlay() { local project=$1 local ov_url=$2 local ov_ref=$3 local ov_prnumber=$4 local ov_prbranch=$5 : "${ov_prbranch:=$ov_ref}" if [ "$CI_PULL_REQUEST" = "$ov_prnumber" ] || [ "$CI_BRANCH" = "$ov_prbranch" ]; then if ! is_in_projects "$project"; then echo "Error: $1 is not a known project which can be overlayed" exit 1 fi overlays[${project}_URL]=$ov_url overlays[${project}_REF]=$ov_ref fi } set +x # shellcheck source=ci-basic-overlay.sh . "${ci_dir}/ci-basic-overlay.sh" for overlay in "${ci_dir}"/user-overlays/*.sh; do # shellcheck source=/dev/null # the directoy can be empty if [ -e "${overlay}" ]; then . 
"${overlay}"; fi done set -x # [git_download project] will download [project] and unpack it # in [$CI_BUILD_DIR/project] if the folder does not exist already; # if it does, it will do nothing except print a warning (this can be # useful when building locally). # Note: when $FORCE_GIT is set to 1 or when $CI is unset or empty # (local build), it uses git clone to perform the download. git_download() { local project=$1 local dest="$CI_BUILD_DIR/$project" local giturl_var="${project}_CI_GITURL" local giturl="${!giturl_var}" local ref_var="${project}_CI_REF" local ref="${!ref_var}" local ov_url=${overlays[${project}_URL]} local ov_ref=${overlays[${project}_REF]} if [ -d "$dest" ]; then echo "Warning: download and unpacking of $project skipped because $dest already exists." elif [[ $ov_url ]] || [ "$WITH_SUBMODULES" = "1" ] || [ "$CI" = "" ]; then git clone "$giturl" "$dest" pushd "$dest" git checkout "$ref" git log -n 1 if [[ $ov_url ]]; then # In CI we merge into the upstream branch to stay synchronized # Locally checkout the overlay and rebase on upstream # We act differently because merging is what will happen when the PR is merged # but rebasing produces something that is nicer to edit if [[ $CI ]]; then git -c pull.rebase=false -c user.email=nobody@example.invalid -c user.name=Nobody \ pull --no-ff "$ov_url" "$ov_ref" git log -n 1 HEAD^2 || true # no merge commit if the overlay was merged upstream git log -n 1 else git remote add -t "$ov_ref" -f overlay "$ov_url" git checkout -b "$ov_ref" overlay/"$ov_ref" git rebase "$ref" git log -n 1 fi fi if [ "$WITH_SUBMODULES" = 1 ]; then git submodule update --init --recursive fi popd else # When possible, we download tarballs to reduce bandwidth and latency local archiveurl_var="${project}_CI_ARCHIVEURL" local archiveurl="${!archiveurl_var}" mkdir -p "$dest" pushd "$dest" local commit commit=$(git ls-remote "$giturl" "refs/heads/$ref" | cut -f 1) if [[ "$commit" == "" ]]; then # $ref must have been a tag or hash, not a branch commit="$ref" fi wget "$archiveurl/$commit.tar.gz" tar xfz "$commit.tar.gz" --strip-components=1 rm -f "$commit.tar.gz" popd fi } make() { # +x: add x only if defined if [ -z "${MAKEFLAGS+x}" ] && [ -n "${NJOBS}" ]; then # Not submake and parallel make requested command make -j "$NJOBS" "$@" else command make "$@" fi } # run make -k; make again if it failed so that the failing file comes last # makes it easier to find the error messages in the CI log function make_full() { if ! make -k "$@"; then make -k "$@"; exit 1; fi } coq-8.15.0/dev/ci/ci-compcert.sh000066400000000000000000000006621417001151100162740ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download compcert if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COQCOPTS='-native-compiler no -w -undeclared-scope -w -omega-is-deprecated' ( cd "${CI_BUILD_DIR}/compcert" ./configure -ignore-coq-version x86_32-linux -use-external-MenhirLib -use-external-Flocq make make check-proof COQCHK='"$(COQBIN)coqchk" -silent -o $(COQINCLUDES)' ) coq-8.15.0/dev/ci/ci-coq_dpdgraph.sh000066400000000000000000000003571417001151100171140ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . 
"${ci_dir}/ci-common.sh" git_download coq_dpdgraph if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/coq_dpdgraph" autoconf ./configure make make test-suite ) coq-8.15.0/dev/ci/ci-coq_performance_tests.sh000066400000000000000000000004121417001151100210360ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download coq_performance_tests if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/coq_performance_tests" make_full coq perf-Sanity make validate make install ) coq-8.15.0/dev/ci/ci-coq_tools.sh000066400000000000000000000006561417001151100164650ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download coq_tools if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi jason_msg() { echo "The build broke, if an overlay is needed, mention @JasonGross in describing the expected change in Coq that needs to be taken into account, and he'll prepare a fix for coq-tools" exit $1 } ( cd "${CI_BUILD_DIR}/coq_tools" make check || jason_msg $? ) coq-8.15.0/dev/ci/ci-coqhammer.sh000066400000000000000000000002761417001151100164350ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download coqhammer if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/coqhammer" make ) coq-8.15.0/dev/ci/ci-coqprime.sh000066400000000000000000000003571417001151100163000ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download coqprime if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ulimit -s ulimit -s 65536 ulimit -s ( cd "${CI_BUILD_DIR}/coqprime" make make install ) coq-8.15.0/dev/ci/ci-coqtail.sh000066400000000000000000000003431417001151100161100ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download coqtail if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/coqtail" PYTHONPATH=python python3 -m pytest tests/coq ) coq-8.15.0/dev/ci/ci-coquelicot.sh000066400000000000000000000004621417001151100166250ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download coquelicot if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/coquelicot" if ! [ -x ./configure ]; then autoreconf -i -s ./configure fi ./remake "-j${NJOBS}" ./remake install ) coq-8.15.0/dev/ci/ci-corn.sh000066400000000000000000000003771417001151100154240ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download corn if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COQEXTRAFLAGS='-native-compiler no' ( cd "${CI_BUILD_DIR}/corn" ./configure.sh make make install ) coq-8.15.0/dev/ci/ci-cross_crypto.sh000066400000000000000000000003261417001151100172060ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" WITH_SUBMODULES=1 git_download cross_crypto if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/cross_crypto" make ) coq-8.15.0/dev/ci/ci-deriving.sh000066400000000000000000000003301417001151100162570ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . 
"${ci_dir}/ci-common.sh" git_download deriving if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/deriving" make make tests make install ) coq-8.15.0/dev/ci/ci-elpi.sh000066400000000000000000000004441417001151100154070ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download elpi git_download hierarchy_builder if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/elpi" make make install ) ( cd "${CI_BUILD_DIR}/hierarchy_builder" make make install ) coq-8.15.0/dev/ci/ci-engine_bench.sh000066400000000000000000000004201417001151100170540ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download engine_bench if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COQEXTRAFLAGS='-native-compiler ondemand' ( cd "${CI_BUILD_DIR}/engine_bench" make coq make coq-perf-Sanity ) coq-8.15.0/dev/ci/ci-equations.sh000066400000000000000000000004201417001151100164600ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download equations if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COQEXTRAFLAGS='-native-compiler no' ( cd "${CI_BUILD_DIR}/equations" ./configure.sh coq make ci make install ) coq-8.15.0/dev/ci/ci-ext_lib.sh000066400000000000000000000003111417001151100160750ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download ext_lib if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/ext_lib" make make install ) coq-8.15.0/dev/ci/ci-fcsl_pcm.sh000066400000000000000000000002741417001151100162450ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download fcsl_pcm if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/fcsl_pcm" make ) coq-8.15.0/dev/ci/ci-fiat_crypto.sh000066400000000000000000000013531417001151100170010ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" WITH_SUBMODULES=1 git_download fiat_crypto if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi # We need a larger stack size to not overflow ocamlopt+flambda when # building the executables. # c.f. https://github.com/coq/coq/pull/8313#issuecomment-416650241 stacksize=32768 # fiat-crypto is not guaranteed to build with the latest version of # bedrock2, so we use the pinned version of bedrock2, but the external # version of other developments make_args=(EXTERNAL_REWRITER=1 EXTERNAL_COQPRIME=1) ( cd "${CI_BUILD_DIR}/fiat_crypto" ulimit -s $stacksize make "${make_args[@]}" pre-standalone-extracted printlite lite make -j 1 "${make_args[@]}" all-except-compiled ) coq-8.15.0/dev/ci/ci-fiat_crypto_legacy.sh000066400000000000000000000011751417001151100203270ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . 
"${ci_dir}/ci-common.sh" WITH_SUBMODULES=1 git_download fiat_crypto_legacy if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi targets1=( print-old-pipeline-lite-hardcoded old-pipeline-lite-hardcoded lite-display-hardcoded ) targets2=( print-old-pipeline-nobigmem-hardcoded old-pipeline-nobigmem-hardcoded nonautogenerated-specific nonautogenerated-specific-display selected-specific selected-specific-display ) export COQEXTRAFLAGS='-native-compiler no' ( cd "${CI_BUILD_DIR}/fiat_crypto_legacy" make "${targets1[@]}" make -j 1 "${targets2[@]}" ) coq-8.15.0/dev/ci/ci-fiat_crypto_ocaml.sh000066400000000000000000000004731417001151100201560ustar00rootroot00000000000000#!/usr/bin/env bash set -e # fiat-crypto job sets up the sources if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" make_args=(EXTERNAL_REWRITER=1 EXTERNAL_COQPRIME=1) ( cd "${CI_BUILD_DIR}/fiat_crypto" make "${make_args[@]}" standalone-ocaml lite-generated-files ) coq-8.15.0/dev/ci/ci-fiat_parsers.sh000066400000000000000000000004441417001151100171400ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" WITH_SUBMODULES=1 git_download fiat_parsers if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ulimit -s ulimit -s 65536 ulimit -s ( cd "${CI_BUILD_DIR}/fiat_parsers" make parsers parsers-examples make fiat-core ) coq-8.15.0/dev/ci/ci-flocq.sh000066400000000000000000000004761417001151100155670ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download flocq if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/flocq" if ! [ -x ./configure ]; then autoconf ./configure COQEXTRAFLAGS="-compat 8.13"; fi ./remake "-j${NJOBS}" ./remake install ) coq-8.15.0/dev/ci/ci-fourcolor.sh000066400000000000000000000003151417001151100164650ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download fourcolor if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/fourcolor" make make install ) coq-8.15.0/dev/ci/ci-gappa.sh000066400000000000000000000010111417001151100155350ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download gappa_tool git_download gappa if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/gappa_tool" if [ ! -x ./configure ]; then autoreconf touch stamp-config_h.in ./configure --prefix="$CI_INSTALL_DIR" fi ./remake "-j${NJOBS}" ./remake install ) ( cd "${CI_BUILD_DIR}/gappa" if [ ! -x ./configure ]; then autoconf ./configure fi ./remake "-j${NJOBS}" ./remake install ) coq-8.15.0/dev/ci/ci-geocoq.sh000066400000000000000000000003111417001151100157240ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download geocoq if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/geocoq" ./configure.sh make ) coq-8.15.0/dev/ci/ci-hott.sh000066400000000000000000000003041417001151100154270ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download hott if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/hott" make make validate ) coq-8.15.0/dev/ci/ci-iris.sh000066400000000000000000000017711417001151100154300ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . 
"${ci_dir}/ci-common.sh" git_download iris_examples # Extract required version of Iris (avoiding "+" which does not work on MacOS :( *) iris_CI_REF=$(grep -F '"coq-iris-heap-lang"' < "${CI_BUILD_DIR}/iris_examples/coq-iris-examples.opam" | sed 's/.*"dev\.[0-9][0-9.-]*\.\([0-9a-z][0-9a-z]*\)".*/\1/') [ -n "$iris_CI_REF" ] || { echo "Could not find Iris dependency version" && exit 1; } # Download Iris git_download iris # Extract required version of std++ stdpp_CI_REF=$(grep -F '"coq-stdpp"' < "${CI_BUILD_DIR}/iris/coq-iris.opam" | sed 's/.*"dev\.[0-9][0-9.-]*\.\([0-9a-z][0-9a-z]*\)".*/\1/') [ -n "$stdpp_CI_REF" ] || { echo "Could not find stdpp dependency version" && exit 1; } # Download std++ git_download stdpp if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi # Build ( cd "${CI_BUILD_DIR}/stdpp" make make install ) ( cd "${CI_BUILD_DIR}/iris" make make validate make install ) ( cd "${CI_BUILD_DIR}/iris_examples" make make install ) coq-8.15.0/dev/ci/ci-itauto.sh000066400000000000000000000003631417001151100157630ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download itauto if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COQCOPTS='-native-compiler ondemand' ( cd "${CI_BUILD_DIR}/itauto" make make install ) coq-8.15.0/dev/ci/ci-math_classes.sh000066400000000000000000000003441417001151100171230ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download math_classes if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/math_classes" ./configure.sh make make install ) coq-8.15.0/dev/ci/ci-mathcomp.sh000066400000000000000000000003461417001151100162670ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download mathcomp if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/mathcomp/mathcomp" make make test-suite make install ) coq-8.15.0/dev/ci/ci-mczify.sh000066400000000000000000000002701417001151100157540ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download mczify if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/mczify" make ) coq-8.15.0/dev/ci/ci-menhir.sh000066400000000000000000000006071417001151100157410ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download menhirlib if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/menhirlib" dune build @install -p menhirLib,menhirSdk,menhir dune install -p menhirLib,menhirSdk,menhir menhir menhirSdk menhirLib --prefix="$CI_INSTALL_DIR" make -C coq-menhirlib make -C coq-menhirlib install ) coq-8.15.0/dev/ci/ci-metacoq.sh000066400000000000000000000004531417001151100161070ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download metacoq if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COQEXTRAFLAGS='-native-compiler no' ( cd "${CI_BUILD_DIR}/metacoq" ./configure.sh local make .merlin make ci-local-noclean make install ) coq-8.15.0/dev/ci/ci-mtac2.sh000066400000000000000000000003401417001151100154570ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . 
"${ci_dir}/ci-common.sh" git_download mtac2 if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/mtac2" coq_makefile -f _CoqProject -o Makefile make ) coq-8.15.0/dev/ci/ci-oddorder.sh000066400000000000000000000003131417001151100162530ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download oddorder if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/oddorder" make make install ) coq-8.15.0/dev/ci/ci-paramcoq.sh000066400000000000000000000003531417001151100162600ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download paramcoq if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/paramcoq" make make install cd test-suite make examples ) coq-8.15.0/dev/ci/ci-perennial.sh000066400000000000000000000004051417001151100164300ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" WITH_SUBMODULES=1 git_download perennial if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ulimit -s ulimit -s 65536 ulimit -s ( cd "${CI_BUILD_DIR}/perennial" make TIMED=false lite ) coq-8.15.0/dev/ci/ci-quickchick.sh000066400000000000000000000003731417001151100165750ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download quickchick if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/quickchick" make make install INSTALLDIR=$CI_INSTALL_DIR/bin make tests ) coq-8.15.0/dev/ci/ci-reduction_effects.sh000066400000000000000000000003511417001151100201460ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download reduction_effects if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/reduction_effects" make make test make install ) coq-8.15.0/dev/ci/ci-relation_algebra.sh000066400000000000000000000003331417001151100177450ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download relation_algebra if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/relation_algebra" make make install ) coq-8.15.0/dev/ci/ci-rewriter.sh000066400000000000000000000003131417001151100163140ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download rewriter if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/rewriter" make make install ) coq-8.15.0/dev/ci/ci-sf.sh000066400000000000000000000004421417001151100150640ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download sf if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi cd "$CI_BUILD_DIR/sf" ( cd lf-current make ) ( cd plf-current make ) ( cd vfa-current make ) # ( cd qc-current # make clean # make # ) coq-8.15.0/dev/ci/ci-simple_io.sh000066400000000000000000000003231417001151100164320ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download simple_io if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/simple_io" make build make install ) coq-8.15.0/dev/ci/ci-stdlib2.sh000066400000000000000000000003331417001151100160160ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . 
"${ci_dir}/ci-common.sh" git_download stdlib2 if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/stdlib2/src" ./bootstrap make make install ) coq-8.15.0/dev/ci/ci-tlc.sh000066400000000000000000000003011417001151100152300ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download tlc if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/tlc" make make install ) coq-8.15.0/dev/ci/ci-unicoq.sh000066400000000000000000000003611417001151100157520ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download unicoq if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/unicoq" coq_makefile -f _CoqProject -o Makefile make make install ) coq-8.15.0/dev/ci/ci-unimath.sh000066400000000000000000000006351417001151100161250ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download unimath if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COQEXTRAFLAGS='-native-compiler no' ( cd "${CI_BUILD_DIR}/unimath" # DisplayedInserter consumes too much memory for the shared workers sed -i.bak 's|DisplayedBicats/Examples/DisplayedInserter.v||' UniMath/Bicategories/.package/files make BUILD_COQ=no ) coq-8.15.0/dev/ci/ci-verdi_raft.sh000066400000000000000000000011101417001151100165720ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download struct_tact git_download inf_seq_ext git_download cheerios git_download verdi git_download verdi_raft if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "${CI_BUILD_DIR}/struct_tact" ./configure make make install ) ( cd "${CI_BUILD_DIR}/inf_seq_ext" ./configure make make install ) ( cd "${CI_BUILD_DIR}/cheerios" ./configure make make install ) ( cd "${CI_BUILD_DIR}/verdi" ./configure make make install ) ( cd "${CI_BUILD_DIR}/verdi_raft" ./configure make ) coq-8.15.0/dev/ci/ci-vscoq.sh000066400000000000000000000003121417001151100156030ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download vscoq if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi ( cd "$CI_BUILD_DIR/vscoq/language-server" make build ) coq-8.15.0/dev/ci/ci-vst.sh000066400000000000000000000004051417001151100152670ustar00rootroot00000000000000#!/usr/bin/env bash set -e ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" git_download vst if [ "$DOWNLOAD_ONLY" ]; then exit 0; fi export COMPCERT=bundled ulimit -s ulimit -s 65536 ulimit -s ( cd "${CI_BUILD_DIR}/vst" make IGNORECOQVERSION=true ) coq-8.15.0/dev/ci/ci-wrapper.sh000077500000000000000000000011111417001151100161310ustar00rootroot00000000000000#!/usr/bin/env bash # Use this script to preserve the exit code of $CI_SCRIPT when piping # it to `tee time-of-build.log`. We have a separate script, because # this only works in bash, which we don't require project-wide. set -eo pipefail CI_NAME="$1" CI_SCRIPT="ci-${CI_NAME}.sh" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # assume this script is in dev/ci/, cd to the root Coq directory cd "${DIR}/../.." export TIMED=1 bash "${DIR}/${CI_SCRIPT}" 2>&1 | tee time-of-build.log echo 'Aggregating timing log...' 
python ./tools/make-one-time-file.py time-of-build.log coq-8.15.0/dev/ci/docker/000077500000000000000000000000001417001151100147765ustar00rootroot00000000000000coq-8.15.0/dev/ci/docker/README.md000066400000000000000000000037201417001151100162570ustar00rootroot00000000000000## Overall Docker Setup for Coq's CI. This directory provides Docker images to be used by Coq's CI. The images do support Docker autobuild on `hub.docker.com` and Gitlab's private registry. The Gitlab CI will build a Docker image unless the CI environment variable `SKIP_DOCKER` is set to `true`. This image will be stored in the [Gitlab container registry](https://gitlab.com/coq/coq/container_registry) under the name given by the `CACHEKEY` variable from the [Gitlab CI configuration file](../../../.gitlab-ci.yml). `SKIP_DOCKER` is set to "true" in `https://gitlab.com/coq/coq` to avoid running a lengthy redundant job. For efficiency, users should enable that setting in forked repositories after the initial Docker build in the fork succeeds. The steps to generate a new Docker image are: - Update the `CACHEKEY` variable in .gitlab-ci.yml with the date and md5. - Submit the change in a PR. coqbot will detect that the Dockerfile has changed and will trigger a pipeline build with `SKIP_DOCKER` set to `false`. This will run a `docker-boot` process, and once completed, a new Docker image will be available in the container registry, with the name set in `CACHEKEY`. - Any pipeline with the same `CACHEKEY` will now automatically reuse that image without rebuilding it from scratch. ## Manual Building You can also manually build and push any image: - Build the image `docker build -t base:$VERSION .` To upload/push to your hub: - Create a https://hub.docker.com account. - Log into your space `docker login --username=$USER` - Push the image: + `docker tag base:$VERSION $USER/base:$VERSION` + `docker push $USER/base:$VERSION` ## Debugging / Misc To open a shell inside an image, do `docker run -ti --entrypoint /bin/bash <image>` Each `RUN` command creates a "layer", thus a Docker build is incremental and it always helps to put the things that are updated more often at the end. ## Possible Improvements: - Use ARG for customizing versions, centralize variable setup; coq-8.15.0/dev/ci/docker/bionic_coq/000077500000000000000000000000001417001151100191035ustar00rootroot00000000000000coq-8.15.0/dev/ci/docker/bionic_coq/Dockerfile000066400000000000000000000061171417001151100211020ustar00rootroot00000000000000# Update CACHEKEY in the .gitlab-ci.yml when modifying this file. 
FROM ubuntu:bionic LABEL maintainer="e@x80.org" ENV DEBIAN_FRONTEND="noninteractive" # We need libgmp-dev:i386 for zarith; maybe we could also install GTK RUN dpkg --add-architecture i386 RUN apt-get update -qq && apt-get install --no-install-recommends -y -qq \ # Dependencies of the image, the test-suite and external projects m4 automake autoconf time wget rsync git gcc-multilib build-essential unzip jq \ # Dependencies of ZArith perl libgmp-dev libgmp-dev:i386 \ # Dependencies of lablgtk (for CoqIDE) libgtksourceview-3.0-dev \ # Dependencies of Gappa libboost1.65-all-dev libmpfr-dev autoconf-archive bison flex \ # Dependencies of stdlib and sphinx doc texlive-latex-extra texlive-fonts-recommended texlive-xetex latexmk \ python3-pip python3-setuptools python3-pexpect python3-bs4 fonts-freefont-otf \ # Dependencies of source-doc and coq-makefile texlive-science tipa # More dependencies of the sphinx doc, pytest for coqtail RUN pip3 install docutils==0.16 sphinx==3.0.2 sphinx_rtd_theme==0.4.3 \ antlr4-python3-runtime==4.7.1 sphinxcontrib-bibtex==0.4.2 \ pytest==5.4.3 # We need to install OPAM 2.0 manually for now. RUN wget https://github.com/ocaml/opam/releases/download/2.0.6/opam-2.0.6-x86_64-linux -O /usr/bin/opam && chmod 755 /usr/bin/opam # Basic OPAM setup ENV NJOBS="2" \ OPAMJOBS="2" \ OPAMROOT=/root/.opamcache \ OPAMROOTISOK="true" \ OPAMYES="true" # Base opam is the set of base packages required by Coq ENV COMPILER="4.05.0" # Common OPAM packages ENV BASE_OPAM="zarith.1.10 ocamlfind.1.9.1 ounit2.2.2.3 odoc.1.5.3" \ CI_OPAM="ocamlgraph.1.8.8 yojson.1.7.0 cppo.1.6.8" \ BASE_ONLY_OPAM="dune.2.7.1 elpi.1.13.6 stdlib-shims.0.1.0" # BASE switch; CI_OPAM contains Coq's CI dependencies. ENV COQIDE_OPAM="cairo2.0.6.1 lablgtk3-sourceview3.3.1.0" # Must add this to COQIDE_OPAM{,_EDGE} when we update the opam # packages "lablgtk3-gtksourceview3" # base switch RUN opam init -a --disable-sandboxing --compiler="$COMPILER" default https://opam.ocaml.org && eval $(opam env) && opam update && \ opam install $BASE_OPAM $COQIDE_OPAM $CI_OPAM $BASE_ONLY_OPAM # base+32bit switch, note the zarith hack RUN opam switch create "${COMPILER}+32bit" && eval $(opam env) && \ i386 env CC='gcc -m32' opam install zarith.1.10 && \ opam install $BASE_OPAM # EDGE switch, dune 2.8 is required for OCaml 4.12 ENV COMPILER_EDGE="4.13.0" \ BASE_OPAM_EDGE="dune.2.9.1 dune-release.1.5.0" # EDGE+flambda switch, we install CI_OPAM as to be able to use # `ci-template-flambda` with everything. 
RUN opam switch create "${COMPILER_EDGE}+flambda" \ --repositories default,ocaml-beta=git+https://github.com/ocaml/ocaml-beta-repository.git \ --packages="ocaml-variants.${COMPILER_EDGE}+options,ocaml-option-flambda" && eval $(opam env) && \ opam install $BASE_OPAM $BASE_OPAM_EDGE $COQIDE_OPAM $CI_OPAM RUN opam clean -a -c # set the locale for the benefit of Python ENV LANG C.UTF-8 coq-8.15.0/dev/ci/nix/000077500000000000000000000000001417001151100143255ustar00rootroot00000000000000coq-8.15.0/dev/ci/nix/CoLoR.nix000066400000000000000000000000621417001151100160210ustar00rootroot00000000000000{ bignums }: { coqBuildInputs = [ bignums ]; } coq-8.15.0/dev/ci/nix/CompCert.nix000066400000000000000000000002651417001151100165640ustar00rootroot00000000000000{ ocamlPackages }: { buildInputs = with ocamlPackages; [ ocaml findlib menhir ]; configure = "./configure -ignore-coq-version x86_64-linux"; make = "make all check-proof"; } coq-8.15.0/dev/ci/nix/Corn.nix000066400000000000000000000001151417001151100157430ustar00rootroot00000000000000{ bignums, math-classes }: { coqBuildInputs = [ bignums math-classes ]; } coq-8.15.0/dev/ci/nix/Elpi.nix000066400000000000000000000001231417001151100157320ustar00rootroot00000000000000{ ocamlPackages }: { buildInputs = with ocamlPackages; [ ocaml findlib elpi ]; } coq-8.15.0/dev/ci/nix/GeoCoq.nix000066400000000000000000000001231417001151100162160ustar00rootroot00000000000000{ mathcomp }: { coqBuildInputs = [ mathcomp ]; configure = "./configure.sh"; } coq-8.15.0/dev/ci/nix/HoTT.nix000066400000000000000000000002161417001151100156620ustar00rootroot00000000000000{ autoconf, automake }: { buildInputs = [ autoconf automake ]; configure = "./autogen.sh && ./configure"; make = "make all validate"; } coq-8.15.0/dev/ci/nix/README.md000066400000000000000000000022351417001151100156060ustar00rootroot00000000000000# Working on third-party developments with *this* version of Coq Aim: getting an environment suitable for working on a third-party development using the current version of Coq (i.e., built from the current state of this repository). Dive into such an environment, for the project `example` by running, from the root of this repository: ./dev/ci/nix/shell example This will build Coq and the other dependencies of the `example` project, then open a shell with all these dependencies available (e.g., `coqtop` is in path). Additionally, three environment variables are set, to abstract over the build-system of that project: `configure`, `make`, and `clean`. Therefore, after changing the working directory to the root of the sources of that project, the contents of these variables can be evaluated to respectively set-up, build, and clean the project. ## Variant: nocoq The dependencies of the third-party developments are split into `buildInputs` and `coqBuildInputs`. The second list gathers the Coq libraries. In case you only want the non-coq dependencies (because you want to use Coq from your `PATH`), set the environment variable `NOCOQ` to some non-empty value. 
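For concreteness, here is a minimal sketch of the workflow described above. The project name `CompCert` is just an example (any project listed in `dev/ci/nix/default.nix` works the same way), and the path to the project's sources is an assumption — substitute your own checkout.

```sh
# From the root of the Coq repository: build Coq and the project's
# dependencies, then drop into a shell where they are available.
./dev/ci/nix/shell CompCert
# Variant: NOCOQ=1 ./dev/ci/nix/shell CompCert   (use Coq from your own PATH)

# Inside that shell, move to the root of the third-party sources
# (illustrative path) and drive its build via the provided variables.
cd ~/src/CompCert
eval "$configure"   # set up the project
eval "$make"        # build it against this version of Coq
eval "$clean"       # clean the build when you are done
```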
coq-8.15.0/dev/ci/nix/VST.nix000066400000000000000000000001171417001151100155200ustar00rootroot00000000000000{}: rec { make = "make IGNORECOQVERSION=true"; clean = "${make} clean"; } coq-8.15.0/dev/ci/nix/bedrock2.nix000066400000000000000000000003071417001151100165400ustar00rootroot00000000000000{}: { configure = "git submodule update --init --recursive"; clean = "(cd deps/bbv && make clean); (cd deps/riscv-coq && make clean); (cd compiler && make clean); (cd bedrock2 && make clean)"; } coq-8.15.0/dev/ci/nix/bignums.nix000066400000000000000000000001011417001151100165010ustar00rootroot00000000000000{ ocamlPackages }: { buildInputs = [ ocamlPackages.ocaml ]; } coq-8.15.0/dev/ci/nix/coq.nix000066400000000000000000000003341417001151100156270ustar00rootroot00000000000000{ stdenv, callPackage, branch, wd }: let coq = callPackage wd { buildDoc = false; doInstallCheck = false; coq-version = "8.9"; }; in coq.overrideAttrs (o: { name = "coq-local-${branch}"; src = fetchGit "${wd}"; }) coq-8.15.0/dev/ci/nix/coq_dpdgraph.nix000066400000000000000000000003061417001151100174770ustar00rootroot00000000000000{ autoconf, ocamlPackages }: { buildInputs = [ autoconf ] ++ (with ocamlPackages; [ ocaml findlib camlp5 ocamlgraph ]); configure = "autoconf && ./configure"; make = "make all test-suite"; } coq-8.15.0/dev/ci/nix/coquelicot.nix000066400000000000000000000003171417001151100172150ustar00rootroot00000000000000{ autoconf, automake, ssreflect }: { buildInputs = [ autoconf automake ]; coqBuildInputs = [ ssreflect ]; configure = "./autogen.sh && ./configure"; make = "./remake"; clean = "./remake clean"; } coq-8.15.0/dev/ci/nix/cross_crypto.nix000066400000000000000000000001341417001151100175740ustar00rootroot00000000000000{}: { configure = "git submodule update --init --recursive"; clean = "make cleanall"; } coq-8.15.0/dev/ci/nix/default.nix000066400000000000000000000113131417001151100164700ustar00rootroot00000000000000{ pkgs ? import ../../nixpkgs.nix {} , branch , wd , project ? "xyz" , withCoq ? true , bn ? 
"master" }: with pkgs; # Coq from this directory let coq = callPackage ./coq.nix { inherit branch wd; }; in # Third-party libraries, built with this Coq let coqPackages = mkCoqPackages coq; in let mathcomp = coqPackages.mathcomp.overrideAttrs (o: { name = "coq-git-mathcomp-git"; src = fetchTarball https://github.com/math-comp/math-comp/archive/master.tar.gz; }); in let ssreflect = coqPackages.ssreflect.overrideAttrs (o: { inherit (mathcomp) src; }); in let coq-ext-lib = coqPackages.coq-ext-lib.overrideAttrs (o: { src = fetchTarball "https://github.com/coq-community/coq-ext-lib/tarball/master"; }); in let simple-io = (coqPackages.simple-io.override { inherit coq-ext-lib; }) .overrideAttrs (o: { src = fetchTarball "https://github.com/Lysxia/coq-simple-io/tarball/master"; }); in let bignums = coqPackages.bignums.overrideAttrs (o: if bn == "release" then {} else if bn == "master" then { src = fetchTarball https://github.com/coq/bignums/archive/master.tar.gz; } else { src = fetchTarball bn; } ); in let coqprime = coqPackages.coqprime.override { inherit coq bignums; }; in let math-classes = (coqPackages.math-classes.override { inherit coq bignums; }) .overrideAttrs (o: { src = fetchTarball "https://github.com/coq-community/math-classes/archive/master.tar.gz"; }); in let corn = (coqPackages.corn.override { inherit coq bignums math-classes; }) .overrideAttrs (o: { src = fetchTarball "https://github.com/coq-community/corn/archive/master.tar.gz"; }); in let stdpp = coqPackages.stdpp.overrideAttrs (o: { src = fetchTarball "https://gitlab.mpi-sws.org/iris/stdpp/-/archive/master/stdpp-master.tar.bz2"; }); in let iris = (coqPackages.iris.override { inherit coq stdpp; }) .overrideAttrs (o: { src = fetchTarball "https://gitlab.mpi-sws.org/iris/iris/-/archive/master/iris-master.tar.bz2"; propagatedBuildInputs = [ stdpp ]; }); in let unicoq = callPackage ./unicoq { inherit coq; }; in let StructTact = coqPackages.StructTact.overrideAttrs (o: { src = fetchTarball "https://github.com/uwplse/StructTact/tarball/master"; }); in let Cheerios = (coqPackages.Cheerios.override { inherit StructTact; }) .overrideAttrs (o: { src = fetchTarball "https://github.com/uwplse/cheerios/tarball/master"; }); in let Verdi = (coqPackages.Verdi.override { inherit Cheerios ssreflect; }) .overrideAttrs (o: { src = fetchTarball "https://github.com/uwplse/verdi/tarball/master"; }); in let flocq = coqPackages.flocq.overrideAttrs (o: { src = fetchTarball "https://gitlab.inria.fr/flocq/flocq/-/archive/master/flocq-master.tar.gz"; configurePhase = '' autoreconf ${bash}/bin/bash configure --libdir=$out/lib/coq/${coq.coq-version}/user-contrib/Flocq ''; buildPhase = '' ./remake ''; }); in let callPackage = newScope { inherit coq bignums coq-ext-lib coqprime corn iris math-classes mathcomp simple-io ssreflect stdpp unicoq Verdi flocq; }; in # Environments for building CI libraries with this Coq let projects = { bedrock2 = callPackage ./bedrock2.nix {}; bignums = callPackage ./bignums.nix {}; CoLoR = callPackage ./CoLoR.nix {}; CompCert = callPackage ./CompCert.nix {}; coq_dpdgraph = callPackage ./coq_dpdgraph.nix {}; coquelicot = callPackage ./coquelicot.nix {}; Corn = callPackage ./Corn.nix {}; cross_crypto = callPackage ./cross_crypto.nix {}; Elpi = callPackage ./Elpi.nix {}; fiat_crypto = callPackage ./fiat_crypto.nix {}; flocq = callPackage ./flocq.nix {}; formal-topology = callPackage ./formal-topology.nix {}; gappa = callPackage ./gappa.nix {}; GeoCoq = callPackage ./GeoCoq.nix {}; HoTT = callPackage ./HoTT.nix {}; iris = callPackage 
./iris.nix {}; lambda-rust = callPackage ./lambda-rust.nix {}; math_classes = callPackage ./math_classes.nix {}; mathcomp = {}; mtac2 = callPackage ./mtac2.nix {}; oddorder = callPackage ./oddorder.nix {}; quickchick = callPackage ./quickchick.nix {}; simple-io = callPackage ./simple-io.nix {}; verdi-raft = callPackage ./verdi-raft.nix {}; VST = callPackage ./VST.nix {}; }; in if !builtins.hasAttr project projects then throw "Unknown project “${project}”; choose from: ${pkgs.lib.concatStringsSep ", " (builtins.attrNames projects)}." else let prj = projects."${project}"; in let inherit (stdenv.lib) optional optionals; in stdenv.mkDerivation { name = "shell-for-${project}-in-${branch}"; buildInputs = [ python ] ++ optional withCoq coq ++ (prj.buildInputs or []) ++ optionals withCoq (prj.coqBuildInputs or []) ; configure = prj.configure or "true"; make = prj.make or "make"; clean = prj.clean or "make clean"; } coq-8.15.0/dev/ci/nix/fiat_crypto.nix000066400000000000000000000003231417001151100173660ustar00rootroot00000000000000{ ocamlPackages }: { buildInputs = with ocamlPackages; [ ocaml findlib ]; configure = "git submodule update --init --recursive && ulimit -s 32768"; make = "make c-files printlite lite && make -j 1 coq"; } coq-8.15.0/dev/ci/nix/flocq.nix000066400000000000000000000002421417001151100161470ustar00rootroot00000000000000{ autoconf, automake }: { buildInputs = [ autoconf automake ]; configure = "./autogen.sh && ./configure"; make = "./remake"; clean = "./remake clean"; } coq-8.15.0/dev/ci/nix/formal-topology.nix000066400000000000000000000000531417001151100201750ustar00rootroot00000000000000{ corn }: { coqBuildInputs = [ corn ]; } coq-8.15.0/dev/ci/nix/gappa.nix000066400000000000000000000003221417001151100161320ustar00rootroot00000000000000{ autoconf, automake, ocaml, flocq }: { buildInputs = [ autoconf automake ocaml ]; coqBuildInputs = [ flocq ]; configure = "autoreconf && ./configure"; make = "./remake"; clean = "./remake clean"; } coq-8.15.0/dev/ci/nix/iris.nix000066400000000000000000000000551417001151100160130ustar00rootroot00000000000000{ stdpp }: { coqBuildInputs = [ stdpp ]; } coq-8.15.0/dev/ci/nix/lambda-rust.nix000066400000000000000000000000531417001151100172560ustar00rootroot00000000000000{ iris }: { coqBuildInputs = [ iris ]; } coq-8.15.0/dev/ci/nix/math_classes.nix000066400000000000000000000001221417001151100175060ustar00rootroot00000000000000{ bignums }: { coqBuildInputs = [ bignums ]; configure = "./configure.sh"; } coq-8.15.0/dev/ci/nix/mtac2.nix000066400000000000000000000002261417001151100160530ustar00rootroot00000000000000{ coq, unicoq }: { buildInputs = with coq.ocamlPackages; [ ocaml findlib camlp5 ]; coqBuildInputs = [ unicoq ]; configure = "./configure.sh"; } coq-8.15.0/dev/ci/nix/oddorder.nix000066400000000000000000000000631417001151100166460ustar00rootroot00000000000000{ mathcomp }: { coqBuildInputs = [ mathcomp ]; } coq-8.15.0/dev/ci/nix/quickchick.nix000066400000000000000000000002541417001151100171640ustar00rootroot00000000000000{ ocamlPackages, ssreflect, coq-ext-lib, simple-io }: { buildInputs = with ocamlPackages; [ ocaml findlib ocamlbuild num ]; coqBuildInputs = [ ssreflect simple-io ]; } coq-8.15.0/dev/ci/nix/shell000077500000000000000000000007271417001151100153700ustar00rootroot00000000000000#!/usr/bin/env sh ## This file should be run from the root of the Coq source tree BRANCH=$(git rev-parse --abbrev-ref HEAD) echo "Branch: $BRANCH in $PWD" if [ "$#" -ne 1 ]; then PROJECT="" else PROJECT="--argstr project $1" fi if [ "$BN" ]; then BN="--argstr 
bn ${BN}" else BN="" fi if [ "$NOCOQ" ]; then NOCOQ="--arg withCoq false" else NOCOQ="" fi nix-shell ./dev/ci/nix/ --show-trace --argstr wd $PWD --argstr branch $BRANCH $PROJECT $BN $NOCOQ coq-8.15.0/dev/ci/nix/simple-io.nix000066400000000000000000000002561417001151100167460ustar00rootroot00000000000000{ ocamlPackages, ssreflect, coq-ext-lib, simple-io }: { buildInputs = with ocamlPackages; [ ocaml findlib ocamlbuild num ]; coqBuildInputs = [ ssreflect coq-ext-lib ]; } coq-8.15.0/dev/ci/nix/unicoq/000077500000000000000000000000001417001151100156235ustar00rootroot00000000000000coq-8.15.0/dev/ci/nix/unicoq/default.nix000066400000000000000000000013631417001151100177720ustar00rootroot00000000000000{ stdenv, writeText, coq }: let META = writeText "META" '' archive(native) = "unicoq.cmxa" plugin(native) = "unicoq.cmxs" ''; in stdenv.mkDerivation { name = "coq${coq.coq-version}-unicoq-0.0-git"; src = fetchTarball https://github.com/unicoq/unicoq/archive/master.tar.gz; patches = [ ./unicoq-num.patch ]; buildInputs = [ coq ] ++ (with coq.ocamlPackages; [ ocaml findlib camlp5 num ]); configurePhase = "coq_makefile -f Make -o Makefile"; installFlags = [ "COQLIB=$(out)/lib/coq/${coq.coq-version}/" ]; postInstall = '' cp ${META} META install -d $OCAMLFIND_DESTDIR ln -s $out/lib/coq/${coq.coq-version}/user-contrib/Unicoq $OCAMLFIND_DESTDIR/ install -m 0644 META src/unicoq.a $OCAMLFIND_DESTDIR/Unicoq ''; } coq-8.15.0/dev/ci/nix/unicoq/unicoq-num.patch000066400000000000000000000005361417001151100207430ustar00rootroot00000000000000commit f29bc64ee3d8b36758d17e1f5d50812e0c93063b Author: Vincent Laporte Date: Thu Nov 29 08:59:22 2018 +0000 Make explicit dependency to num diff --git a/Makefile.local b/Makefile.local new file mode 100644 index 0000000..88be365 --- /dev/null +++ b/Makefile.local @@ -0,0 +1 @@ +CAMLPKGS += -package num coq-8.15.0/dev/ci/nix/verdi-raft.nix000066400000000000000000000001121417001151100171020ustar00rootroot00000000000000{ Verdi }: { coqBuildInputs = [ Verdi ]; configure = "./configure"; } coq-8.15.0/dev/ci/platform/000077500000000000000000000000001417001151100153535ustar00rootroot00000000000000coq-8.15.0/dev/ci/platform/coq-pf-01-sysinfo.bat000066400000000000000000000003051417001151100211340ustar00rootroot00000000000000REM Print some debug information ECHO "Root folders" DIR C:\ ECHO "Powershell version" powershell -Command "Get-Host" ECHO "Git installation of Mingw" DIR "C:\Program Files\Git\mingw64\bin\*.exe" coq-8.15.0/dev/ci/platform/coq-pf-02-download.bat000066400000000000000000000002741417001151100212570ustar00rootroot00000000000000REM Download platform script SET PATH=%PATH%;C:\Program Files\7-Zip;C:\Program Files\Git\mingw64\bin ECHO "Downloading %PLATFORM%" curl -L -o platform.zip "%PLATFORM%" 7z x platform.zip coq-8.15.0/dev/ci/platform/coq-pf-03-build.bat000066400000000000000000000012551417001151100205500ustar00rootroot00000000000000REM Build the platform SET CYGROOT=C:\ci\cygwin%ARCH% SET CYGCACHE=C:\ci\cache\cgwin REM Try CYGWIN_QUIET, but still this stage is super verbose SET CYGWIN_QUIET=y SET COQREGTESTING=y REM XXX: make this a variable with the branch name cd platform-* call coq_platform_make_windows.bat ^ -arch=%ARCH% ^ -destcyg=%CYGROOT% ^ -cygcache=%CYGCACHE% ^ -extent=i ^ -parallel=p ^ -jobs=2 ^ -switch=d ^ -override-dev-pkg="coq=%GITHUB_SERVER_URL%/%GITHUB_REPOSITORY%/archive/%GITHUB_SHA%.tar.gz" ^ -override-dev-pkg="coqide=%GITHUB_SERVER_URL%/%GITHUB_REPOSITORY%/archive/%GITHUB_SHA%.tar.gz" ^ || GOTO ErrorExit GOTO :EOF :ErrorExit ECHO ERROR %0 failed EXIT /b 
1 coq-8.15.0/dev/ci/platform/coq-pf-04-installer.bat000066400000000000000000000016251417001151100214500ustar00rootroot00000000000000REM build the installer artifact REM XXX: make this a variable with the branch name cd platform-* REM XXX: This is redundant with the previous scripts, we could centralize it REM In fact, the variable is only needed to access bash SET CYGROOT=C:\ci\cygwin%ARCH% SET BASH=%CYGROOT%\bin\bash MKDIR %GITHUB_WORKSPACE%\artifacts %BASH% --login -c "pwd && ls -la && cd coq-platform && windows/create_installer_windows.sh" || GOTO ErrorExit REM Output is in cygwin home; in general the script has a bit of a REM mess in terms of using the GITHUB_WORKSPACE sometimes, and the REM CYGWIN home some others. I use the path here directly as to avoid REM issues with quoting, which in the previous script required some REM really obscure code. COPY /v /b %CYGROOT%\home\runneradmin\coq-platform\windows_installer\*.exe %GITHUB_WORKSPACE%\artifacts || GOTO ErrorExit GOTO :EOF :ErrorExit ECHO ERROR %0 failed EXIT /b 1 coq-8.15.0/dev/ci/user-overlays/000077500000000000000000000000001417001151100163475ustar00rootroot00000000000000coq-8.15.0/dev/ci/user-overlays/README.md000066400000000000000000000072401417001151100176310ustar00rootroot00000000000000# Add overlays for your pull requests in this directory _Overlays_ let you test pull requests that break the base version of external projects by applying PRs of the external project during CI testing (1 PR per broken external project). Once Coq CI's tests of the external projects pass, the Coq PR can be merged, then the assignee must ask the external projects to merge their PRs (for example by commenting in the external PRs). External projects are then expected to merge their PRs promptly. An overlay file specifies the external PRs that should be applied during CI. A single file can cover multiple external projects. Create your overlay file in the `dev/ci/user-overlays` directory. The name of the overlay file should start with a five-digit pull request number, followed by a dash, anything (by convention, your GitHub nickname and the branch name), then an `.sh` extension (`[0-9]{5}-[a-zA-Z0-9-_]+.sh`). The file must contain a call to the `overlay` function for each affected external project: ``` overlay [] ``` Each call creates an overlay for `project` using a given `giturl` and `ref` which is active for `prnumber` or `prbranch` (`prbranch` defaults to `ref`). For example, an overlay for the project `elpi` that uses the branch `noinstance` from the fork of `SkySkimmer` and is active for pull request `13128`: ``` overlay elpi https://github.com/SkySkimmer/coq-elpi noinstance 13128 ``` The github URL and base branch name for each external project are listed in [`ci-basic-overlay.sh`](../ci-basic-overlay.sh). For example, the entry for `elpi` is ``` project elpi "https://github.com/LPCIC/coq-elpi" "coq-master" ``` But substitute the name of your fork into the URL, e.g. `SkySkimmer/coq-elpi` rather than `LPCIC/coq-elpi`. Use `#` to mark any comments. If the branch name in the external project differs from the Coq branch name, include the external branch name as `[prbranch]` to apply it when you run the test suite locally, e.g. `make ci-elpi`. Overlay files can be created automatically using the script [`create_overlays.sh`](../../dev/tools/create_overlays.sh). ### Branching conventions We suggest you use the convention of identical branch names for the Coq branch and the CI project branch used in the overlay. 
For example, if your Coq PR is in your branch `more_efficient_tc` and breaks `ltac2`, we suggest you create an `ltac2` overlay with a branch named `more_efficient_tc`. ### Typical workflow - Observe that your changes breaks some external projects in CI - Compile your PR. - For each broken project, run `make `, e.g. `make ci-elpi`, which checks out, builds and runs the project in the `_build_ci/` directory. (Make sure the `_build_vo` directory does not exist in your source tree. Or, soon, run `make -f Makefile.dune ci-foo`.) - Make necessary changes, then rerun the script to verify they work. - From the `` subdirectory, commit your changes to a new branch, based on the base branch name listed in `ci-basic-overlay.sh`, for example `coq-master` for elpi. - If necessary, fork the external project from the project's github page. (Only needs to be done once, ever.) - Push to the external project and create a new PR. Make sure you pick the correct base branch in the github GUI for the comparison (e.g. `coq-master` for elpi). - Create the overlay file, add to your Coq PR, push the updated version and verify that the external projects now pass. - When your PR is merged, the assignee notifies the maintainers of the external project to merge the changes you submitted. This should happen promptly; the external project's CI will fail until the change is merged. - Beer. coq-8.15.0/dev/core.dbg000066400000000000000000000010211417001151100145340ustar00rootroot00000000000000load_printer threads.cma load_printer str.cma load_printer zarith.cma load_printer config.cma load_printer clib.cma load_printer boot.cma load_printer dynlink.cma load_printer lib.cma load_printer gramlib.cma load_printer kernel.cma load_printer library.cma load_printer engine.cma load_printer pretyping.cma load_printer interp.cma load_printer proofs.cma load_printer parsing.cma load_printer printing.cma load_printer tactics.cma load_printer vernac.cma load_printer sysinit.cma load_printer stm.cma load_printer toplevel.cma coq-8.15.0/dev/core_dune.dbg000066400000000000000000000010511417001151100155520ustar00rootroot00000000000000load_printer threads.cma load_printer str.cma load_printer zarith.cma load_printer config.cma load_printer clib.cma load_printer boot.cma load_printer dynlink.cma load_printer lib.cma load_printer gramlib.cma load_printer coqrun.cma load_printer kernel.cma load_printer library.cma load_printer engine.cma load_printer pretyping.cma load_printer interp.cma load_printer proofs.cma load_printer parsing.cma load_printer printing.cma load_printer tactics.cma load_printer vernac.cma load_printer sysinit.cma load_printer stm.cma load_printer toplevel.cma coq-8.15.0/dev/db000066400000000000000000000001451417001151100134440ustar00rootroot00000000000000source core.dbg load_printer ltac_plugin.cmo load_printer top_printers.cmo source top_printers.dbg coq-8.15.0/dev/doc/000077500000000000000000000000001417001151100137015ustar00rootroot00000000000000coq-8.15.0/dev/doc/INSTALL.make.md000066400000000000000000000202061417001151100162450ustar00rootroot00000000000000Quick Installation Procedure using Make. ---------------------------------------- $ ./configure $ make world $ make install (you may need superuser rights) Detailed Installation Procedure. -------------------------------- Note these installation instructions are meant for users. For Coq developers, there is an extra set of targets better suited to them: please see the [contributing guide](../../CONTRIBUTING.md). 1. 
Check that you have the required dependencies as specified in the top-level [INSTALL](../../INSTALL.md) file. 2. Decompress Coq's source code into a build folder; the name doesn't matter. You will need around 300 MiB free disk space to compile Coq, and a similar amount to install it. 3. Then, configure Coq with the command: ./configure The `configure` script will ask you for directories where to put the Coq binaries, standard library, man pages, etc. It will propose default values. For a list of options accepted by the `configure` script, run `./configure -help`. The main options accepted are: * `-prefix ` Binaries, library, and man pages will be respectively installed in `/bin`, `/lib/coq`, and `/man` * `-libdir ` (default: `/usr/local/lib/coq`) Directory where the Coq standard library will be installed * `-mandir ` (default: `/usr/local/share/man`) Directory where the Coq manual pages will be installed * `-arch ` (default is the result of the command `arch`) An arbitrary architecture name for your machine (useful when compiling Coq on two different architectures for which the result of "arch" is the same, e.g. Sun OS and Solaris) * `-browser ` Use to open an URL in a browser. %s must appear in , and will be replaced by the URL. If you want your build to be reproducible, ensure that the `SOURCE_DATE_EPOCH` environment variable is set as documented in https://reproducible-builds.org/specs/source-date-epoch/ 4. Still in the Coq sources directory, do: make world to compile Coq (this builds both native and bytecode version by default, or only the bytecode version if a native OCaml port is not available). This will compile the entire system. This phase can take more or less time, depending on your architecture and is fairly verbose. On a multi-core machine, it is recommended to compile in parallel, via make -jN where N is your number of cores. If you wish to create timing logs for the standard library, you can pass `TIMING=1` (for per-line timing files) or `TIMED=1` (for per-file timing on stdout). Further variables and targets are available for more detailed timing analysis; see the section of the reference manual on `coq_makefile`. If there is any timing target or variable supported by `coq_makefile`-made Makefiles which is not supported by Coq's own Makefile, please report that as a bug. 5. You can now install the Coq system. Executables, libraries, and manual pages are copied in some standard places of your system, defined at configuration time (step 3). Just do: umask 022 make install Of course, you may need superuser rights to do that. 6. Note that the `install` target does support the `DESTDIR` variable, useful for package builders, so `make DESTDIR=tmp install` will install the files under `tmp/usr/...`. 7. You can now clean all the sources. (You can even erase them.) make clean Notes for packagers ------------------- The `make install` target for Coq's OCaml parts calls `dune install` internally. Before Dune 2.9, `dune install` didn't support configuring the `-docdir` and `-configdir` installation paths, thus these configure options were ignored for the `coq-core` package. Coq will try to detect if Dune >= 2.9 is being used, and perform the right call to Dune in that case. If Dune < 2.9 is being used, Coq's configure will emit a warning. As a packager/user, you have two options: a) manually correct the install locations of `doc` and `etc` for `coq-core`, or to use a tool such as `opam-install` which already supports these options correctly. 
`dune build -p coq-core && opam-installer $OPTS _build/default/coq-core.install` should do the trick. Installation Procedure For Plugin Developers. --------------------------------------------- If you wish to write plugins, you *must* keep the Coq sources, without cleaning them. Therefore, to avoid a duplication of binaries and library, it is not necessary to do the installation step (5 above). You just have to request a local, in-tree layout at configuration step (3 above) by using the development profile: ./configure -profile devel Then compile the sources as described in step 4 above. The resulting binaries will reside in the subdirectory `bin`, which is a symlink to the `_build_vo/default/bin` directory. Unless you pass the `-nodebug` option to `./configure`, the `-g` option of the OCaml compiler will be used during compilation to allow debugging. See the debugging file in `dev/doc` and chapter 15 of the Coq Reference Manual for details about how to use the OCaml debugger with Coq. The Available Commands. ----------------------- There are two Coq commands: coqtop The Coq toplevel coqc The Coq compiler For architectures where `ocamlopt` is available, `coqtop` is the native code version of Coq. The byte-code version is `coqtop.byte`, which can be used for debugging. If native code isn't available, `coqtop` will point to `coqtop.byte`. `coqc` follows a similar scheme. * `coqtop` launches Coq in interactive mode. By default it loads basic logical definitions and tactics from the Init directory. * `coqc` allows compilation of Coq files directly from the command line. To compile a file foo.v, do: coqc foo.v It will produce a file `foo.vo`, that you can now load through the Coq command `Require`. A detailed description of these commands and of their options is given in the Reference Manual (which you can get in the doc/ directory, or read online on http://coq.inria.fr/doc/) and in the corresponding manual pages. Moving Binaries Or Library. --------------------------- If you move both the binaries and the library in a consistent way, Coq should still be able to run. Otherwise, Coq may not be able to find the required prelude files and will give this error message: Error during initialization : Error: cannot guess a path for Coq libraries; please use -coqlib option You can then indicate the location of Coq's standard library using the option `-coqlib`: coqtop -coqlib <directory> # FLambda Options You can tweak the optimization flags passed to the OCaml optimizing compiler. Coq's default is: -flambda-opts `-O3 -unbox-closures` which is set in Coq's toplevel `dune` file. Feel free to try a different combination of flags. You can read more at https://caml.inria.fr/pub/docs/manual-ocaml/flambda.html There is a known problem with certain OCaml versions and `native_compute` that can make compilation require a large amount of RAM (>= 10GiB) for some particular files. We recommend disabling native compilation (`-native-compiler no`) with flambda if you use OCaml < 4.07.0. c.f. https://caml.inria.fr/mantis/view.php?id=7630 Dynamically Loaded Libraries For Bytecode Executables. ------------------------------------------------------ Some bytecode executables of Coq use the OCaml runtime, which dynamically loads a shared library (`.so` or `.dll`).
When it is not installed properly, you can get an error message of this kind: Fatal error: cannot load shared library dllcoqrun Reason: dllcoqrun.so: cannot open shared object file: No such file or directory In this case, you need either: - to set the `CAML_LD_LIBRARY_PATH` environment variable to point to the directory where dllcoqrun.so is; this is suitable when you want to run the command a limited number of times in a controlled environment (e.g. during compilation of binary packages); - install `dllcoqrun.so` in a location listed in the file `ld.conf` that is in the directory of the standard library of OCaml coq-8.15.0/dev/doc/README.md000066400000000000000000000106031417001151100151600ustar00rootroot00000000000000# Beginner's guide to hacking Coq ## Getting dependencies See the first section of [`INSTALL.md`](../../INSTALL.md). Developers are recommended to use a recent OCaml version, which they can get through opam or Nix, in particular. ## Configuring Dune caching and default verbosity There are several configuration settings that you can control globally by creating a Dune configuration file (see `man dune-config` to learn more). This file is generally located in `~/.config/dune/config` (this is system-dependent). It should start with the version of the Dune language used (by the configuration file---which can be different from the one used in the Coq repository), e.g.: ``` (lang dune 2.0) ``` - You will get faster rebuilds if you enable Dune caching. This is true in all cases, but even more so when using the targets in `Makefile.dune` (see below). To set up Dune caching, you should append the following line to your Dune configuration file: ``` (cache enabled) ``` Note that by default, Dune caching will use up to 10GB of disk size. See the [official documentation](https://dune.readthedocs.io/en/stable/caching.html#on-disk-size) to learn how to change the default. - Dune is not very verbose by default. If you want to change the behavior to a more verbose one, you may append the following line to your Dune configuration file: ``` (display short) ``` ## Building `coqtop` / `coqc` binaries We recommend that you use the targets in `Makefile.dune`. See [`build-system.dune.md`](build-system.dune.md) to learn more about them. In the example below, you may omit `-f Makefile.dune` by setting `COQ_USE_DUNE=1`. ``` $ git clone https://github.com/coq/coq.git $ cd coq $ make -f Makefile.dune # to get an idea of the available targets $ make -f Makefile.dune check # build all OCaml files as fast as possible $ dune exec -- dev/shim/coqc-prelude test.v # update coqc and the prelude and compile file test.v $ make -f Makefile.dune world # build coq and the complete stdlib and setup it for use under _build/install/default # In particular, you may run, e.g., coq_makefile from _build/install/default # to build some test project ``` When running the commands above, you may set `DUNEOPT=--display=short` for a more verbose build (not required if you have already set the default verbosity globally as described in the previous section). Alternatively, you can use the legacy build system (which is now a hybrid since it relies on Dune for the OCaml parts). If you haven't set `COQ_USE_DUNE=1`, then you don't need `-f Makefile.make`. 
``` $ ./configure -profile devel # add -warn-error no if you don't want to fail on warnings while building the stlib $ make -f Makefile.make -j $JOBS # Make once for `merlin` (autocompletion tool) $ make -f Makefile.make -j $JOBS states # builds just enough to run coqtop $ bin/coqc ``` When running the commands above, you may set `_DDISPLAY=short` for a more verbose build. To learn how to run the test suite, you can read [`test-suite/README.md`](../../test-suite/README.md). ## Coq functions of interest - `Coqtop.start`: This function is the main entry point of coqtop. - `Coqtop.parse_args `: This function is responsible for parsing command-line arguments. - `Coqloop.loop`: This function implements the read-eval-print loop. - `Vernacentries.interp`: This function is called to execute the Vernacular command user have typed. It dispatches the control to specific functions handling different Vernacular command. - `Vernacentries.vernac_check_may_eval`: This function handles the `Check ...` command. ## Development environment + tooling - [`Merlin`](https://github.com/ocaml/merlin) for autocomplete. - [Wiki pages on tooling containing `emacs`, `vim`, and `git` information](https://github.com/coq/coq/wiki/DevelSetup) - [`ocamlformat`](https://github.com/ocaml-ppx/ocamlformat) provides support for automatic formatting of OCaml code. To use it please run `dune build @fmt`, see `ocamlformat`'s documentation for more help. ## A note about rlwrap When using `rlwrap coqtop` make sure the version of `rlwrap` is at least `0.42`, otherwise you will get ``` rlwrap: error: Couldn't read completions from /usr/share/rlwrap/completions/coqtop: No such file or directory ``` If this happens either update or use an alternate readline wrapper like `ledit`. coq-8.15.0/dev/doc/SProp.md000066400000000000000000000054071417001151100152740ustar00rootroot00000000000000# Notes on SProp (ml API side, see refman for user side) ## Relevance All kernel binders (`Prod`/`Lambda`/`LetIn`/`Context` elements) are now annotated with a value in `type Sorts.relevance = Relevant | Irrelevant`. It should verify that the binder's type lives in `SProp` iff the annotation is `Irrelevant`. As a plugin you can generally just use `Relevant` everywhere, the kernel will fix it if needed when it checks the terms you produce. The only issue is that if you generate `Relevant` when it should have been `Irrelevant` you won't be able to use proof irrelevance on that variable until the kernel fixes it. See refman for examples as Coq also uses `Relevant` incorrectly in some places. This annotation is done by transforming the binder name `'a` into a `'a Context.binder_annot = { binder_name : 'a; binder_relevance : Sorts.relevance }`, eg `Prod of Name.t * types * types` becomes `Prod of Name.t Context.binder_annot * types * types`. If you just carry binder names around without looking at them no change is needed, eg if you have `match foo with Lambda (x, a, b) -> Prod (x, a, type_of (push_rel (LocalAssum (x,a)) env) b)`. Otherwise see `context.mli` for a few combinators on the `binder_annot` type. When making `Relevant` annotations you can use some convenience functions from `Context` (eg `annotR x = make_annot x Relevant`), also `mkArrowR` from `Constr`/`EConstr` which has the signature of the old `mkArrow`. You can enable the debug warning `bad-relevance` to help find places where you generate incorrect annotations. Relevance can be inferred from a well-typed term using functions in `Retypeops` (for `Constr`) and `Retyping` (for `EConstr`). 
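As a concrete illustration (this sketch is not part of the original note, and the helper name `mk_relevant_prod` is made up), a plugin that does not want to track relevance precisely can annotate its binders as `Relevant` and let the kernel repair the annotation when it checks the term:

```ocaml
(* Minimal sketch: build a product whose binder is annotated as Relevant,
   using the combinators mentioned above (Context.annotR, Constr.mkProd).
   If [ty] actually lives in SProp, the kernel will fix the annotation to
   Irrelevant when it checks the resulting term. *)
open Constr

let mk_relevant_prod (id : Names.Id.t) (ty : types) (body : types) : types =
  mkProd (Context.annotR (Names.Name.mk_name id), ty, body)
```

If precise annotations matter (for instance to avoid the `bad-relevance` debug warning), the relevance-inference functions from `Retypeops`/`Retyping` mentioned above can be used instead of hard-coding `Relevant`.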
For `x` a term, note the difference between its relevance as a term (is `x : (_ : SProp)`) and as a type (is `x : SProp`), there are functions for both kinds. ## Case inversion Inductives in SProp with 1 constructor which has no arguments have a special reduction rule for matches. To implement it the Case constructor is extended with a `case_invert` field. If you are constructing a match on a normal (non-special reduction) inductive you must fill the new field with `NoInvert`. Otherwise you must fill it with `CaseInvert {univs ; args}` where `univs` is the universe instance of the type you are matching and `args` the parameters and indices. For instance, in ~~~coq Inductive seq {A} (a:A) : A -> SProp := srefl : seq a a. Definition seq_to_eq {A x y} (e:seq x y) : x = y :> A := match e with srefl => eq_refl end. ~~~ the `match e with ...` has `CaseInvert {univs = Instance.empty; args = [|A x y|]}`. (empty instance since we defined a universe monomorphic `seq`). In practice, you should use `Inductiveops.make_case_or_project` which will take care of this for you (and also handles primitive records correctly etc). coq-8.15.0/dev/doc/archive/000077500000000000000000000000001417001151100153225ustar00rootroot00000000000000coq-8.15.0/dev/doc/archive/COMPATIBILITY000066400000000000000000000010251417001151100172540ustar00rootroot00000000000000Note: this file isn't used anymore. Incompatibilities are documented as part of CHANGES. Incompatibilities beyond 8.4... - Syntax: "x -> y" has now lower priority than "<->" "A -> B <-> C" is now "A -> (B <-> C)" - Tactics: tauto and intuition no longer accidentally destruct binary connectives or records other than and, or, prod, sum, iff. In most of cases, dtauto or dintuition, though stronger than 8.3 tauto and 8.3 intuition will provide compatibility. - "Solve Obligations using" is now "Solve Obligations with". coq-8.15.0/dev/doc/archive/Translator.tex000066400000000000000000000755741417001151100202170ustar00rootroot00000000000000\ifx\pdfoutput\undefined % si on est pas en pdflatex \documentclass[11pt,a4paper]{article} \else \documentclass[11pt,a4paper,pdftex]{article} \fi \usepackage[latin1]{inputenc} \usepackage[T1]{fontenc} \usepackage{pslatex} \usepackage{url} \usepackage{verbatim} \usepackage{amsmath} \usepackage{amssymb} \usepackage{array} \usepackage{fullpage} \title{Translation from Coq V7 to V8} \author{The Coq Development Team} %% Macros etc. \catcode`\_=13 \let\subscr=_ \def_{\ifmmode\sb\else\subscr\fi} \def\NT#1{\langle\textit{#1}\rangle} \def\NTL#1#2{\langle\textit{#1}\rangle_{#2}} %\def\TERM#1{\textsf{\bf #1}} \def\TERM#1{\texttt{#1}} \newenvironment{transbox} {\begin{center}\tt\begin{tabular}{l|ll} \hfil\textrm{V7} & \hfil\textrm{V8} \\ \hline} {\end{tabular}\end{center}} \def\TRANS#1#2 {\begin{tabular}[t]{@{}l@{}}#1\end{tabular} & \begin{tabular}[t]{@{}l@{}}#2\end{tabular} \\} \def\TRANSCOM#1#2#3 {\begin{tabular}[t]{@{}l@{}}#1\end{tabular} & \begin{tabular}[t]{@{}l@{}}#2\end{tabular} & #3 \\} %% %% %% \begin{document} \maketitle \section{Introduction} Coq version 8.0 is a major version and carries major changes: the concrete syntax was redesigned almost from scratch, and many notions of the libraries were renamed for uniformisation purposes. We felt that these changes could discourage users with large theories from switching to the new version. The goal of this document is to introduce these changes on simple examples (mainly the syntactic changes), and describe the automated tools to help moving to V8.0. 
Essentially, it consists of a translator that takes as input a Coq source file in old syntax and produces a file in new syntax and adapted to the new standard library. The main extra features of this translator is that it keeps comments, even those within expressions\footnote{The position of those comment might differ slightly since there is no exact matching of positions between old and new syntax.}. The document is organised as follows: first section describes the new syntax on simple examples. It is very translation-oriented. This should give users of older versions the flavour of the new syntax, and allow them to make translation manually on small examples. Section~\ref{Translation} explains how the translation process can be automatised for the most part (the boring one: applying similar changes over thousands of lines of code). We strongly advise users to follow these indications, in order to avoid many potential complications of the translation process. \section{The new syntax on examples} The goal of this section is to introduce to the new syntax of Coq on simple examples, rather than just giving the new grammar. It is strongly recommended to read first the definition of the new syntax (in the reference manual), but this document should also be useful for the eager user who wants to start with the new syntax quickly. The toplevel has an option {\tt -translate} which allows interactively translating commands. This toplevel translator accepts a command, prints the translation on standard output (after a % \verb+New syntax:+ balise), executes the command, and waits for another command. The only requirements is that they should be syntactically correct, but they do not have to be well-typed. This interactive translator proved to be useful in two main usages. First as a ``debugger'' of the translation. Before the translation, it may help in spotting possible conflicts between the new syntax and user notations. Or when the translation fails for some reason, it makes it easy to find the exact reason why it failed and make attempts in fixing the problem. The second usage of the translator is when trying to make the first proofs in new syntax. Well trained users will automatically think their scripts in old syntax and might waste much time (and the intuition of the proof) if they have to search the translation in a document. Running a translator in the background will allow the user to instantly have the answer. The rest of this section is a description of all the aspects of the syntax that changed and how they were translated. All the examples below can be tested by entering the V7 commands in the toplevel translator. %% \subsection{Changes in lexical conventions w.r.t. V7} \subsubsection{Identifiers} The lexical conventions changed: \TERM{_} is not a regular identifier anymore. It is used in terms as a placeholder for subterms to be inferred at type-checking, and in patterns as a non-binding variable. Furthermore, only letters (Unicode letters), digits, single quotes and _ are allowed after the first character. \subsubsection{Quoted string} Quoted strings are used typically to give a filename (which may not be a regular identifier). As before they are written between double quotes ("). Unlike for V7, there is no escape character: characters are written normally except the double quote which is doubled. \begin{transbox} \TRANS{"abcd$\backslash\backslash$efg"}{"abcd$\backslash$efg"} \TRANS{"abcd$\backslash$"efg"}{"abcd""efg"} \end{transbox} \subsection{Main changes in terms w.r.t. 
V7} \subsubsection{Precedence of application} In the new syntax, parentheses are not really part of the syntax of application. The precedence of application (10) is tighter than all prefix and infix notations. It makes it possible to remove parentheses in many contexts. \begin{transbox} \TRANS{(A x)->(f x)=(g y)}{A x -> f x = g y} \TRANS{(f [x]x)}{f (fun x => x)} \end{transbox} \subsubsection{Arithmetics and scopes} The specialized notation for \TERM{Z} and \TERM{R} (introduced by symbols \TERM{`} and \TERM{``}) have disappeared. They have been replaced by the general notion of scope. \begin{center} \begin{tabular}{l|l|l} type & scope name & delimiter \\ \hline types & type_scope & \TERM{type} \\ \TERM{bool} & bool_scope & \\ \TERM{nat} & nat_scope & \TERM{nat} \\ \TERM{Z} & Z_scope & \TERM{Z} \\ \TERM{R} & R_scope & \TERM{R} \\ \TERM{positive} & positive_scope & \TERM{P} \end{tabular} \end{center} In order to use notations of arithmetics on \TERM{Z}, its scope must be opened with command \verb+Open Scope Z_scope.+ Another possibility is using the scope change notation (\TERM{\%}). The latter notation is to be used when notations of several scopes appear in the same expression. In examples below, scope changes are not needed if the appropriate scope has been opened. Scope \verb|nat_scope| is opened in the initial state of Coq. \begin{transbox} \TRANSCOM{`0+x=x+0`}{0+x=x+0}{\textrm{Z_scope}} \TRANSCOM{``0 + [if b then ``1`` else ``2``]``}{0 + if b then 1 else 2}{\textrm{R_scope}} \TRANSCOM{(0)}{0}{\textrm{nat_scope}} \end{transbox} Below is a table that tells which notation is available in which scope. The relative precedences and associativity of operators is the same as in usual mathematics. See the reference manual for more details. However, it is important to remember that unlike V7, the type operators for product and sum are left-associative, in order not to clash with arithmetic operators. \begin{center} \begin{tabular}{l|l} scope & notations \\ \hline nat_scope & \texttt{+ - * < <= > >=} \\ Z_scope & \texttt{+ - * / mod < <= > >= ?=} \\ R_scope & \texttt{+ - * / < <= > >=} \\ type_scope & \texttt{* +} \\ bool_scope & \texttt{\&\& || -} \\ list_scope & \texttt{:: ++} \end{tabular} \end{center} \subsubsection{Notation for implicit arguments} The explicitation of arguments is closer to the \emph{bindings} notation in tactics. Argument positions follow the argument names of the head constant. The example below assumes \verb+f+ is a function with two implicit dependent arguments named \verb+x+ and \verb+y+. \begin{transbox} \TRANS{f 1!t1 2!t2 t3}{f (x:=t1) (y:=t2) t3} \TRANS{!f t1 t2}{@f t1 t2} \end{transbox} \subsubsection{Inferred subterms} Subterms that can be automatically inferred by the type-checker is now written {\tt _} \begin{transbox} \TRANS{?}{_} \end{transbox} \subsubsection{Universal quantification} The universal quantification and dependent product types are now introduced by the \texttt{forall} keyword before the binders and a comma after the binders. The syntax of binders also changed significantly. A binder can simply be a name when its type can be inferred. In other cases, the name and the type of the variable are put between parentheses. When several consecutive variables have the same type, they can be grouped. Finally, if all variables have the same type, parentheses can be omitted. 
\begin{transbox} \TRANS{(x:A)B}{forall (x:~A), B ~~\textrm{or}~~ forall x:~A, B} \TRANS{(x,y:nat)P}{forall (x y :~nat), P ~~\textrm{or}~~ forall x y :~nat, P} \TRANS{(x,y:nat;z:A)P}{forall (x y :~nat) (z:A), P} \TRANS{(x,y,z,t:?)P}{forall x y z t, P} \TRANS{(x,y:nat;z:?)P}{forall (x y :~nat) z, P} \end{transbox} \subsubsection{Abstraction} The notation for $\lambda$-abstraction follows that of universal quantification. The binders are surrounded by keyword \texttt{fun} and \verb+=>+. \begin{transbox} \TRANS{[x,y:nat; z](f a b c)}{fun (x y:nat) z => f a b c} \end{transbox} \subsubsection{Pattern-matching} Beside the usage of the keyword pair \TERM{match}/\TERM{with} instead of \TERM{Cases}/\TERM{of}, the main change is the notation for the type of branches and return type. It is no longer written between \TERM{$<$ $>$} before the \TERM{Cases} keyword, but interleaved with the destructured objects. The idea is that for each destructured object, one may specify a variable name (after the \TERM{as} keyword) to tell how the branches types depend on this destructured objects (case of a dependent elimination), and also how they depend on the value of the arguments of the inductive type of the destructured objects (after the \TERM{in} keyword). The type of branches is then given after the keyword \TERM{return}, unless it can be inferred. Moreover, when the destructured object is a variable, one may use this variable in the return type. \begin{transbox} \TRANS{Cases n of\\~~ O => O \\| (S k) => (1) end}{match n with\\~~ 0 => 0 \\| S k => 1 end} \TRANS{Cases m n of \\~~0 0 => t \\| ... end}{match m, n with \\~~0, 0 => t \\| ... end} \TRANS{<[n:nat](P n)>Cases T of ... end}{match T as n return P n with ... end} \TRANS{<[n:nat][p:(even n)]\~{}(odd n)>Cases p of\\~~ ... \\end}{match p in even n return \~{} odd n with\\~~ ...\\end} \end{transbox} The annotations of the special pattern-matching operators (\TERM{if}/\TERM{then}/\TERM{else}) and \TERM{let()} also changed. The only restriction is that the destructuring \TERM{let} does not allow dependent case analysis. \begin{transbox} \TRANS{ \begin{tabular}{@{}l} <[n:nat;x:(I n)](P n x)>if t then t1 \\ else t2 \end{tabular}}% {\begin{tabular}{@{}l} if t as x in I n return P n x then t1 \\ else t2 \end{tabular}} \TRANS{<[n:nat](P n)>let (p,q) = t1 in t2}% {let (p,q) in I n return P n := t1 in t2} \end{transbox} \subsubsection{Fixpoints and cofixpoints} An simpler syntax for non-mutual fixpoints is provided, making it very close to the usual notation for non-recursive functions. The decreasing argument is now indicated by an annotation between curly braces, regardless of the binders grouping. The annotation can be omitted if the binders introduce only one variable. The type of the result can be omitted if inferable. \begin{transbox} \TRANS{Fix plus\{plus [n:nat] : nat -> nat :=\\~~ [m]...\}}{fix plus (n m:nat) \{struct n\}: nat := ...} \TRANS{Fix fact\{fact [n:nat]: nat :=\\ ~~Cases n of\\~~~~ O => (1) \\~~| (S k) => (mult n (fact k)) end\}}{fix fact (n:nat) :=\\ ~~match n with \\~~~~0 => 1 \\~~| (S k) => n * fact k end} \end{transbox} There is a syntactic sugar for single fixpoints (defining one variable) associated to a local definition: \begin{transbox} \TRANS{let f := Fix f \{f [x:A] : T := M\} in\\(g (f y))}{let fix f (x:A) : T := M in\\g (f x)} \end{transbox} The same applies to cofixpoints, annotations are not allowed in that case. 
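As an illustration in the same spirit as the tables above (an illustrative example, not taken from the original translation tables), a complete recursive definition can be written directly in the new syntax:

\begin{verbatim}
Fixpoint fact (n:nat) : nat :=
  match n with
  | O => 1
  | S k => n * fact k
  end.
\end{verbatim}

The {\tt struct} annotation is omitted since there is only one binder.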
\subsubsection{Notation for type cast} \begin{transbox} \TRANS{O :: nat}{0 : nat} \end{transbox} \subsection{Main changes in tactics w.r.t. V7} The main change is that all tactic names are lowercase. This also holds for Ltac keywords. \subsubsection{Renaming of induction tactics} \begin{transbox} \TRANS{NewDestruct}{destruct} \TRANS{NewInduction}{induction} \TRANS{Induction}{simple induction} \TRANS{Destruct}{simple destruct} \end{transbox} \subsubsection{Ltac} Definitions of macros are introduced by \TERM{Ltac} instead of \TERM{Tactic Definition}, \TERM{Meta Definition} or \TERM{Recursive Definition}. They are considered recursive by default. \begin{transbox} \TRANS{Meta Definition my_tac t1 t2 := t1; t2.}% {Ltac my_tac t1 t2 := t1; t2.} \end{transbox} Rules of a match command are not between square brackets anymore. Context (understand a term with a placeholder) instantiation \TERM{inst} became \TERM{context}. Syntax is unified with subterm matching. \begin{transbox} \TRANS{Match t With [C[x=y]] -> Inst C[y=x]}% {match t with context C[x=y] => context C[y=x] end} \end{transbox} Arguments of macros use the term syntax. If a general Ltac expression is to be passed, it must be prefixed with ``{\tt ltac :}''. In other cases, when a \'{} was necessary, it is replaced by ``{\tt constr :}'' \begin{transbox} \TRANS{my_tac '(S x)}{my_tac (S x)} \TRANS{my_tac (Let x=tac In x)}{my_tac ltac:(let x:=tac in x)} \TRANS{Let x = '[x](S (S x)) In Apply x}% {let x := constr:(fun x => S (S x)) in apply x} \end{transbox} {\tt Match Context With} is now called {\tt match goal with}. Its argument is an Ltac expression by default. \subsubsection{Named arguments of theorems ({\em bindings})} \begin{transbox} \TRANS{Apply thm with x:=t 1:=u}{apply thm with (x:=t) (1:=u)} \end{transbox} \subsubsection{Occurrences} To avoid ambiguity between a numeric literal and the optional occurrence numbers of this term, the occurrence numbers are put after the term itself and after keyword \TERM{as}. \begin{transbox} \TRANS{Pattern 1 2 (f x) 3 4 d y z}{pattern f x at 1 2, d at 3 4, y, z} \end{transbox} \subsubsection{{\tt LetTac} and {\tt Pose}} Tactic {\tt LetTac} was renamed into {\tt set}, and tactic {\tt Pose} was a particular case of {\tt LetTac} where the abbreviation is folded in the conclusion\footnote{There is a tactic called {\tt pose} in V8, but its behaviour is not to fold the abbreviation at all.}. \begin{transbox} \TRANS{LetTac x = t in H}{set (x := t) in H} \TRANS{Pose x := t}{set (x := t)} \end{transbox} {\tt LetTac} could be followed by a specification (called a clause) of the places where the abbreviation had to be folded (hypothese and/or conclusion). Clauses are the syntactic notion to denote in which parts of a goal a given transformation should occur. Its basic notation is either \TERM{*} (meaning everywhere), or {\tt\textrm{\em hyps} |- \textrm{\em concl}} where {\em hyps} is either \TERM{*} (to denote all the hypotheses), or a comma-separated list of either hypothesis name, or {\tt (value of $H$)} or {\tt (type of $H$)}. Moreover, occurrences can be specified after every hypothesis after the {\TERM{at}} keyword. {\em concl} is either empty or \TERM{*}, and can be followed by occurrences. \begin{transbox} \TRANS{in Goal}{in |- *} \TRANS{in H H1}{in H1, H2 |-} \TRANS{in H H1 ...}{in * |-} \TRANS{in H H1 Goal}{in H1, H2 |- *} \TRANS{in H H1 H2 ... Goal}{in *} \TRANS{in 1 2 H 3 4 H0 1 3 Goal}{in H at 1 2, H0 at 3 4 |- * at 1 3} \end{transbox} \subsection{Main changes in vernacular commands w.r.t. 
V7} \subsubsection{Require} The default behaviour of {\tt Require} is not to open the loaded module. \begin{transbox} \TRANS{Require Arith}{Require Import Arith} \end{transbox} \subsubsection{Binders} The binders of vernacular commands changed in the same way as those of fixpoints. This also holds for parameters of inductive definitions. \begin{transbox} \TRANS{Definition x [a:A] : T := M}{Definition x (a:A) : T := M} \TRANS{Inductive and [A,B:Prop]: Prop := \\~~conj : A->B->(and A B)}% {Inductive and (A B:Prop): Prop := \\~~conj : A -> B -> and A B} \end{transbox} \subsubsection{Hints} Both {\tt Hints} and {\tt Hint} commands are beginning with {\tt Hint}. Command {\tt HintDestruct} has disappeared. The syntax of \emph{Extern} hints changed: the pattern and the tactic to be applied are separated by a {\tt =>}. \begin{transbox} \TRANS{Hint name := Resolve (f ? x)}% {Hint Resolve (f _ x)} \TRANS{Hint name := Extern 4 (toto ?) Apply lemma}% {Hint Extern 4 (toto _) => apply lemma} \TRANS{Hints Resolve x y z}{Hint Resolve x y z} \TRANS{Hints Resolve f : db1 db2}{Hint Resolve f : db1 db2} \TRANS{Hints Immediate x y z}{Hint Immediate x y z} \TRANS{Hints Unfold x y z}{Hint Unfold x y z} %% \TRANS{\begin{tabular}{@{}l} %% HintDestruct Local Conclusion \\ %% ~~name (f ? ?) 3 [Apply thm] %% \end{tabular}}% %% {\begin{tabular}{@{}l} %% Hint Local Destuct name := \\ %% ~~3 Conclusion (f _ _) => apply thm %% \end{tabular}} \end{transbox} \subsubsection{Implicit arguments} {\tt Set Implicit Arguments} changed its meaning in V8: the default is to turn implicit only the arguments that are {\em strictly} implicit (or rigid), i.e. that remains inferable whatever the other arguments are. For instance {\tt x} inferable from {\tt P x} is not strictly inferable since it can disappears if {\tt P} is instantiated by a term which erases {\tt x}. \begin{transbox} \TRANS{Set Implicit Arguments}% {\begin{tabular}{l} Set Implicit Arguments. \\ Unset Strict Implicits. \end{tabular}} \end{transbox} However, you may wish to adopt the new semantics of {\tt Set Implicit Arguments} (for instance because you think that the choice of arguments it sets implicit is more ``natural'' for you). \subsection{Changes in standard library} Many lemmas had their named changed to improve uniformity. The user generally do not have to care since the translators performs the renaming. Type {\tt entier} from fast_integer.v is renamed into {\tt N} by the translator. As a consequence, user-defined objects of same name {\tt N} are systematically qualified even tough it may not be necessary. The following table lists the main names with which the same problem arises: \begin{transbox} \TRANS{IF}{IF_then_else} \TRANS{ZERO}{Z0} \TRANS{POS}{Zpos} \TRANS{NEG}{Zneg} \TRANS{SUPERIEUR}{Gt} \TRANS{EGAL}{Eq} \TRANS{INFERIEUR}{Lt} \TRANS{add}{Pplus} \TRANS{true_sub}{Pminus} \TRANS{entier}{N} \TRANS{Un_suivi_de}{Ndouble_plus_one} \TRANS{Zero_suivi_de}{Ndouble} \TRANS{Nul}{N0} \TRANS{Pos}{Npos} \end{transbox} \subsubsection{Implicit arguments} %% Hugo: Main definitions of standard library have now implicit arguments. These arguments are dropped in the translated files. This can exceptionally be a source of incompatibilities which has to be solved by hand (it typically happens for polymorphic functions applied to {\tt nil} or {\tt None}). %% preciser: avant ou apres trad ? \subsubsection{Logic about {\tt Type}} Many notations that applied to {\tt Set} have been extended to {\tt Type}, so several definitions in {\tt Type} are superseded by them. 
\begin{transbox} \TRANS{x==y}{x=y} \TRANS{(EXT x:Prop | Q)}{exists x:Prop, Q} \TRANS{identityT}{identity} \end{transbox} %% Doc of the translator \section{A guide to translation} \label{Translation} %%\subsection{Overview of the translation process} Here is a short description of the tools involved in the translation process: \begin{description} \item{\tt coqc -translate} is the automatic translator. It is a parser/pretty-printer. This means that the translation is made by parsing every command using a parser of old syntax, which is printed using the new syntax. Many efforts were made to preserve as much as possible of the quality of the presentation: it avoids expansion of syntax extensions, comments are not discarded and placed at the same place. \item{\tt translate-v8} (in the translation package) is a small shell-script that will help translate developments that compile with a Makefile with minimum requirements. \end{description} \subsection{Preparation to translation} This step is very important because most of work shall be done before translation. If a problem occurs during translation, it often means that you will have to modify the original source and restart the translation process. This also means that it is recommended not to edit the output of the translator since it would be overwritten if the translation has to be restarted. \subsubsection{Compilation with {\tt coqc -v7}} First of all, it is mandatory that files compile with the current version of Coq (8.0) with option {\tt -v7}. Translation is a complicated task that involves the full compilation of the development. If your development was compiled with older versions, first upgrade to Coq V8.0 with option {\tt -v7}. If you use a Makefile similar to those produced by {\tt coq\_makefile}, you probably just have to do {\tt make OPT="-opt -v7"} ~~~or~~~ {\tt make OPT="-byte -v7"} When the development compiles successfully, there are several changes that might be necessary for the translation. Essentially, this is about syntax extensions (see section below dedicated to porting syntax extensions). If you do not use such features, then you are ready to try and make the translation. \subsection{Translation} \subsubsection{The general case} The preferred way is to use script {\tt translate-v8} if your development is compiled by a Makefile with the following constraints: \begin{itemize} \item compilation is achieved by invoking make without specifying a target \item options are passed to Coq with make variable COQFLAGS that includes variables OPT, COQLIBS, and OTHERFLAGS. \end{itemize} These constraints are met by the makefiles produced by {\tt coq\_makefile} Otherwise, modify your build program so as to pass option {\tt -translate} to program {\tt coqc}. The effect of this option is to output the translated source of any {\tt .v} file in a file with extension {\tt .v8} located in the same directory than the original file. \subsubsection{What may happen during the translation} This section describes events that may happen during the translation and measures to adopt. These are the warnings that may arise during the translation, but they generally do not require any modification for the user: Warnings: \begin{itemize} \item {\tt Unable to detect if $id$ denotes a local definition}\\ This is due to a semantic change in clauses. In a command such as {\tt simpl in H}, the old semantics were to perform simplification in the type of {\tt H}, or in its body if it is defined. 
With the new semantics, it is performed both in the type and the body (if any). It might lead to incompatibilities \item {\tt Forgetting obsolete module}\\ Some modules have disappeared in V8.0 (new syntax). The user does not need to worry about it, since the translator deals with it. \item {\tt Replacing obsolete module}\\ Same as before but with the module that were renamed. Here again, the translator deals with it. \end{itemize} \subsection{Verification of the translation} The shell-script {\tt translate-v8} also renames {\tt .v8} files into {\tt .v} files (older {\tt .v} files are put in a subdirectory called {\tt v7}) and tries to recompile them. To do so it invokes {\tt make} without option (which should cause the compilation using {\tt coqc} without particular option). If compilation fails at this stage, you should refrain from repairing errors manually on the new syntax, but rather modify the old syntax script and restart the translation. We insist on that because the problem encountered can show up in many instances (especially if the problem comes from a syntactic extension), and fixing the original sources (for instance the {\tt V8only} parts of notations) once will solve all occurrences of the problem. %%\subsubsection{Errors occurring after translation} %%Equality in {\tt Z} or {\tt R}... \subsection{Particular cases} \subsubsection{Lexical conventions} The definition of identifiers changed. Most of those changes are handled by the translator. They include: \begin{itemize} \item {\tt \_} is not an identifier anymore: it is translated to {\tt x\_} \item avoid clash with new keywords by adding a trailing {\tt \_} \end{itemize} If the choices made by translation is not satisfactory or in the following cases: \begin{itemize} \item use of latin letters \item use of iso-latin characters in notations \end{itemize} the user should change his development prior to translation. \subsubsection{{\tt Case} and {\tt Match}} These very low-level case analysis are no longer supported. The translator tries hard to translate them into a user-friendly one, but it might lack type information to do so\footnote{The translator tries to typecheck terms before printing them, but it is not always possible to determine the context in which terms appearing in tactics live.}. If this happens, it is preferable to transform it manually before translation. \subsubsection{Syntax extensions with {\tt Grammar} and {\tt Syntax}} {\tt Grammar} and {\tt Syntax} are no longer supported. They should be replaced by an equivalent {\tt Notation} command and be processed as described above. Before attempting translation, users should verify that compilation with option {\tt -v7} succeeds. In the cases where {\tt Grammar} and {\tt Syntax} cannot be emulated by {\tt Notation}, users have to change manually they development as they wish to avoid the use of {\tt Grammar}. If this is not done, the translator will simply expand the notations and the output of the translator will use the regular Coq syntax. \subsubsection{Syntax extensions with {\tt Notation} and {\tt Infix}} These commands do not necessarily need to be changed. Some work will have to be done manually if the notation conflicts with the new syntax (for instance, using keywords like {\tt fun} or {\tt exists}, overloading of symbols of the old syntax, etc.) or if the precedences are not right. Precedence levels are now from 0 to 200. In V8, the precedence and associativity of an operator cannot be redefined. 
Typical level are (refer to the chapter on notations in the Reference Manual for the full list): \begin{center} \begin{tabular}{|cll|} \hline Notation & Precedence & Associativity \\ \hline \verb!_ <-> _! & 95 & no \\ \verb!_ \/ _! & 85 & right \\ \verb!_ /\ _! & 80 & right \\ \verb!~ _! & 75 & right \\ \verb!_ = _!, \verb!_ <> _!, \verb!_ < _!, \verb!_ > _!, \verb!_ <= _!, \verb!_ >= _! & 70 & no \\ \verb!_ + _!, \verb!_ - _! & 50 & left \\ \verb!_ * _!, \verb!_ / _! & 40 & left \\ \verb!- _! & 35 & right \\ \verb!_ ^ _! & 30 & left \\ \hline \end{tabular} \end{center} By default, the translator keeps the associativity given in V7 while the levels are mapped according to the following table: \begin{center} \begin{tabular}{l|l|l} V7 level & mapped to & associativity \\ \hline 0 & 0 & no \\ 1 & 20 & left \\ 2 & 30 & right \\ 3 & 40 & left \\ 4 & 50 & left \\ 5 & 70 & no \\ 6 & 80 & right \\ 7 & 85 & right \\ 8 & 90 & right \\ 9 & 95 & no \\ 10 & 100 & left \end{tabular} \end{center} If this is OK, just simply apply the translator. \paragraph{Associativity conflict} Since the associativity of the levels obtained by translating a V7 level (as shown on table above) cannot be changed, you have to choose another level with a compatible associativity. You can choose any level between 0 and 200, knowing that the standard operators are already set at the levels shown on the list above. Assume you have a notation \begin{verbatim} Infix NONA 2 "=_S" my_setoid_eq. \end{verbatim} By default, the translator moves it to level 30 which is right associative, hence a conflict with the expected no associativity. To solve the problem, just add the "V8only" modifier to reset the level and enforce the associativity as follows: \begin{verbatim} Infix NONA 2 "=_S" my_setoid_eq V8only (at level 70, no associativity). \end{verbatim} The translator now knows that it has to translate "=_S" at level 70 with no associativity. Remark: 70 is the "natural" level for relations, hence the choice of 70 here, but any other level accepting a no-associativity would have been OK. Second example: assume you have a notation \begin{verbatim} Infix RIGHTA 1 "o" my_comp. \end{verbatim} By default, the translator moves it to level 20 which is left associative, hence a conflict with the expected right associativity. To solve the problem, just add the "V8only" modifier to reset the level and enforce the associativity as follows: \begin{verbatim} Infix RIGHTA 1 "o" my_comp V8only (at level 20, right associativity). \end{verbatim} The translator now knows that it has to translate "o" at level 20 which has the correct "right associativity". Remark: we assumed here that the user wants a strong precedence for composition, in such a way, say, that "f o g + h" is parsed as "(f o g) + h". To get "o" binding less than the arithmetical operators, an appropriated level would have been close of 70, and below, e.g. 65. \paragraph{Conflict: notation hides another notation} Remark: use {\tt Print Grammar constr} in V8 to diagnose the overlap and see the section on factorization in the chapter on notations of the Reference Manual for hints on how to factorize. Example: \begin{verbatim} Notation "{ x }" := (my_embedding x) (at level 1). \end{verbatim} overlaps in V8 with notation \verb#{ x : A & P }# at level 0 and with x at level 99. The conflicts can be solved by left-factorizing the notation as follows: \begin{verbatim} Notation "{ x }" := (my_embedding x) (at level 1) V8only (at level 0, x at level 99). 
\end{verbatim} \paragraph{Conflict: a notation conflicts with the V8 grammar} Again, use the {\tt V8only} modifier to tell the translator to automatically handle the new syntax. Example: \begin{verbatim} Infix 3 "@" app. \end{verbatim} Since {\tt @} is used in the new syntax for deactivating the implicit arguments, another symbol has to be used, e.g. {\tt @@}. This is done via the {\tt V8only} option as follows: \begin{verbatim} Infix 3 "@" app V8only "@@" (at level 40, left associativity). \end{verbatim} or, alternatively, by \begin{verbatim} Notation "x @ y" := (app x y) (at level 3, left associativity) V8only "x @@ y" (at level 40, left associativity). \end{verbatim} \paragraph{Conflict: my notation is already defined at another level (or with another associativity)} In V8, the level and associativity of a given notation can no longer be changed. Then, either you adopt the standard reserved levels and associativity for this notation (as given on the list above) or you change your notation. \begin{itemize} \item To change the notation, follow the directions in the previous paragraph. \item To adopt the standard level, just use {\tt V8only} without any argument. \end{itemize} Example: \begin{verbatim} Infix 6 "*" my_mult. \end{verbatim} is not accepted as such in V8. Write \begin{verbatim} Infix 6 "*" my_mult V8only. \end{verbatim} to tell the translator to use {\tt *} at the reserved level (i.e. 40 with left associativity). Even better, use interpretation scopes (look at the Reference Manual). \subsubsection{Strict implicit arguments} In case you want to adopt the new semantics of {\tt Set Implicit Arguments} (only setting rigid arguments as implicit), add the option {\tt -strict-implicit} to the translator. Warning: changing the number of implicit arguments can break notations. Then use the {\tt V8only} modifier of {\tt Notation}. \end{document} coq-8.15.0/dev/doc/archive/extensions.txt000066400000000000000000000013221417001151100202600ustar00rootroot00000000000000How do you add a new primitive entry for TACTIC EXTEND? ====================================================================== Example: adding the "clause" entry: - add a type ClauseArgType in interp/genarg.ml{,i}, together with the corresponding wit_, rawwit_ and globwit_ - everywhere Genarg.argument_type is pattern-matched, add the case handling this new ClauseArgType - use rawwit_clause to define a clause entry with the right type and the right name in the Tactic module of pcoq.ml4 - the rule must also be exported outside of g_tactic.ml4; to do so, add clause to the GLOBAL section of the GEXTEND - only then will the name clause be usable in TACTIC EXTEND!
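Once these steps are done, grammar rules can refer to the new entry directly. As a purely illustrative sketch (the tactic name and its trivial body are made up, not taken from the Coq sources), a rule in a .ml4 file could then look like:

  TACTIC EXTEND my_tactic_on_clause
  | [ "my_tac" clause(cl) ] -> [ (* a real implementation would use cl here *) Tacticals.tclIDTAC ]
  END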
coq-8.15.0/dev/doc/archive/naming-conventions.tex000066400000000000000000000544271417001151100216740ustar00rootroot00000000000000\documentclass[a4paper]{article} \usepackage{fullpage} \usepackage[utf8]{inputenc} \usepackage[T1]{fontenc} \usepackage{amsfonts} \parindent=0pt \parskip=10pt %%%%%%%%%%%%% % Macros \newcommand\itemrule[3]{ \subsubsection{#1} \begin{quote} \begin{tt} #3 \end{tt} \end{quote} \begin{quote} Name: \texttt{#2} \end{quote}} \newcommand\formula[1]{\begin{tt}#1\end{tt}} \newcommand\tactic[1]{\begin{tt}#1\end{tt}} \newcommand\command[1]{\begin{tt}#1\end{tt}} \newcommand\term[1]{\begin{tt}#1\end{tt}} \newcommand\library[1]{\texttt{#1}} \newcommand\name[1]{\texttt{#1}} \newcommand\zero{\texttt{zero}} \newcommand\op{\texttt{op}} \newcommand\opPrime{\texttt{op'}} \newcommand\opSecond{\texttt{op''}} \newcommand\phimapping{\texttt{phi}} \newcommand\D{\texttt{D}} \newcommand\elt{\texttt{elt}} \newcommand\rel{\texttt{rel}} \newcommand\relp{\texttt{rel'}} %%%%%%%%%%%%% \begin{document} \begin{center} \begin{huge} Proposed naming conventions for the Coq standard library \end{huge} \end{center} \bigskip The following document describes a proposition of canonical naming schemes for the Coq standard library. Obviously and unfortunately, the current state of the library is not as homogeneous as it would be if it would systematically follow such a scheme. To tend in this direction, we however recommend to follow the following suggestions. \tableofcontents \section{General conventions} \subsection{Variable names} \begin{itemize} \item Variables are preferably quantified at the head of the statement, even if some premisses do not depend of one of them. For instance, one would state \begin{quote} \begin{tt} {forall x y z:D, x <= y -> x+z <= y+z} \end{tt} \end{quote} and not \begin{quote} \begin{tt} {forall x y:D, x <= y -> forall z:D, x+z <= y+z} \end{tt} \end{quote} \item Variables are preferably quantified (and named) in the order of ``importance'', then of appearance, from left to right, even if for the purpose of some tactics it would have been more convenient to have, say, the variables not occurring in the conclusion first. For instance, one would state \begin{quote} \begin{tt} {forall x y z:D, x+z <= y+z -> x <= y} \end{tt} \end{quote} and not \begin{quote} \begin{tt} {forall z x y:D, x+z <= y+z -> x <= y} \end{tt} \end{quote} nor \begin{quote} \begin{tt} {forall x y z:D, y+x <= z+x -> y <= z} \end{tt} \end{quote} \item Choice of effective names is domain-dependent. For instance, on natural numbers, the convention is to use the variables $n$, $m$, $p$, $q$, $r$, $s$ in this order. On generic domains, the convention is to use the letters $x$, $y$, $z$, $t$. When more than three variables are needed, indexing variables It is conventional to use specific names for variables having a special meaning. For instance, $eps$ or $\epsilon$ can be used to denote a number intended to be as small as possible. Also, $q$ and $r$ can be used to denote a quotient and a rest. This is good practice. \end{itemize} \subsection{Disjunctive statements} A disjunctive statement with a computational content will be suffixed by \name{\_inf}. For instance, if \begin{quote} \begin{tt} {forall x y, op x y = zero -> x = zero \/ y = zero} \end{tt} \end{quote} has name \texttt{D\_integral}, then \begin{quote} \begin{tt} {forall x y, op x y = zero -> \{x = zero\} + \{y = zero\}} \end{tt} \end{quote} will have name \texttt{D\_integral\_inf}. 
As an exception, decidability statements, such as \begin{quote} \begin{tt} {forall x y, \{x = y\} + \{x <> y\}} \end{tt} \end{quote} will have a named ended in \texttt{\_dec}. Idem for cotransitivity lemmas which are inherently computational that are ended in \texttt{\_cotrans}. \subsection{Inductive types constructor names} As a general rule, constructor names start with the name of the inductive type being defined as in \texttt{Inductive Z := Z0 : Z | Zpos : Z -> Z | Zneg : Z -> Z} to the exception of very standard types like \texttt{bool}, \texttt{nat}, \texttt{list}... For inductive predicates, constructor names also start with the name of the notion being defined with one or more suffixes separated with \texttt{\_} for discriminating the different cases as e.g. in \begin{verbatim} Inductive even : nat -> Prop := | even_O : even 0 | even_S n : odd n -> even (S n) with odd : nat -> Prop := | odd_S n : even n -> odd (S n). \end{verbatim} As a general rule, inductive predicate names should be lowercase (to the exception of notions referring to a proper name, e.g. \texttt{Bezout}) and multiple words must be separated by ``{\_}''. As an exception, when extending libraries whose general rule is that predicates names start with a capital letter, the convention of this library should be kept and the separation between multiple words is done by making the initial of each work a capital letter (if one of these words is a proper name, then a ``{\_}'' is added to emphasize that the capital letter is proper and not an application of the rule for marking the change of word). Inductive predicates that characterize the specification of a function should be named after the function it specifies followed by \texttt{\_spec} as in: \begin{verbatim} Inductive nth_spec : list A -> nat -> A -> Prop := | nth_spec_O a l : nth_spec (a :: l) 0 a | nth_spec_S n a b l : nth_spec l n a -> nth_spec (b :: l) (S n) a. \end{verbatim} \section{Equational properties of operations} \subsection{General conventions} If the conclusion is in the other way than listed below, add suffix \name{\_reverse} to the lemma name. \subsection{Specific conventions} \itemrule{Associativity of binary operator {\op} on domain {\D}}{Dop\_assoc} {forall x y z:D, op x (op y z) = op (op x y) z} Remark: Symmetric form: \name{Dop\_assoc\_reverse}: \formula{forall x y z:D, op (op x y) z = op x (op y z)} \itemrule{Commutativity of binary operator {\op} on domain {\D}}{Dop\_comm} {forall x y:D, op x y = op y x} Remark: Avoid \formula{forall x y:D, op y x = op x y}, or at worst, call it \name{Dop\_comm\_reverse} \itemrule{Left neutrality of element elt for binary operator {\op}}{Dop\_elt\_l} {forall x:D, op elt x = x} Remark: In English, ``{\elt} is an identity for {\op}'' seems to be a more common terminology. \itemrule{Right neutrality of element elt for binary operator {\op}}{Dop\_elt\_r} {forall x:D, op x elt = x} Remark: By convention, if the identities are reminiscent to zero or one, they are written 1 and 0 in the name of the property. 
\itemrule{Left absorption of element {\elt} for binary operator {\op}}{Dop\_elt\_l}
{forall x:D, op elt x = elt}
Remarks:
\begin{itemize}
\item In the French school, this property is named ``elt est absorbant pour op''
\item In English, the property seems generally to be named ``elt is a zero of op''
\item In the context of lattices, this is a boundedness property; it may be called ``elt is a bound on D'', or, referring to a (possibly arbitrarily oriented) order, ``elt is a least element of D'' or ``elt is a greatest element of D''
\end{itemize}
\itemrule{Right absorption of element {\elt} for binary operator {\op}}{Dop\_elt\_l [BAD ??]}
{forall x:D, op x elt = elt}
\itemrule{Left distributivity of binary operator {\op} over {\opPrime} on domain {\D}}{Dop\_op'\_distr\_l}
{forall x y z:D, op (op' x y) z = op' (op x z) (op y z)}
Remark: Some authors say ``distribution''.
\itemrule{Right distributivity of binary operator {\op} over {\opPrime} on domain {\D}}{Dop\_op'\_distr\_r}
{forall x y z:D, op z (op' x y) = op' (op z x) (op z y)}
Remark: Note the order of arguments.
\itemrule{Distributivity of unary operator {\op} over binary {\opPrime} on domain {\D}}{Dop\_op'\_distr}
{forall x y:D, op (op' x y) = op' (op x) (op y)}
Remark: For a non-commutative operation with inversion of arguments, as in \formula{forall x y:D, op (op' x y) = op' (op y) (op x)}, we may probably still call the property distributivity since there is no ambiguity.
Example: \formula{forall n m : Z, -(n+m) = (-n)+(-m)}.
Example: \formula{forall l l' : list A, rev (l++l') = (rev l')++(rev l)}.
\itemrule{Left extrusion of unary operator {\op} over binary {\opPrime} on domain {\D}}{Dop\_op'\_distr\_l}
{forall x y:D, op (op' x y) = op' (op x) y}
Question: Call it left commutativity?? left swap?
\itemrule{Right extrusion of unary operator {\op} over binary {\opPrime} on domain {\D}}{Dop\_op'\_distr\_r}
{forall x y:D, op (op' x y) = op' x (op y)}
\itemrule{Idempotency of binary operator {\op} on domain {\D}}{Dop\_idempotent}
{forall x:D, op x x = x}
\itemrule{Idempotency of unary operator {\op} on domain {\D}}{Dop\_idempotent}
{forall x:D, op (op x) = op x}
Remark: This is actually idempotency of {\op} wrt composition and identity.
\itemrule{Idempotency of element {\elt} for binary operator {\op} on domain {\D}}{Dop\_elt\_idempotent}
{op elt elt = elt}
Remark: Generally useless in CIC for concrete, computable operators.
Remark: The general definition is ``exists n, iter n op x = x''.
\itemrule{Nilpotency of element {\elt} wrt a ring D with additive neutral element {\zero} and multiplicative binary operator {\op}}{Delt\_nilpotent}
{op elt elt = zero}
Remark: We leave the ring structure of D implicit; the general definition is ``exists n, iter n op elt = zero''.
\itemrule{Zero-product property in a ring D with additive neutral element {\zero} and multiplicative binary operator {\op}}{D\_integral}
{forall x y, op x y = zero -> x = zero \/ y = zero}
Remark: We leave the ring structure of D implicit; the Coq library uses \texttt{\_is\_O} (for \texttt{nat}), \texttt{\_integral} (for \texttt{Z}, \texttt{Q} and \texttt{R}), or \texttt{eq\_mul\_0} (for \texttt{NZ}).
Remark: The French school says ``intégrité''.
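Coming back to the distributivity of a unary operator over a binary one, the integer example given above can be stated and proved as follows (a sketch; the lemma name is hypothetical and the proof simply goes through the \texttt{ring} tactic):
\begin{verbatim}
Require Import ZArith.
Open Scope Z_scope.

Lemma Zopp_add_distr : forall n m : Z, - (n + m) = (- n) + (- m).
Proof. intros; ring. Qed.
\end{verbatim}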
\itemrule{Nilpotency of binary operator {\op} wrt its absorbing element zero in D}{Dop\_nilpotent}
{forall x, op x x = zero}
Remark: Did not find this definition on the web, but it is used in the Coq library (to characterize \name{xor}).
\itemrule{Involutivity of unary op on D}{Dop\_involutive}
{forall x:D, op (op x) = x}
\itemrule{Absorption law on the left for binary operator {\op} over binary operator {\opPrime} on the left}{Dop\_op'\_absorption\_l\_l}
{forall x y:D, op x (op' x y) = x}
\itemrule{Absorption law on the left for binary operator {\op} over binary operator {\opPrime} on the right}{Dop\_op'\_absorption\_l\_r}
{forall x y:D, op x (op' y x) = x}
Remark: Similarly for \name{Dop\_op'\_absorption\_r\_l} and \name{Dop\_op'\_absorption\_r\_r}.
\itemrule{De Morgan laws for binary operators {\opPrime} and {\opSecond} wrt unary op on domain {\D}}{Dop'\_op''\_de\_morgan, Dop''\_op'\_de\_morgan ?? \mbox{(leaving the complementing operation implicit)}}
{forall x y:D, op (op' x y) = op'' (op x) (op y)\\ forall x y:D, op (op'' x y) = op' (op x) (op y)}
\itemrule{Left complementation of binary operator {\op} by means of unary {\opPrime} wrt neutral element {\elt} of {\op} on domain {\D}}{Dop\_op'\_opp\_l}
{forall x:D, op (op' x) x = elt}
Remark: If the name of the opposite function is reminiscent of the notion of complement (e.g. if it is called \texttt{opp}), one can simply say {Dop\_opp\_l}.
\itemrule{Right complementation of binary operator {\op} by means of unary {\op'} wrt neutral element {\elt} of {\op} on domain {\D}}{Dop\_opp\_r}
{forall x:D, op x (op' x) = elt}
Example: \formula{Radd\_opp\_l: forall r : R, - r + r = 0}
\itemrule{Associativity of binary operators {\op} and {\op'}}{Dop\_op'\_assoc}
{forall x y z, op x (op' y z) = op (op' x y) z}
Example: \formula{forall x y z, x + (y - z) = (x + y) - z}
\itemrule{Right extrusion of binary operator {\opPrime} over binary operator {\op}}{Dop\_op'\_extrusion\_r}
{forall x y z, op x (op' y z) = op' (op x y) z}
Remark: This requires the right argument type of {\op}, the left argument type of {\opPrime}, and their return types to be identical.
Example: \formula{forall x y z, x + (y - z) = (x + y) - z}
Remark: Other less natural combinations are possible, such as \formula{forall x y z, op x (op' y z) = op' y (op x z)}.
\itemrule{Left extrusion of binary operator {\opPrime} over binary operator {\op}}{Dop\_op'\_extrusion\_l}
{forall x y z, op (op' x y) z = op' x (op y z)}
Remark: Operations are not necessarily internal composition laws. It is only required that the right argument type of {\op}, the left argument type of {\opPrime}, and their return types be identical.
Remark: When the types are heterogeneous, only one extrusion law is possible and it can simply be named {Dop\_op'\_extrusion}.
Example: \formula{app\_cons\_extrusion : forall a l l', (a :: l) ++ l' = a :: (l ++ l')}.
%======================================================================
%\section{Properties of elements}
%Remark: Not used in current library
%======================================================================
\section{Preservation and compatibility properties of operations}
\subsection{With respect to equality}
\itemrule{Injectivity of unary operator {\op}}{Dop\_inj}
{forall x y:D, op x = op y -> x = y}
\itemrule{Left regularity of binary operator {\op}}{Dop\_reg\_l, Dop\_inj\_l, or Dop\_cancel\_l}
{forall x y z:D, op z x = op z y -> x = y}
Remark: Note the order of arguments.
Remark: The Coq usage is to call it regularity, but the English standard seems to be cancellation. The recommended form is not decided yet.
Remark: Shall a property like $n^p \leq n^q \rightarrow p \leq q$ (for $n\geq 1$) be called cancellation, or should that name be reserved for operators that have an inverse?
\itemrule{Right regularity of binary operator {\op}}{Dop\_reg\_r, Dop\_inj\_r, Dop\_cancel\_r}
{forall x y z:D, op x z = op y z -> x = y}
\subsection{With respect to a relation {\rel}}
\itemrule{Compatibility of unary operator {\op}}{Dop\_rel\_compat}
{forall x y:D, rel x y -> rel (op x) (op y)}
\itemrule{Left compatibility of binary operator {\op}}{Dop\_rel\_compat\_l}
{forall x y z:D, rel x y -> rel (op z x) (op z y)}
\itemrule{Right compatibility of binary operator {\op}}{Dop\_rel\_compat\_r}
{forall x y z:D, rel x y -> rel (op x z) (op y z)}
Remark: For equality, use names of the form \name{Dop\_eq\_compat\_l} or \name{Dop\_eq\_compat\_r} (\formula{forall x y z:D, y = x -> op z y = op z x} and \formula{forall x y z:D, y = x -> op y z = op x z})
Remark: Should we admit (or even prefer) the name \name{Dop\_rel\_monotone}, \name{Dop\_rel\_monotone\_l}, \name{Dop\_rel\_monotone\_r} when {\rel} is an order?
\itemrule{Left regularity of binary operator {\op}}{Dop\_rel\_reg\_l}
{forall x y z:D, rel (op z x) (op z y) -> rel x y}
\itemrule{Right regularity of binary operator {\op}}{Dop\_rel\_reg\_r}
{forall x y z:D, rel (op x z) (op y z) -> rel x y}
Question: Would it be better to have \name{z} as first argument, since it is missing in the conclusion?? (or admit we shall use the options ``\texttt{with p}''?)
\itemrule{Left distributivity of binary operator {\op} over {\opPrime} along relation {\rel} on domain {\D}}{Dop\_op'\_rel\_distr\_l}
{forall x y z:D, rel (op (op' x y) z) (op' (op x z) (op y z))}
Example: standard property of (not necessarily distributive) lattices.
Remark: In a (non distributive) lattice, by swapping join and meet, one would also like \formula{forall x y z:D, rel (op' (op x z) (op y z)) (op (op' x y) z)}. How to name it with a symmetric name (use \name{Dop\_op'\_rel\_distr\_mon\_l} and \name{Dop\_op'\_rel\_distr\_anti\_l})?
\itemrule{Commutativity of binary operator {\op} along (equivalence) relation {\rel} on domain {\D}}{Dop\_op'\_rel\_comm}
{forall x y z:D, rel (op x y) (op y x)}
Example: \formula{forall l l':list A, Permutation (l++l') (l'++l)}
\itemrule{Irreducibility of binary operator {\op} on domain {\D}}{Dop\_irreducible}
{forall x y z:D, z = op x y -> z = x $\backslash/$ z = y}
Question: What about the constructive version? Call it \name{Dop\_irreducible\_inf}? \formula{forall x y z:D, z = op x y -> \{z = x\} + \{z = y\}}
\itemrule{Primality of binary operator {\op} along relation {\rel} on domain {\D}}{Dop\_rel\_prime}
{forall x y z:D, rel z (op x y) -> rel z x $\backslash/$ rel z y}
%======================================================================
\section{Morphisms}
\itemrule{Morphism between structures {\D} and {\D'}}{\name{D'\_of\_D}}{D -> D'}
Remark: If the domains are one-letter long, one can use \texttt{IDD'} as for \name{INR} or \name{INZ}.
\itemrule{Morphism {\phimapping} mapping unary operators {\op} to {\op'}}{phi\_op\_op', phi\_op\_op'\_morphism}
{forall x:D, phi (op x) = op' (phi x)}
Remark: If the operators have the same name in both domains, one uses \texttt{D'\_of\_D\_op} or \texttt{IDD'\_op}.
Example: \formula{Z\_of\_nat\_mult: forall n m : nat, Z\_of\_nat (n * m) = (Z\_of\_nat n * Z\_of\_nat m)\%Z}.
Remark: If the operators have different names on distinct domains, one can use \texttt{op\_op'}.
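For reference, in recent versions of the standard library the statement cited above appears under the module-based naming scheme as \texttt{Nat2Z.inj\_mul}; a quick way to inspect it:
\begin{verbatim}
Require Import ZArith.

Check Nat2Z.inj_mul.
(* states: Z.of_nat (n * m) = Z.of_nat n * Z.of_nat m,
   with the right-hand side read in Z *)
\end{verbatim}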
\itemrule{Morphism {\phimapping} mapping binary operators {\op} to {\op'}}{phi\_op\_op', phi\_op\_op'\_morphism} {forall x y:D, phi (op x y) = op' (phi x) (phi y)} Remark: If the operators have the same name in both domains, one use \texttt{D'\_of\_D\_op} or \texttt{IDD'\_op}. Remark: If the operators have different names on distinct domains, one can use \texttt{op\_op'}. \itemrule{Morphism {\phimapping} mapping binary operator {\op} to binary relation {\rel}}{phi\_op\_rel, phi\_op\_rel\_morphism} {forall x y:D, phi (op x y) <-> rel (phi x) (phi y)} Remark: If the operator and the relation have similar name, one uses \texttt{phi\_op}. Question: How to name each direction? (add \_elim for -> and \_intro for <- ?? -- as done in Bool.v ??) Example: \formula{eq\_true\_neg: \~{} eq\_true b <-> eq\_true (negb b)}. %====================================================================== \section{Preservation and compatibility properties of operations wrt order} \itemrule{Compatibility of binary operator {\op} wrt (strict order) {\rel} and (large order) {\rel'}}{Dop\_rel\_rel'\_compat} {forall x y z t:D, rel x y -> rel' z t -> rel (op x z) (op y t)} \itemrule{Compatibility of binary operator {\op} wrt (large order) {\relp} and (strict order) {\rel}}{Dop\_rel'\_rel\_compat} {forall x y z t:D, rel' x y -> rel z t -> rel (op x z) (op y t)} %====================================================================== \section{Properties of relations} \itemrule{Reflexivity of relation {\rel} on domain {\D}}{Drel\_refl} {forall x:D, rel x x} \itemrule{Symmetry of relation {\rel} on domain {\D}}{Drel\_sym} {forall x y:D, rel x y -> rel y x} \itemrule{Transitivity of relation {\rel} on domain {\D}}{Drel\_trans} {forall x y z:D, rel x y -> rel y z -> rel x z} \itemrule{Antisymmetry of relation {\rel} on domain {\D}}{Drel\_antisym} {forall x y:D, rel x y -> rel y x -> x = y} \itemrule{Irreflexivity of relation {\rel} on domain {\D}}{Drel\_irrefl} {forall x:D, \~{} rel x x} \itemrule{Asymmetry of relation {\rel} on domain {\D}}{Drel\_asym} {forall x y:D, rel x y -> \~{} rel y x} \itemrule{Cotransitivity of relation {\rel} on domain {\D}}{Drel\_cotrans} {forall x y z:D, rel x y -> \{rel z y\} + \{rel x z\}} \itemrule{Linearity of relation {\rel} on domain {\D}}{Drel\_trichotomy} {forall x y:D, \{rel x y\} + \{x = y\} + \{rel y x\}} Questions: Or call it \name{Drel\_total}, or \name{Drel\_linear}, or \name{Drel\_connected}? Use $\backslash/$ ? or use a ternary sumbool, or a ternary disjunction, for nicer elimination. \itemrule{Informative decidability of relation {\rel} on domain {\D}}{Drel\_dec (or Drel\_dect, Drel\_dec\_inf ?)} {forall x y:D, \{rel x y\} + \{\~{} rel x y\}} Remark: If equality: \name{D\_eq\_dec} or \name{D\_dec} (not like \name{eq\_nat\_dec}) \itemrule{Non informative decidability of relation {\rel} on domain {\D}}{Drel\_dec\_prop (or Drel\_dec)} {forall x y:D, rel x y $\backslash/$ \~{} rel x y} \itemrule{Inclusion of relation {\rel} in relation {\rel}' on domain {\D}}{Drel\_rel'\_incl (or Drel\_incl\_rel')} {forall x y:D, rel x y -> rel' x y} Remark: Use \name{Drel\_rel'\_weak} for a strict inclusion ?? %====================================================================== \section{Relations between properties} \itemrule{Equivalence of properties \texttt{P} and \texttt{Q}}{P\_Q\_iff} {forall x1 .. xn, P <-> Q} Remark: Alternatively use \name{P\_iff\_Q} if it is too difficult to recover what pertains to \texttt{P} and what pertains to \texttt{Q} in their concatenation (as e.g. 
in \texttt{Godel\_Dummett\_iff\_right\_distr\_implication\_over\_disjunction}). %====================================================================== \section{Arithmetical conventions} \begin{minipage}{6in} \renewcommand{\thefootnote}{\thempfootnote} % For footnotes... \begin{tabular}{lll} Zero on domain {\D} & D0 & (notation \verb=0=)\\ One on domain {\D} & D1 (if explicitly defined) & (notation \verb=1=)\\ Successor on domain {\D} & Dsucc\\ Predecessor on domain {\D} & Dpred\\ Addition on domain {\D} & Dadd/Dplus\footnote{Coq historically uses \texttt{plus} and \texttt{mult} for addition and multiplication which are inconsistent notations, the recommendation is to use \texttt{add} and \texttt{mul} except in existing libraries that already use \texttt{plus} and \texttt{mult}} & (infix notation \verb=+= [50,L])\\ Multiplication on domain {\D} & Dmul/Dmult\footnotemark[\value{footnote}] & (infix notation \verb=*= [40,L]))\\ Subtraction on domain {\D} & Dminus & (infix notation \verb=-= [50,L])\\ Opposite on domain {\D} & Dopp (if any) & (prefix notation \verb=-= [35,R]))\\ Inverse on domain {\D} & Dinv (if any) & (prefix notation \verb=/= [35,R]))\\ Power on domain {\D} & Dpower & (infix notation \verb=^= [30,R])\\ Minimal element on domain {\D} & Dmin\\ Maximal element on domain {\D} & Dmax\\ Large less than order on {\D} & Dle & (infix notations \verb!<=! and \verb!>=! [70,N]))\\ Strict less than order on {\D} & Dlt & (infix notations \verb=<= and \verb=>= [70,N]))\\ \end{tabular} \bigskip \end{minipage} \bigskip The status of \verb!>=! and \verb!>! is undecided yet. It will eithet be accepted only as parsing notations or may also accepted as a {\em definition} for the \verb!<=! and \verb! ... \end{verbatim} ce qui introduit un constructeur moralement équivalent à une application situé à une priorité totalement différente (les ``bindings'' seraient au plus haut niveau alors que l'application est à un niveau bas). \begin{figure} \begin{rulebox} \DEFNT{binding-term} \NT{constr} ~\TERM{with} ~\STAR{\NT{binding}} \SEPDEF \DEFNT{binding} \NT{constr} \end{rulebox} \caption{Grammaire des bindings} \label{bindings} \end{figure} \subsection{Enregistrements} Il faudrait aménager la syntaxe des enregistrements dans l'optique d'avoir des enregistrements anonymes (termes de première classe), même si pour l'instant, on ne dispose que d'enregistrements définis a toplevel. Exemple de syntaxe pour les types d'enregistrements: \begin{verbatim} { x1 : A1; x2 : A2(x1); _ : T; (* Pas de projection disponible *) y; (* Type infere *) ... (* ; optionnel pour le dernier champ *) } \end{verbatim} Exemple de syntaxe pour le constructeur: \begin{verbatim} { x1 = O; x2 : A2(x1) = v1; _ = v2; ... } \end{verbatim} Quant aux dépendences, une convention pourrait être de considérer les champs non annotés par le type comme non dépendants. Plusieurs interrogations: \begin{itemize} \item l'ordre des champs doit-il être respecté ? sinon, que faire pour les champs sans projection ? \item autorise-t-on \texttt{v1} a mentionner \texttt{x1} (comme dans la définition d'un module), ce qui se comporterait comme si on avait écrit \texttt{v1} à la place. Cela pourrait être une autre manière de déclarer les dépendences \end{itemize} La notation pointée pour les projections pose un problème de parsing, sauf si l'on a une convention lexicale qui discrimine les noms de modules des projections et identificateurs: \texttt{x.y.z} peut être compris comme \texttt{(x.y).z} ou texttt{x.(y.z)}. 
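For comparison, here is a small sketch in the record syntax that was eventually adopted (toplevel, named records only; the record, constructor and field names used here are hypothetical):
\begin{verbatim}
Record point := mkpoint { px : nat; py : nat }.

Definition origin := {| px := 0; py := 0 |}.

Check origin.(px).
\end{verbatim}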
\section{Grammaire des termes} \label{constrsyntax} \subsection{Quelques principes} \begin{enumerate} \item Diminuer le nombre de niveaux de priorité en regroupant les règles qui se ressemblent: infixes, préfixes, lieurs (constructions ouvertes à droite), etc. \item Éviter de surcharger la signification d'un symbole (ex: \verb+( )+ comme parenthésage et produit dans la V7). \item Faire en sorte que les membres gauches (motifs de Cases, lieurs d'abstraction ou de produits) utilisent une syntaxe compatible avec celle des membres droits (branches de Cases et corps de fonction). \end{enumerate} \subsection{Présentation de la grammaire} \begin{figure} \begin{rulebox} \DEFNT{paren-constr} \NT{cast-constr}~\TERM{,}~\NT{paren-constr} &\RNAME{pair} \nlsep \NT{cast-constr} \SEPDEF \DEFNT{cast-constr} \NT{constr}~\TERM{\!\!:}~\NT{cast-constr} &\RNAME{cast} \nlsep \NT{constr} \SEPDEF \DEFNT{constr} \NT{appl-constr}~\NT{infix}~\NT{constr} &\RNAME{infix} \nlsep \NT{prefix}~\NT{constr} &\RNAME{prefix} \nlsep \NT{constr}~\NT{postfix} &\RNAME{postfix} \nlsep \NT{appl-constr} \SEPDEF \DEFNT{appl-constr} \NT{appl-constr}~\PLUS{\NT{appl-arg}} &\RNAME{apply} \nlsep \TERM{@}~\NT{global}~\PLUS{\NT{simple-constr}} &\RNAME{expl-apply} \nlsep \NT{simple-constr} \SEPDEF \DEFNT{appl-arg} \TERM{@}~\NT{int}~\TERM{\!:=}~\NT{simple-constr} &\RNAME{impl-arg} \nlsep \NT{simple-constr} \SEPDEF \DEFNT{simple-constr} \NT{atomic-constr} \nlsep \TERM{(}~\NT{paren-constr}~\TERM{)} \nlsep \NT{match-constr} \nlsep \NT{fix-constr} %% \nlsep \TERM{<\!\!:ast\!\!:<}~\NT{ast}~\TERM{>\!>} &\RNAME{quotation} \end{rulebox} \caption{Grammaire des termes} \label{constr} \end{figure} \begin{figure} \begin{rulebox} \DEFNT{prefix} \TERM{!}~\PLUS{\NT{binder}}~\TERM{.}~ &\RNAME{prod} \nlsep \TERM{fun} ~\PLUS{\NT{binder}} ~\TERM{$\Rightarrow$} &\RNAME{lambda} \nlsep \TERM{let}~\NT{ident}~\STAR{\NT{binder}} ~\TERM{=}~\NT{constr} ~\TERM{in} &\RNAME{let} %\nlsep \TERM{let (}~\NT{comma-ident-list}~\TERM{) =}~\NT{constr} % ~\TERM{in} &~~~\RNAME{let-case} \nlsep \TERM{if}~\NT{constr}~\TERM{then}~\NT{constr}~\TERM{else} &\RNAME{if-case} \nlsep \TERM{eval}~\NT{red-fun}~\TERM{in} &\RNAME{eval} \SEPDEF \DEFNT{infix} \TERM{$\rightarrow$} & \RNAME{impl} \SEPDEF \DEFNT{atomic-constr} \TERM{_} \nlsep \TERM{?}\NT{int} \nlsep \NT{sort} \nlsep \NT{global} \SEPDEF \DEFNT{binder} \NT{ident} &\RNAME{infer} \nlsep \TERM{(}~\NT{ident}~\NT{type}~\TERM{)} &\RNAME{binder} \SEPDEF \DEFNT{type} \TERM{\!:}~\NT{constr} \nlsep \epsilon \end{rulebox} \caption{Grammaires annexes aux termes} \label{gram-annexes} \end{figure} La grammaire des termes (correspondant à l'état \texttt{barestate}) est décrite figures~\ref{constr} et~\ref{gram-annexes}. On constate par rapport aux précédentes versions de Coq d'importants changements de priorité, le plus marquant étant celui de l'application qui se trouve désormais juste au dessus\footnote{La convention est de considérer les opérateurs moins lieurs comme ``au dessus'', c'est-à-dire ayant un niveau de priorité plus élévé (comme c'est le cas avec le niveau de la grammaire actuelle des termes).} des constructions fermées à gauche et à droite. La grammaire des noms globaux est la suivante: \begin{eqnarray*} \DEFNT{global} \NT{ident} %% \nlsep \TERM{\$}\NT{ident} \nlsep \NT{ident}\TERM{.}\NT{global} \end{eqnarray*} Le $\TERM{_}$ dénote les termes à synthétiser. Les métavariables sont reconnues au niveau du lexer pour ne pas entrer en conflit avec le $\TERM{?}$ de l'existentielle. 
Les opérateurs infixes ou préfixes sont tous au même niveau de priorité du point de vue de Camlp4. La solution envisagée est de les gérer à la manière de Yacc, avec une pile (voir discussions plus bas). Ainsi, l'implication est un infixe normal; la quantification universelle et le let sont vus comme des opérateurs préfixes avec un niveau de priorité plus haut (i.e. moins lieur). Il subsiste des problèmes si l'on ne veut pas écrire de parenthèses dans: \begin{verbatim} A -> (!x. B -> (let y = C in D)) \end{verbatim} La solution proposée est d'analyser le membre droit d'un infixe de manière à autoriser les préfixes et les infixes de niveau inférieur, et d'exiger le parenthésage que pour les infixes de niveau supérieurs. En revanche, à l'affichage, certains membres droits seront plus lisibles s'ils n'utilisent pas cette astuce: \begin{verbatim} (fun x => x) = fun x => x \end{verbatim} La proposition est d'autoriser ce type d'écritures au parsing, mais l'afficheur écrit de manière standardisée en mettant quelques parenthèses superflues: $\TERM{=}$ serait symétrique alors que $\rightarrow$ appellerait l'afficheur de priorité élevée pour son sous-terme droit. Les priorités des opérateurs primitifs sont les suivantes (le signe $*$ signifie que pour le membre droit les opérateurs préfixes seront affichés sans parenthèses quel que soit leur priorité): $$ \begin{array}{c|l} $symbole$ & $priorité$ \\ \hline \TERM{!} & 200\,R* \\ \TERM{fun} & 200\,R* \\ \TERM{let} & 200\,R* \\ \TERM{if} & 200\,R \\ \TERM{eval} & 200\,R \\ \rightarrow & 90\,R* \end{array} $$ Il y a deux points d'entrée pour les termes: $\NT{constr}$ et $\NT{simple-constr}$. Le premier peut être utilisé lorsqu'il est suivi d'un séparateur particulier. Dans le cas où l'on veut une liste de termes séparés par un espace, il faut lire des $\NT{simple-constr}$. Les constructions $\TERM{fix}$ et $\TERM{cofix}$ (voir aussi figure~\ref{gram-fix}) sont fermées par end pour simplifier l'analyse. Sinon, une expression de point fixe peut être suivie par un \TERM{in} ou un \TERM{and}, ce qui pose les mêmes problèmes que le ``dangling else'': dans \begin{verbatim} fix f1 x {x} = fix f2 y {y} = ... and ... in ... \end{verbatim} il faut définir une stratégie pour associer le \TERM{and} et le \TERM{in} au bon point fixe. Un autre avantage est de faire apparaitre que le \TERM{fix} est un constructeur de terme de première classe et pas un lieur: \begin{verbatim} fix f1 ... and f2 ... in f1 end x \end{verbatim} Les propositions précédentes laissaient \texttt{f1} et \texttt{x} accolés, ce qui est source de confusion lorsque l'on fait par exemple \texttt{Pattern (f1 x)}. Les corps de points fixes et co-points fixes sont identiques, bien que ces derniers n'aient pas d'information de décroissance. Cela fonctionne puisque l'annotation est optionnelle. Cela préfigure des cas où l'on arrive à inférer quel est l'argument qui décroit structurellement (en particulier dans le cas où il n'y a qu'un seul argument). 
\begin{figure} \begin{rulebox} \DEFNT{fix-expr} \TERM{fix}~\NT{fix-decls} ~\NT{fix-select} ~\TERM{end} &\RNAME{fix} \nlsep \TERM{cofix}~\NT{cofix-decls}~\NT{fix-select} ~\TERM{end} &\RNAME{cofix} \SEPDEF \DEFNT{fix-decls} \NT{fix-decl}~\TERM{and}~\NT{fix-decls} \nlsep \NT{fix-decl} \SEPDEF \DEFNT{fix-decl} \NT{ident}~\PLUS{\NT{binder}}~\NT{type}~\NT{annot} ~\TERM{=}~\NT{constr} \SEPDEF \DEFNT{annot} \TERM{\{}~\NT{ident}~\TERM{\}} \nlsep \epsilon \SEPDEF \DEFNT{fix-select} \TERM{in}~\NT{ident} \nlsep \epsilon \end{rulebox} \caption{Grammaires annexes des points fixes} \label{gram-fix} \end{figure} La construction $\TERM{Case}$ peut-être considérée comme obsolète. Quant au $\TERM{Match}$ de la V6, il disparaît purement et simplement. \begin{figure} \begin{rulebox} \DEFNT{match-expr} \TERM{match}~\NT{case-items}~\NT{case-type}~\TERM{with}~ \NT{branches}~\TERM{end} &\RNAME{match} \nlsep \TERM{match}~\NT{case-items}~\TERM{with}~ \NT{branches}~\TERM{end} &\RNAME{infer-match} %%\nlsep \TERM{case}~\NT{constr}~\NT{case-predicate}~\TERM{of}~ %% \STAR{\NT{constr}}~\TERM{end} &\RNAME{case} \SEPDEF \DEFNT{case-items} \NT{case-item} ~\TERM{\&} ~\NT{case-items} \nlsep \NT{case-item} \SEPDEF \DEFNT{case-item} \NT{constr}~\NT{pred-pattern} &\RNAME{dep-case} \nlsep \NT{constr} &\RNAME{nodep-case} \SEPDEF \DEFNT{case-type} \TERM{$\Rightarrow$}~\NT{constr} \nlsep \epsilon \SEPDEF \DEFNT{pred-pattern} \TERM{as}~\NT{ident} ~\TERM{\!:}~\NT{constr} \SEPDEF \DEFNT{branches} \TERM{|} ~\NT{patterns} ~\TERM{$\Rightarrow$} ~\NT{constr} ~\NT{branches} \nlsep \epsilon \SEPDEF \DEFNT{patterns} \NT{pattern} ~\TERM{\&} ~\NT{patterns} \nlsep \NT{pattern} \SEPDEF \DEFNT{pattern} ... \end{rulebox} \caption{Grammaires annexes du filtrage} \label{gram-match} \end{figure} De manière globale, l'introduction de définitions dans les termes se fait avec le symbole $=$, et le $\!:=$ est réservé aux définitions au niveau vernac. Il y avait un manque de cohérence dans la V6, puisque l'on utilisait $=$ pour le $\TERM{let}$ et $\!:=$ pour les points fixes et les commandes vernac. % OBSOLETE: lieurs multiples supprimes %On peut remarquer que $\NT{binder}$ est un sous-ensemble de %$\NT{simple-constr}$, à l'exception de $\texttt{(a,b\!\!:T)}$: en tant %que lieur, {\tt a} et {\tt b} sont tous deux contraints, alors qu'en %tant que terme, seul {\tt b} l'est. Cela qui signifie que l'objectif %de rendre compatibles les membres gauches et droits est {\it presque} %atteint. \subsection{Infixes} \subsubsection{Infixes extensibles} Le problème de savoir si la liste des symboles pouvant apparaître en infixe est fixée ou extensible par l'utilisateur reste à voir. Notons que la solution où les symboles infixes sont des identificateurs que l'on peut définir paraît difficilement praticable: par exemple $\texttt{Logic.eq}$ n'est pas un opérateur binaire, mais ternaire. Il semble plus simple de garder des déclarations infixes qui relient un symbole infixe à un terme avec deux ``trous''. Par exemple: $$\begin{array}{c|l} $infixe$ & $identificateur$ \\ \hline = & \texttt{Logic.eq _ ?1 ?2} \\ == & \texttt{JohnMajor.eq _ ?1 _ ?2} \end{array}$$ La syntaxe d'une déclaration d'infixe serait par exemple: \begin{verbatim} Infix "=" 50 := Logic.eq _ ?1 ?2; \end{verbatim} \subsubsection{Gestion des précédences} Les infixes peuvent être soit laissé à Camlp4, ou bien (comme ici) considérer que tous les opérateurs ont la même précédence et gérer soit même la recomposition des termes à l'aide d'une pile (comme Yacc). 
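For comparison, in the syntax that was eventually adopted an infix symbol is attached to a term roughly as follows (a sketch; \texttt{myop} and the token \texttt{+++} are hypothetical):
\begin{verbatim}
Definition myop (x y : nat) : nat := x + 2 * y.

Infix "+++" := myop (at level 50, left associativity).

Check (1 +++ 2 +++ 3).
\end{verbatim}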
\subsection{Extensions de syntaxe} \subsubsection{Litéraux numériques} La proposition est de considerer les litéraux numériques comme de simples identificateurs. Comme il en existe une infinité, il faut un nouveau mécanisme pour leur associer une définition. Par exemple, en ce qui concerne \texttt{Arith}, la définition de $5$ serait $\texttt{S}~4$. Pour \texttt{ZArith}, $5$ serait $\texttt{xI}~2$. Comme les infixes, les constantes numériques peuvent être qualifiées pour indiquer dans quels module est le type que l'on veut référencer. Par exemple (si on renomme \texttt{Arith} en \texttt{N} et \texttt{ZArith} en \texttt{Z}): \verb+N.5+, \verb+Z.5+. \begin{eqnarray*} \EXTNT{global} \NT{int} \end{eqnarray*} \subsubsection{Nouveaux lieurs} $$ \begin{array}{rclr} \EXTNT{constr} \TERM{ex}~\PLUS{\NT{binder}}~\TERM{.}~\NT{constr} &\RNAME{ex} \nlsep \TERM{ex}~\PLUS{\NT{binder}}~\TERM{.}~\NT{constr}~\TERM{,}~\NT{constr} &\RNAME{ex2} \nlsep \TERM{ext}~\PLUS{\NT{binder}}~\TERM{.}~\NT{constr} &\RNAME{exT} \nlsep \TERM{ext}~\PLUS{\NT{binder}}~\TERM{.}~\NT{constr}~\TERM{,}~\NT{constr} &\RNAME{exT2} \end{array} $$ Pour l'instant l'existentielle n'admet qu'une seule variable, ce qui oblige à écrire des cascades de $\TERM{ex}$. Pour parser les existentielles avec deux prédicats, on peut considérer \TERM{\&} comme un infixe intermédiaire et l'opérateur existentiel en présence de cet infixe se transforme en \texttt{ex2}. \subsubsection{Nouveaux infixes} Précédences des opérateurs infixes (les plus grands associent moins fort): $$ \begin{array}{l|l|c|l} $identificateur$ & $module$ & $infixe/préfixe$ & $précédence$ \\ \hline \texttt{iff} & $Logic$ & \longleftrightarrow & 100 \\ \texttt{or} & $Logic$ & \vee & 80\, R \\ \texttt{sum} & $Datatypes$ & + & 80\, R \\ \texttt{and} & $Logic$ & \wedge & 70\, R \\ \texttt{prod} & $Datatypes$ & * & 70\, R \\ \texttt{not} & $Logic$ & \tilde{} & 60\, L \\ \texttt{eq _} & $Logic$ & = & 50 \\ \texttt{eqT _} & $Logic_Type$ & = & 50 \\ \texttt{identityT _} & $Data_Type$ & = & 50 \\ \texttt{le} & $Peano$ & $<=$ & 50 \\ \texttt{lt} & $Peano$ & $<$ & 50 \\ \texttt{ge} & $Peano$ & $>=$ & 50 \\ \texttt{gt} & $Peano$ & $>$ & 50 \\ \texttt{Zle} & $zarith_aux$ & $<=$ & 50 \\ \texttt{Zlt} & $zarith_aux$ & $<$ & 50 \\ \texttt{Zge} & $zarith_aux$ & $>=$ & 50 \\ \texttt{Zgt} & $zarith_aux$ & $>$ & 50 \\ \texttt{Rle} & $Rdefinitions$ & $<=$ & 50 \\ \texttt{Rlt} & $Rdefinitions$ & $<$ & 50 \\ \texttt{Rge} & $Rdefinitions$ & $>=$ & 50 \\ \texttt{Rgt} & $Rdefinitions$ & $>$ & 50 \\ \texttt{plus} & $Peano$ & + & 40\,L \\ \texttt{Zplus} & $fast_integer$ & + & 40\,L \\ \texttt{Rplus} & $Rdefinitions$ & + & 40\,L \\ \texttt{minus} & $Minus$ & - & 40\,L \\ \texttt{Zminus} & $zarith_aux$ & - & 40\,L \\ \texttt{Rminus} & $Rdefinitions$ & - & 40\,L \\ \texttt{Zopp} & $fast_integer$ & - & 40\,L \\ \texttt{Ropp} & $Rdefinitions$ & - & 40\,L \\ \texttt{mult} & $Peano$ & * & 30\,L \\ \texttt{Zmult} & $fast_integer$ & * & 30\,L \\ \texttt{Rmult} & $Rdefinitions$ & * & 30\,L \\ \texttt{Rdiv} & $Rdefinitions$ & / & 30\,L \\ \texttt{pow} & $Rfunctions$ & \hat & 20\,L \\ \texttt{fact} & $Rfunctions$ & ! & 20\,L \\ \end{array} $$ Notons qu'il faudrait découper {\tt Logic_Type} en deux car celui-ci définit deux égalités, ou alors les mettre dans des modules différents. \subsection{Exemples} \begin{verbatim} Definition not (A:Prop) := A->False; Inductive eq (A:Set) (x:A) : A->Prop := refl_equal : eq A x x; Inductive ex (A:Set) (P:A->Prop) : Prop := ex_intro : !x. P x -> ex A P; Lemma not_all_ex_not : !(P:U->Prop). 
~(!n. P n) -> ?n. ~ P n; Fixpoint plus n m : nat {struct n} := match n with O => m | (S k) => S (plus k m) end; \end{verbatim} \subsection{Questions ouvertes} Voici les points sur lesquels la discussion est particulièrement ouverte: \begin{itemize} \item choix d'autres symboles pour les quantificateurs \TERM{!} et \TERM{?}. En l'état actuel des discussions, on garderait le \TERM{!} pour la qunatification universelle, mais on choisirait quelquechose comme \TERM{ex} pour l'existentielle, afin de ne pas suggérer trop de symétrie entre ces quantificateurs (l'un est primitif, l'autre pas). \item syntaxe particulière pour les \texttt{sig}, \texttt{sumor}, etc. \item la possibilité d'introduire plusieurs variables du même type est pour l'instant supprimée au vu des problèmes de compatibilité de syntaxe entre les membres gauches et membres droits. L'idée étant que l'inference de type permet d'éviter le besoin de déclarer tous les types. \end{itemize} \subsection{Autres extensions} \subsubsection{Lieur multiple} L'écriture de types en présence de polymorphisme est souvent assez pénible: \begin{verbatim} Check !(A:Set) (x:A) (B:Set) (y:B). P A x B y; \end{verbatim} On pourrait avoir des déclarations introduisant à la fois un type d'une certaine sorte et une variable de ce type: \begin{verbatim} Check !(x:A:Set) (y:B:Set). P A x B y; \end{verbatim} Noter que l'on aurait pu écrire: \begin{verbatim} Check !A x B y. P A (x:A:Set) B (y:B:Set); \end{verbatim} \section{Syntaxe des tactiques} \subsection{Questions diverses} Changer ``Pattern nl c ... nl c'' en ``Pattern [ nl ] c ... [ nl ] c'' pour permettre des chiffres seuls dans la catégorie syntaxique des termes. Par uniformité remplacer ``Unfold nl c'' par ``Unfold [ nl ] c'' ? Même problème pour l'entier de Specialize (ou virer Specialize ?) ? \subsection{Questions en suspens} \verb=EAuto= : deux syntaxes différentes pour la recherche en largeur et en profondeur ? Quelle recherche par défaut ? \section*{Remarques pêle-mêle (HH)} Autoriser la syntaxe \begin{verbatim} Variable R (a : A) (b : B) : Prop. Hypotheses H (a : A) (b : B) : Prop; Y (u : U) : V. Variables H (a : A) (b : B), J (k : K) : nat; Z (v : V) : Set. \end{verbatim} Renommer eqT, refl_eqT, eqT_ind, eqT_rect, eqT_rec en eq, refl_equal, etc. Remplacer == en =. Mettre des \verb=?x= plutot que des \verb=?1= dans les motifs de ltac ?? \section{Moulinette} \begin{itemize} \item Mettre \verb=/= et * au même niveau dans R. \item Changer la précédence du - unaire dans R. \item Ajouter Require Arith par necessite si Require ArithRing ou Require ZArithRing. \item Ajouter Require ZArith par necessite si Require ZArithRing ou Require Omega. \item Enlever le Export de Bool, Arith et ZARith de Ring quand inapproprié et l'ajouter à côté des Require Ring. \item Remplacer "Check n" par "n:Check ..." \item Renommer Variable/Hypothesis hors section en Parameter/Axiom. \item Renommer les \verb=command0=, \verb=command1=, ... \verb=lcommand= etc en \verb=constr0=, \verb=constr1=, ... \verb=lconstr=. \item Remplacer les noms Coq.omega.Omega par Coq.Omega ... \item Remplacer AddPath par Add LoadPath (ou + court) \item Unify + and \{\}+\{\} and +\{\} using Prop $\leq$ Set ?? \item Remplacer Implicit Arguments On/Off par Set/Unset Implicit Arguments. \item La syntaxe \verb=Intros (a,b)= est inutile, \verb=Intros [a b]= fait l'affaire. \item Virer \verb=Goal= sans argument (synonyme de \verb=Proof= et sans effets). \item Remplacer Save. par Qed. 
\item Remplacer \verb=Zmult_Zplus_distr= par \verb=Zmult_plus_distr_r= et \verb=Zmult_plus_distr= par \verb=Zmult_plus_distr_l=. \end{itemize} \end{document} coq-8.15.0/dev/doc/archive/notes-on-conversion.v000066400000000000000000000050141417001151100214360ustar00rootroot00000000000000(**********************************************************************) (* A few examples showing the current limits of the conversion algorithm *) (**********************************************************************) (*** We define (pseudo-)divergence from Ackermann function ***) Definition ack (n : nat) := (fix F (n0 : nat) : nat -> nat := match n0 with | O => S | S n1 => fun m : nat => (fix F0 (n2 : nat) : nat := match n2 with | O => F n1 1 | S n3 => F n1 (F0 n3) end) m end) n. Notation OMEGA := (ack 4 4). Definition f (x:nat) := x. (* Evaluation in tactics can somehow be controlled *) Lemma l1 : OMEGA = OMEGA. reflexivity. (* succeed: identity *) Qed. (* succeed: identity *) Lemma l2 : OMEGA = f OMEGA. reflexivity. (* fail: conversion wants to convert OMEGA with f OMEGA *) Abort. (* but it reduces the right side first! *) Lemma l3 : f OMEGA = OMEGA. reflexivity. (* succeed: reduce left side first *) Qed. (* succeed: expected concl (the one with f) is on the left *) Lemma l4 : OMEGA = OMEGA. assert (f OMEGA = OMEGA) by reflexivity. (* succeed *) unfold f in H. (* succeed: no type-checking *) exact H. (* succeed: identity *) Qed. (* fail: "f" is on the left *) (* This example would fail whatever the preferred side is *) Lemma l5 : OMEGA = f OMEGA. unfold f. assert (f OMEGA = OMEGA) by reflexivity. unfold f in H. exact H. Qed. (* needs to convert (f OMEGA = OMEGA) and (OMEGA = f OMEGA) *) (**********************************************************************) (* Analysis of the inefficiency in Nijmegen/LinAlg/LinAlg/subspace_dim.v *) (* (proof of span_ind_uninject_prop *) In the proof, a problem of the form (Equal S t1 t2) is "simpl"ified, then "red"uced to (Equal S' t1 t1) where the new t1's are surrounded by invisible coercions. A reflexivity steps conclude the proof. The trick is that Equal projects the equality in the setoid S, and that (Equal S) itself reduces to some (fun x y => Equal S' (f x) (g y)). At the Qed time, the problem to solve is (Equal S t1 t2) = (Equal S' t1 t1) and the algorithm is to first compare S and S', and t1 and t2. Unfortunately it does not work, and since t1 and t2 involve concrete instances of algebraic structures, it takes a lot of time to realize that it is not convertible. The only hope to improve this problem is to observe that S' hides (behind two indirections) a Setoid constructor. This could be the argument to solve the problem. coq-8.15.0/dev/doc/archive/old_svn_branches.txt000066400000000000000000000025011417001151100213720ustar00rootroot00000000000000## During the migration to git, some old branches and tags have not been ## converted to directly visible git branches or tags. They are still there ## in the archive, their names on the gforge repository are in the 3rd ## column below (e.g. remotes/V8-0-bugfix). After a git clone, they ## could always be accessed by their git hashref (2nd column below). 
# SVN # GIT # Symbolic name on gforge repository r5 d2f789d remotes/tags/start r1714 0605b7c remotes/V7 r2583 372f3f0 remotes/tags/modules-2-branching r2603 6e15d9a remotes/modules r2866 76a93fa remotes/tags/modules-2-before-grammar r2951 356f749 remotes/tags/before-modules r2952 8ee67df remotes/tags/modules-2-update r2956 fb11bd9 remotes/modules-2 r3193 4d23172 remotes/mowgli r3194 c91e99b remotes/tags/mowgli-before-merge r3500 5078d29 remotes/mowgli2 r3672 63b0886 remotes/V7-3-bugfix r5086 bdceb72 remotes/V7-4-bugfix r5731 a274456 remotes/recriture r9046 e19553c remotes/tags/trunk r9146 b38ce05 remotes/coq-diff-tool r9786 a05abf8 remotes/ProofIrrelevance r10294 fdf8871 remotes/InternalExtraction r10408 df97909 remotes/TypeClasses r10673 4e19bca remotes/bertot r11130 bfd1cb3 remotes/proofs r12282 a726b30 remotes/revised-theories r13855 bae3a8e remotes/native r14062 b77191b remotes/recdef r16421 9f4bfa8 remotes/V8-0-bugfix coq-8.15.0/dev/doc/archive/perf-analysis000066400000000000000000000131231417001151100200220ustar00rootroot00000000000000Performance analysis (trunk repository) --------------------------------------- Jun 7, 2010: delayed re-typing of Ltac instances in matching (-1% on HighSchoolGeometry, -2% on JordanCurveTheorem) Jun 4, 2010: improvement in eauto and type classes inference by removing systematic preparation of debugging pretty-printing streams (std_ppcmds) (-7% in ATBR, visible only on V8.3 logs since ATBR is broken in trunk; -6% in HighSchoolGeometry) Apr 19, 2010: small improvement obtained by reducing evar instantiation from O(n^3) to O(n^2) in the size of the instance (-2% in Compcert, -2% AreaMethod, -15% in Ssreflect) Apr 17, 2010: small improvement obtained by not repeating unification twice in auto (-2% in Compcert, -2% in Algebra) Feb 15, 2010: Global decrease due to unicode inefficiency repaired Jan 8, 2010: Global increase due to an inefficiency in unicode treatment Dec 1, 2009 - Dec 19, 2009: Temporary addition of [forall x, P x] hints to exact (generally not significative but, e.g., +25% on Subst, +8% on ZFC, +5% on AreaMethod) Oct 19, 2009: Change in modules (CoLoR +35%) Aug 9, 2009: new files added in AreaMethod May 21, 2008: New version of CoRN (needs +84% more time to compile) Apr 25-29, 2008: Temporary attempt with delta in eauto (Matthieu) (+28% CoRN) Apr 17, 2008: improvement probably due to commit 10807 or 10813 (bug fixes, control of zeta in rewrite, auto (??)) (-18% Buchberger, -40% PAutomata, -28% IntMap, -43% CoRN, -13% LinAlg, but CatsInZFC -0.5% only, PiCalc stable, PersistentUnionFind -1%) Mar 11, 2008: (+19% PersistentUnionFind wrt Mar 3, +21% Angles, +270% Continuations between 7/3 and 18/4) Mar 7, 2008: (-10% PersistentUnionFind wrt Mar 3) Feb 20, 2008: temporary 1-day slow down (+64% LinAlg) Feb 14, 2008: (-10% PersistentUnionFind, -19% Groups) Feb 7, 8, 2008: temporary 2-days long slow down (+20 LinAlg, +50% BDDs) Feb 2, 2008: many updates of the module system (-13% LinAlg, -50% AMM11262, -5% Goedel, -1% PersistentUnionFind, -42% ExactRealArithmetic, -41% Icharate, -42% Kildall, -74% SquareMatrices) Jan 1, 2008: merge of TypeClasses branch (+8% PersistentUnionFind, +36% LinAlg, +76% Goedel) Nov 16, 17, 2007: (+18% Cantor, +4% LinAlg, +27% IEEE1394 on 2 days) Nov 8, 2007: (+18% Cantor, +16% LinAlg, +55% Continuations, +200% IEEE1394, +170% CTLTCTL, +220% SquareMatrices) Oct 29, V8.1 (+ 3% geometry but CoRN, Godel, Kildall, Stalmark stables) Between Oct 12 and Oct 27, 2007: inefficiency temporarily introduced in the tactic interpreter 
(from revision 10222 to 10267) (+22% CoRN, +10% geometry, ...) Sep 16, 2007: (+16% PersistentUnionFind on 3 days, LinAlg stable, Sep 4, 2007: (+26% PersistentUnionFind, LinAlg stable, Jun 6, 2007: optimization of the need for type unification in with-bindings (-3.5% Stalmark, -6% Kildall) May 20, 21, 22, 2007: improved inference of with-bindings (including activation of unification on types) (+4% PICALC, +5% Stalmark, +7% Kildall) May 11, 2007: added primitive integers (+6% CoLoR, +7% CoRN, +5% FSets, ...) Between Feb 22 and March 16, 2007: bench temporarily moved on JMN's computer (-25% CoRN, -25% Fairisle, ...) Oct 29 and Oct 30, 2006: abandoned attempt to add polymorphism on definitions (+4% in general during these two days) Oct 17, 2006: improvement in new field [r9248] (QArith -3%, geometry: -2%) Oct 5, 2006: fixing wrong unification of Meta below binders (e.g. CatsInZFC: +10%, CoRN: -2.5%, Godel: +4%, LinAlg: +7%, DISTRIBUTED_REFERENCE_COUNTING: +10%, CoLoR: +1%) Sep 26, 2006: new field [r9178-9181] (QArith: -16%, geometry: -5%, Float: +6%, BDDS:+5% but no ring in it) Sep 12, 2006: Rocq/AREA_METHOD extended (~ 530s) Aug 12, 2006: Rocq/AREA_METHOD added (~ 480s) May 30, 2006: Nancy/CoLoR added (~ 319s) May 23, 2006: new, lighter version of polymorphic inductive types (CoRN: -27%, back to Mar-24 time) May 17, 2006: changes in List.v (DISTRIBUTED_REFERENCE_COUNTING: -) May 5, 2006: improvement in closure (array instead of lists) (e.g. CatsInZFC: -10%, CoRN: -3%, May 23, 2006: polymorphic inductive types (precise, heavy algorithm) (CoRN: +37%) Dec 29, 2005: new test and use of -vm in Stalmarck Dec 27, 2005: contrib Karatsuba added (~ 30s) Dec 28, 2005: size decrease mainly due to Defined moved to Qed in FSets (reduction from 95M to 7Mo) Dec 1-14, 2005: benchmarking server down between the two dates: Godel: -10%, CoRN: -10% probably due to changes around vm (new informative Cast, change of equality in named_context_val) Oct 6, 2005: contribs IPC and Tait added (~ 22s and ~ 25s) Aug 19, 2005: time decrease after application of "Array.length x=0" Xavier's suggestions for optimisation (e.g. Nijmegen/QArith: -3%, Nijmegen/CoRN: -7%, Godel: -3%) Aug 1, 2005: contrib Kildall added (~ 65s) Jul 26-Aug 2, 2005: bench down Jul 14-15, 2005: 4 contribs failed including CoRN Jul 14, 2005: time increase after activation of "closure optimisation" (e.g. Nijmegen/QArith: +8%, Nijmegen/CoRN: +3%, Godel: +13%) Jul 7, 2005: adding contrib Fermat4 Jun 17, 2005: contrib Goodstein extended and moved to CantorOrdinals (~ 30s) May 19, 2005: contrib Goodstein and prfx (~ 9s) added Apr 21, 2005: strange time decrease (could it be due to the change of Back and Reset mechanism) (e.g. Nijmegen/CoRN: -2%, Nijmegen/QARITH: -4%, Godel: -11%) Mar 20, 2005: fixed Logic.with_check bug global time decrease (e.g. Nijmegen/CoRN: -3%, Nijmegen/QARITH: -1.5%) Jan 31-Feb 8, 2005: small instability (e.g. 
CoRN: ~2015s -> ~1999s -> ~2032s, Godel: ~340s -> ~370s) Jan 13, 2005: contrib SumOfTwoSquare added (~ 38s) coq-8.15.0/dev/doc/archive/v8-syntax/000077500000000000000000000000001417001151100172035ustar00rootroot00000000000000coq-8.15.0/dev/doc/archive/v8-syntax/check-grammar000077500000000000000000000022711417001151100216340ustar00rootroot00000000000000#!/bin/sh # This scripts checks that the new grammar of Coq as defined in syntax-v8.tex # is consistent in the sense that all invoked non-terminals are defined defined_nt() { grep "\\DEFNT{.*}" syntax-v8.tex | sed -e "s|.*DEFNT{\([^}]*\)}.*|\1|"|\ sort | sort -u } used_nt() { cat syntax-v8.tex | tr \\\\ \\n | grep "^NT{.*}" |\ sed -e "s|^NT{\([^}]*\)}.*|\1|" | egrep -v ^\#1\|non-terminal | sort -u } used_term() { cat syntax-v8.tex | tr \\\\ \\n | grep "^TERM{.*}" |\ sed -e "s|^TERM{\([^}]*\)}.*|\1|" -e "s|\\$||g" | egrep -v ^\#1\|terminal | sort -u } used_kwd() { cat syntax-v8.tex | tr \\\\ \\n | grep "^KWD{.*}" |\ sed -e "s|^KWD{\([^}]*\)}.*|\1|" -e "s|\\$||g" | egrep -v ^\#1 | sort -u } defined_nt > def used_nt > use used_term > use-t used_kwd > use-k diff def use > df ############################### echo if grep ^\> df > /dev/null 2>&1 ; then echo Undefined non-terminals: echo ======================== echo grep ^\> df | sed -e "s|^> ||" echo fi if grep ^\< df > /dev/null 2>&1 ; then echo Unused non-terminals: echo ===================== echo grep ^\< df | sed -e "s|^< ||" echo fi #echo Used terminals: #echo =============== #echo #cat use-tcoq-8.15.0/dev/doc/archive/v8-syntax/memo-v8.tex000066400000000000000000000230511417001151100212160ustar00rootroot00000000000000 \documentclass{article} \usepackage{verbatim} \usepackage{amsmath} \usepackage{amssymb} \usepackage{array} \usepackage{fullpage} \author{B.~Barras} \title{An introduction to syntax of Coq V8} %% Le _ est un caractère normal \catcode`\_=13 \let\subscr=_ \def_{\ifmmode\sb\else\subscr\fi} \def\NT#1{\langle\textit{#1}\rangle} \def\NTL#1#2{\langle\textit{#1}\rangle_{#2}} \def\TERM#1{\textsf{\bf #1}} \newenvironment{transbox} {\begin{center}\tt\begin{tabular}{l|ll} \hfil\textrm{V7} & \hfil\textrm{V8} \\ \hline} {\end{tabular}\end{center}} \def\TRANS#1#2 {\begin{tabular}[t]{@{}l@{}}#1\end{tabular} & \begin{tabular}[t]{@{}l@{}}#2\end{tabular} \\} \def\TRANSCOM#1#2#3 {\begin{tabular}[t]{@{}l@{}}#1\end{tabular} & \begin{tabular}[t]{@{}l@{}}#2\end{tabular} & #3 \\} \begin{document} \maketitle The goal of this document is to introduce by example to the new syntax of Coq. It is strongly recommended to read first the definition of the new syntax, but this document should also be useful for the eager user who wants to start with the new syntax quickly. \section{Changes in lexical conventions w.r.t. V7} \subsection{Identifiers} The lexical conventions changed: \TERM{_} is not a regular identifier anymore. It is used in terms as a placeholder for subterms to be inferred at type-checking, and in patterns as a non-binding variable. Furthermore, only letters (unicode letters), digits, single quotes and _ are allowed after the first character. \subsection{Quoted string} Quoted strings are used typically to give a filename (which may not be a regular identifier). As before they are written between double quotes ("). Unlike for V7, there is no escape character: characters are written normally but the double quote which is doubled. \section{Main changes in terms w.r.t. V7} \subsection{Precedence of application} In the new syntax, parentheses are not really part of the syntax of application. 
The precedence of application (10) is tighter than all prefix and infix notations. It makes it possible to remove parentheses in many contexts. \begin{transbox} \TRANS{(A x)->(f x)=(g y)}{A x -> f x = g y} \TRANS{(f [x]x)}{f (fun x => x)} \end{transbox} \subsection{Arithmetics and scopes} The specialized notation for \TERM{Z} and \TERM{R} (introduced by symbols \TERM{`} and \TERM{``}) have disappeared. They have been replaced by the general notion of scope. \begin{center} \begin{tabular}{l|l|l} type & scope name & delimiter \\ \hline types & type_scope & \TERM{T} \\ \TERM{bool} & bool_scope & \\ \TERM{nat} & nat_scope & \TERM{nat} \\ \TERM{Z} & Z_scope & \TERM{Z} \\ \TERM{R} & R_scope & \TERM{R} \\ \TERM{positive} & positive_scope & \TERM{P} \end{tabular} \end{center} In order to use notations of arithmetics on \TERM{Z}, its scope must be opened with command \verb+Open Scope Z_scope.+ Another possibility is using the scope change notation (\TERM{\%}). The latter notation is to be used when notations of several scopes appear in the same expression. In examples below, scope changes are not needed if the appropriate scope has been opened. Scope nat_scope is opened in the initial state of Coq. \begin{transbox} \TRANSCOM{`0+x=x+0`}{0+x=x+0}{\textrm{Z_scope}} \TRANSCOM{``0 + [if b then ``1`` else ``2``]``}{0 + if b then 1 else 2}{\textrm{R_scope}} \TRANSCOM{(0)}{0}{\textrm{nat_scope}} \end{transbox} Below is a table that tells which notation is available in which scope. The relative precedences and associativity of operators is the same as in usual mathematics. See the reference manual for more details. However, it is important to remember that unlike V7, the type operators for product and sum are left associative, in order not to clash with arithmetic operators. \begin{center} \begin{tabular}{l|l} scope & notations \\ \hline nat_scope & $+ ~- ~* ~< ~\leq ~> ~\geq$ \\ Z_scope & $+ ~- ~* ~/ ~\TERM{mod} ~< ~\leq ~> ~\geq ~?=$ \\ R_scope & $+ ~- ~* ~/ ~< ~\leq ~> ~\geq$ \\ type_scope & $* ~+$ \\ bool_scope & $\TERM{\&\&} ~\TERM{$||$} ~\TERM{-}$ \\ list_scope & $\TERM{::} ~\TERM{++}$ \end{tabular} \end{center} (Note: $\leq$ is written \TERM{$<=$}) \subsection{Notation for implicit arguments} The explicitation of arguments is closer to the \emph{bindings} notation in tactics. Argument positions follow the argument names of the head constant. \begin{transbox} \TRANS{f 1!t1 2!t2}{f (x:=t1) (y:=t2)} \TRANS{!f t1 t2}{@f t1 t2} \end{transbox} \subsection{Universal quantification} The universal quantification and dependent product types are now materialized with the \TERM{forall} keyword before the binders and a comma after the binders. The syntax of binders also changed significantly. A binder can simply be a name when its type can be inferred. In other cases, the name and the type of the variable are put between parentheses. When several consecutive variables have the same type, they can be grouped. Finally, if all variables have the same type parentheses can be omitted. \begin{transbox} \TRANS{(x:A)B}{forall (x:~A), B ~~\textrm{or}~~ forall x:~A, B} \TRANS{(x,y:nat)P}{forall (x y :~nat), P ~~\textrm{or}~~ forall x y :~nat, P} \TRANS{(x,y:nat;z:A)P}{forall (x y :~nat) (z:A), P} \TRANS{(x,y,z,t:?)P}{forall x y z t, P} \TRANS{(x,y:nat;z:?)P}{forall (x y :~nat) z, P} \end{transbox} \subsection{Abstraction} The notation for $\lambda$-abstraction follows that of universal quantification. The binders are surrounded by keyword \TERM{fun} and $\Rightarrow$ (\verb+=>+ in ascii). 
\begin{transbox} \TRANS{[x,y:nat; z](f a b c)}{fun (x y:nat) z => f a b c} \end{transbox} \subsection{Pattern-matching} Beside the usage of the keyword pair \TERM{match}/\TERM{with} instead of \TERM{Cases}/\TERM{of}, the main change is the notation for the type of branches and return type. It is no longer written between \TERM{$<$ $>$} before the \TERM{Cases} keyword, but interleaved with the destructured objects. The idea is that for each destructured object, one may specify a variable name to tell how the branches types depend on this destructured objects (case of a dependent elimination), and also how they depend on the value of the arguments of the inductive type of the destructured objects. The type of branches is then given after the keyword \TERM{return}, unless it can be inferred. Moreover, when the destructured object is a variable, one may use this variable in the return type. \begin{transbox} \TRANS{Cases n of\\~~ O => O \\| (S k) => (1) end}{match n with\\~~ 0 => 0 \\| (S k) => 1 end} \TRANS{Cases m n of \\~~0 0 => t \\| ... end}{match m, n with \\~~0, 0 => t \\| .. end} \TRANS{<[n:nat](P n)>Cases T of ... end}{match T as n return P n with ... end} \TRANS{<[n:nat][p:(even n)]\~{}(odd n)>Cases p of\\~~ ... \\end}{match p in even n return \~{} odd n with\\~~ ...\\end} \end{transbox} \subsection{Fixpoints and cofixpoints} An easier syntax for non-mutual fixpoints is provided, making it very close to the usual notation for non-recursive functions. The decreasing argument is now indicated by an annotation between curly braces, regardless of the binders grouping. The annotation can be omitted if the binders introduce only one variable. The type of the result can be omitted if inferable. \begin{transbox} \TRANS{Fix plus\{plus [n:nat] : nat -> nat :=\\~~ [m]...\}}{fix plus (n m:nat) \{struct n\}: nat := ...} \TRANS{Fix fact\{fact [n:nat]: nat :=\\ ~~Cases n of\\~~~~ O => (1) \\~~| (S k) => (mult n (fact k)) end\}}{fix fact (n:nat) :=\\ ~~match n with \\~~~~0 => 1 \\~~| (S k) => n * fact k end} \end{transbox} There is a syntactic sugar for mutual fixpoints associated to a local definition: \begin{transbox} \TRANS{let f := Fix f \{f [x:A] : T := M\} in\\(g (f y))}{let fix f (x:A) : T := M in\\g (f x)} \end{transbox} The same applies to cofixpoints, annotations are not allowed in that case. \subsection{Notation for type cast} \begin{transbox} \TRANS{O :: nat}{0 : nat} \end{transbox} \section{Main changes in tactics w.r.t. V7} The main change is that all tactic names are lowercase. This also holds for Ltac keywords. \subsection{Ltac} Definitions of macros are introduced by \TERM{Ltac} instead of \TERM{Tactic Definition}, \TERM{Meta Definition} or \TERM{Recursive Definition}. Rules of a match command are not between square brackets anymore. Context (understand a term with a placeholder) instantiation \TERM{inst} became \TERM{context}. Syntax is unified with subterm matching. \begin{transbox} \TRANS{match t with [C[x=y]] => inst C[y=x]}{match t with context C[x=y] => context C[y=x]} \end{transbox} \subsection{Named arguments of theorems} \begin{transbox} \TRANS{Apply thm with x:=t 1:=u}{apply thm with (x:=t) (1:=u)} \end{transbox} \subsection{Occurrences} To avoid ambiguity between a numeric literal and the optional occurrence numbers of this term, the occurrence numbers are put after the term itself. 
This applies to tactic \TERM{pattern} and also \TERM{unfold} \begin{transbox} \TRANS{Pattern 1 2 (f x) 3 4 d y z}{pattern (f x at 1 2) (d at 3 4) y z} \end{transbox} \section{Main changes in vernacular commands w.r.t. V7} \subsection{Binders} The binders of vernacular commands changed in the same way as those of fixpoints. This also holds for parameters of inductive definitions. \begin{transbox} \TRANS{Definition x [a:A] : T := M}{Definition x (a:A) : T := M} \TRANS{Inductive and [A,B:Prop]: Prop := \\~~conj : A->B->(and A B)}% {Inductive and (A B:Prop): Prop := \\~~conj : A -> B -> and A B} \end{transbox} \subsection{Hints} The syntax of \emph{extern} hints changed: the pattern and the tactic to be applied are separated by a \TERM{$\Rightarrow$}. \begin{transbox} \TRANS{Hint Extern 4 (toto ?) Apply lemma}{Hint Extern 4 (toto _) => apply lemma} \end{transbox} \end{document} coq-8.15.0/dev/doc/archive/v8-syntax/syntax-v8.tex000066400000000000000000001162261417001151100216160ustar00rootroot00000000000000 \documentclass{article} \usepackage{verbatim} \usepackage{amsmath} \usepackage{amssymb} \usepackage{array} \usepackage{fullpage} \author{B.~Barras} \title{Syntax of Coq V8} %% Le _ est un caractère normal \catcode`\_=13 \let\subscr=_ \def_{\ifmmode\sb\else\subscr\fi} \def\bfbar{\ensuremath{|\hskip -0.22em{}|\hskip -0.24em{}|}} \def\TERMbar{\bfbar} \def\TERMbarbar{\bfbar\bfbar} \def\notv{\text{_}} \def\infx#1{\notv#1\notv} %% Macros pour les grammaires \def\GR#1{\text{\large(}#1\text{\large)}} \def\NT#1{\langle\textit{#1}\rangle} \def\NTL#1#2{\langle\textit{#1}\rangle_{#2}} \def\TERM#1{{\bf\textrm{\bf #1}}} %\def\TERM#1{{\bf\textsf{#1}}} \def\KWD#1{\TERM{#1}} \def\ETERM#1{\TERM{#1}} \def\CHAR#1{\TERM{#1}} \def\STAR#1{#1*} \def\STARGR#1{\GR{#1}*} \def\PLUS#1{#1+} \def\PLUSGR#1{\GR{#1}+} \def\OPT#1{#1?} \def\OPTGR#1{\GR{#1}?} %% Tableaux de definition de non-terminaux \newenvironment{cadre} {\begin{array}{|c|}\hline\\} {\\\\\hline\end{array}} \newenvironment{rulebox} {$$\begin{cadre}\begin{array}{r@{~}c@{~}l@{}l@{}r}} {\end{array}\end{cadre}$$} \def\DEFNT#1{\NT{#1} & ::= &} \def\EXTNT#1{\NT{#1} & ::= & ... \\&|&} \def\RNAME#1{(\textsc{#1})} \def\SEPDEF{\\\\} \def\nlsep{\\&|&} \def\nlcont{\\&&} \newenvironment{rules} {\begin{center}\begin{rulebox}} {\end{rulebox}\end{center}} \begin{document} \maketitle \section{Meta notations used in this document} Non-terminals are printed between angle brackets (e.g. $\NT{non-terminal}$) and terminal symbols are printed in bold font (e.g. $\ETERM{terminal}$). Lexemes are displayed as non-terminals. The usual operators on regular expressions: \begin{center} \begin{tabular}{l|l} \hfil notation & \hfil meaning \\ \hline $\STAR{regexp}$ & repeat $regexp$ 0 or more times \\ $\PLUS{regexp}$ & repeat $regexp$ 1 or more times \\ $\OPT{regexp}$ & $regexp$ is optional \\ $regexp_1~\mid~regexp_2$ & alternative \end{tabular} \end{center} Parenthesis are used to group regexps. Beware to distinguish this operator $\GR{~}$ from the terminals $\ETERM{( )}$, and $\mid$ from terminal \TERMbar. Rules are optionally annotated in the right margin with: \begin{itemize} \item a precedence and associativity (L for left, R for right and N for no associativity), indicating how to solve conflicts; lower levels are tighter; \item a rule name. \end{itemize} In order to solve some conflicts, a non-terminal may be invoked with a precedence (notation: $\NTL{entry}{prec}$), meaning that rules with higher precedence do not apply. 
\section{Lexical conventions} Lexical categories are: \begin{rules} \DEFNT{ident} \STARGR{\NT{letter}\mid\CHAR{_}} \STARGR{\NT{letter}\mid \NT{digit} \mid \CHAR{'} \mid \CHAR{_}} \SEPDEF \DEFNT{field} \CHAR{.}\NT{ident} \SEPDEF \DEFNT{meta-ident} \CHAR{?}\NT{ident} \SEPDEF \DEFNT{num} \PLUS{\NT{digit}} \SEPDEF \DEFNT{int} \NT{num} \mid \CHAR{-}\NT{num} \SEPDEF \DEFNT{digit} \CHAR{0}-\CHAR{9} \SEPDEF \DEFNT{letter} \CHAR{a}-\CHAR{z}\mid\CHAR{A}-\CHAR{Z} \mid\NT{unicode-letter} \SEPDEF \DEFNT{string} \CHAR{"}~\STARGR{\CHAR{""}\mid\NT{unicode-char-but-"}}~\CHAR{"} \end{rules} Reserved identifiers for the core syntax are: \begin{quote} \KWD{as}, \KWD{cofix}, \KWD{else}, \KWD{end}, \KWD{fix}, \KWD{for}, \KWD{forall}, \KWD{fun}, \KWD{if}, \KWD{in}, \KWD{let}, \KWD{match}, \KWD{Prop}, \KWD{return}, \KWD{Set}, \KWD{then}, \KWD{Type}, \KWD{with} \end{quote} Symbols used in the core syntax: $$ \KWD{(} ~~ \KWD{)} ~~ \KWD{\{} ~~ \KWD{\}} ~~ \KWD{:} ~~ \KWD{,} ~~ \Rightarrow ~~ \rightarrow ~~ \KWD{:=} ~~ \KWD{_} ~~ \TERMbar ~~ \KWD{@} ~~ \KWD{\%} ~~ \KWD{.(} $$ Note that \TERM{struct} is not a reserved identifier. \section{Syntax of terms} \subsection{Core syntax} The main entry point of the term grammar is $\NTL{constr}{9}$. When no conflict can appear, $\NTL{constr}{200}$ is also used as entry point. \begin{rules} \DEFNT{constr} \NT{binder-constr} &200R~~ &\RNAME{binders} \nlsep \NT{constr}~\KWD{:}~\NT{constr} &100R &\RNAME{cast} \nlsep \NT{constr}~\KWD{:}~\NT{binder-constr} &100R &\RNAME{cast'} \nlsep \NT{constr}~\KWD{$\rightarrow$}~\NT{constr} &80R &\RNAME{arrow} \nlsep \NT{constr}~\KWD{$\rightarrow$}~\NT{binder-constr} &80R &\RNAME{arrow'} \nlsep \NT{constr}~\PLUS{\NT{appl-arg}} &10L &\RNAME{apply} \nlsep \KWD{@}~\NT{reference}~\STAR{\NTL{constr}{9}} &10L &\RNAME{expl-apply} \nlsep \NT{constr}~\KWD{.(} ~\NT{reference}~\STAR{\NT{appl-arg}}~\TERM{)} &1L & \RNAME{proj} \nlsep \NT{constr}~\KWD{.(}~\TERM{@} ~\NT{reference}~\STAR{\NTL{constr}{9}}~\TERM{)} &1L & \RNAME{expl-proj} \nlsep \NT{constr} ~ \KWD{\%} ~ \NT{ident} &1L &\RNAME{scope-chg} \nlsep \NT{atomic-constr} &0 \nlsep \NT{match-expr} &0 \nlsep \KWD{(}~\NT{constr}~\KWD{)} &0 \SEPDEF \DEFNT{binder-constr} \KWD{forall}~\NT{binder-list}~\KWD{,}~\NTL{constr}{200} &&\RNAME{prod} \nlsep \KWD{fun} ~\NT{binder-list} ~\KWD{$\Rightarrow$}~\NTL{constr}{200} &&\RNAME{lambda} \nlsep \NT{fix-expr} \nlsep \KWD{let}~\NT{ident-with-params} ~\KWD{:=}~\NTL{constr}{200} ~\KWD{in}~\NTL{constr}{200} &&\RNAME{let} \nlsep \KWD{let}~\NT{single-fix} ~\KWD{in}~\NTL{constr}{200} &&\RNAME{rec-let} \nlsep \KWD{let}~\KWD{(}~\OPT{\NT{let-pattern}}~\KWD{)}~\OPT{\NT{return-type}} ~\KWD{:=}~\NTL{constr}{200}~\KWD{in}~\NTL{constr}{200} &&\RNAME{let-case} \nlsep \KWD{if}~\NT{if-item} ~\KWD{then}~\NTL{constr}{200}~\KWD{else}~\NTL{constr}{200} &&\RNAME{if-case} \SEPDEF \DEFNT{appl-arg} \KWD{(}~\NT{ident}~\!\KWD{:=}~\NTL{constr}{200}~\KWD{)} &&\RNAME{impl-arg} \nlsep \KWD{(}~\NT{num}~\!\KWD{:=}~\NTL{constr}{200}~\KWD{)} &&\RNAME{impl-arg} \nlsep \NTL{constr}{9} \SEPDEF \DEFNT{atomic-constr} \NT{reference} && \RNAME{variables} \nlsep \NT{sort} && \RNAME{CIC-sort} \nlsep \NT{num} && \RNAME{number} \nlsep \KWD{_} && \RNAME{hole} \nlsep \NT{meta-ident} && \RNAME{meta/evar} \end{rules} \begin{rules} \DEFNT{ident-with-params} \NT{ident}~\STAR{\NT{binder-let}}~\NT{type-cstr} \SEPDEF \DEFNT{binder-list} \NT{binder}~\STAR{\NT{binder-let}} \nlsep \PLUS{\NT{name}}~\KWD{:}~\NT{constr} \SEPDEF \DEFNT{binder} \NT{name} &&\RNAME{infer} \nlsep \KWD{(}~\PLUS{\NT{name}}~\KWD{:}~\NT{constr} ~\KWD{)} 
&&\RNAME{binder} \SEPDEF \DEFNT{binder-let} \NT{binder} \nlsep \KWD{(}~\NT{name}~\NT{type-cstr}~\KWD{:=}~\NT{constr}~\KWD{)} \SEPDEF \DEFNT{let-pattern} \NT{name} \nlsep \NT{name} ~\KWD{,} ~\NT{let-pattern} \SEPDEF \DEFNT{type-cstr} \OPTGR{\KWD{:}~\NT{constr}} \SEPDEF \DEFNT{reference} \NT{ident} && \RNAME{short-ident} \nlsep \NT{ident}~\PLUS{\NT{field}} && \RNAME{qualid} \SEPDEF \DEFNT{sort} \KWD{Prop} ~\mid~ \KWD{Set} ~\mid~ \KWD{Type} \SEPDEF \DEFNT{name} \NT{ident} ~\mid~ \KWD{_} \end{rules} \begin{rules} \DEFNT{fix-expr} \NT{single-fix} \nlsep \NT{single-fix}~\PLUSGR{\KWD{with}~\NT{fix-decl}} ~\KWD{for}~\NT{ident} \SEPDEF \DEFNT{single-fix} \NT{fix-kw}~\NT{fix-decl} \SEPDEF \DEFNT{fix-kw} \KWD{fix} ~\mid~ \KWD{cofix} \SEPDEF \DEFNT{fix-decl} \NT{ident}~\STAR{\NT{binder-let}}~\OPT{\NT{annot}}~\NT{type-cstr} ~\KWD{:=}~\NTL{constr}{200} \SEPDEF \DEFNT{annot} \KWD{\{}~\TERM{struct}~\NT{ident}~\KWD{\}} \end{rules} \begin{rules} \DEFNT{match-expr} \KWD{match}~\NT{match-items}~\OPT{\NT{return-type}}~\KWD{with} ~\OPT{\TERMbar}~\OPT{\NT{branches}}~\KWD{end} &&\RNAME{match} \SEPDEF \DEFNT{match-items} \NT{match-item} ~\KWD{,} ~\NT{match-items} \nlsep \NT{match-item} \SEPDEF \DEFNT{match-item} \NTL{constr}{100}~\OPTGR{\KWD{as}~\NT{name}} ~\OPTGR{\KWD{in}~\NTL{constr}{100}} \SEPDEF \DEFNT{return-type} \KWD{return}~\NTL{constr}{100} \SEPDEF \DEFNT{if-item} \NT{constr}~\OPTGR{\OPTGR{\KWD{as}~\NT{name}}~\NT{return-type}} \SEPDEF \DEFNT{branches} \NT{eqn}~\TERMbar~\NT{branches} \nlsep \NT{eqn} \SEPDEF \DEFNT{eqn} \NT{pattern} ~\STARGR{\KWD{,}~\NT{pattern}} ~\KWD{$\Rightarrow$}~\NT{constr} \SEPDEF \DEFNT{pattern} \NT{reference}~\PLUS{\NT{pattern}} &1L~~ & \RNAME{constructor} \nlsep \NT{pattern}~\KWD{as}~\NT{ident} &1L & \RNAME{alias} \nlsep \NT{pattern}~\KWD{\%}~\NT{ident} &1L & \RNAME{scope-change} \nlsep \NT{reference} &0 & \RNAME{pattern-var} \nlsep \KWD{_} &0 & \RNAME{hole} \nlsep \NT{num} &0 \nlsep \KWD{(}~\NT{tuple-pattern}~\KWD{)} \SEPDEF \DEFNT{tuple-pattern} \NT{pattern} \nlsep \NT{tuple-pattern}~\KWD{,}~\NT{pattern} && \RNAME{pair} \end{rules} \subsection{Notations of the prelude (logic and basic arithmetic)} Reserved notations: $$ \begin{array}{l|c} \text{Symbol} & \text{precedence} \\ \hline \infx{,} & 250L \\ \KWD{IF}~\notv~\KWD{then}~\notv~\KWD{else}~\notv & 200R \\ \infx{:} & 100R \\ \infx{\leftrightarrow} & 95N \\ \infx{\rightarrow} & 90R \\ \infx{\vee} & 85R \\ \infx{\wedge} & 80R \\ \tilde{}\notv & 75R \\ \begin{array}[c]{@{}l@{}} \infx{=}\quad \infx{=}\KWD{$:>$}\notv \quad \infx{=}=\notv \quad \infx{\neq} \quad \infx{\neq}\KWD{$:>$}\notv \\ \infx{<}\quad\infx{>} \quad \infx{\leq}\quad\infx{\geq} \quad \infx{<}<\notv \quad \infx{<}\leq\notv \quad \infx{\leq}<\notv \quad \infx{\leq}\leq\notv \end{array} & 70N \\ \infx{+}\quad\infx{-}\quad -\notv & 50L \\ \infx{*}\quad\infx{/}\quad /\notv & 40L \\ \end{array} $$ Existential quantifiers follows the \KWD{forall} notation (with same precedence 200), but only one quantified variable is allowed. 
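For instance (our own illustration, assuming only the prelude notations
listed above), one writes
\begin{verbatim}
   exists p : nat, n = 2 * p
\end{verbatim}
and, since a single variable is quantified at a time, a double existential
is obtained by nesting:
\begin{verbatim}
   exists p, exists q : nat, n = p + q
\end{verbatim}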
\begin{rules} \EXTNT{binder-constr} \NT{quantifier-kwd}~\NT{name}~\NT{type-cstr}~\KWD{,}~\NTL{constr}{200} \\ \SEPDEF \DEFNT{quantifier-kwd} \TERM{exists} && \RNAME{ex} \nlsep \TERM{exists2} && \RNAME{ex2} \end{rules} $$ \begin{array}{l|c|l} \text{Symbol} & \text{precedence} \\ \hline \notv+\{\notv\} & 50 & \RNAME{sumor} \\ \{\notv:\notv~|~\notv\} & 0 & \RNAME{sig} \\ \{\notv:\notv~|~\notv \& \notv \} & 0 & \RNAME{sig2} \\ \{\notv:\notv~\&~\notv \} & 0 & \RNAME{sigS} \\ \{\notv:\notv~\&~\notv \& \notv \} & 0 & \RNAME{sigS2} \\ \{\notv\}+\{\notv\} & 0 & \RNAME{sumbool} \\ \end{array} $$ %% Strange: nat + {x:nat|x=x} * nat == ( + ) * \section{Grammar of tactics} \def\tacconstr{\NTL{constr}{9}} \def\taclconstr{\NTL{constr}{200}} Additional symbols are: $$ \TERM{'} ~~ \KWD{;} ~~ \TERM{()} ~~ \TERMbarbar ~~ \TERM{$\vdash$} ~~ \TERM{[} ~~ \TERM{]} ~~ \TERM{$\leftarrow$} $$ Additional reserved keywords are: $$ \KWD{at} ~~ \TERM{using} $$ \subsection{Basic tactics} \begin{rules} \DEFNT{simple-tactic} \TERM{intros}~\TERM{until}~\NT{quantified-hyp} \nlsep \TERM{intros}~\NT{intro-patterns} \nlsep \TERM{intro}~\OPT{\NT{ident}}~\OPTGR{\TERM{after}~\NT{ident}} %% \nlsep \TERM{assumption} \nlsep \TERM{exact}~\tacconstr %% \nlsep \TERM{apply}~\NT{constr-with-bindings} \nlsep \TERM{elim}~\NT{constr-with-bindings}~\OPT{\NT{eliminator}} \nlsep \TERM{elimtype}~\tacconstr \nlsep \TERM{case}~\NT{constr-with-bindings} \nlsep \TERM{casetype}~\tacconstr \nlsep \KWD{fix}~\OPT{\NT{ident}}~\NT{num} \nlsep \KWD{fix}~\NT{ident}~\NT{num}~\KWD{with}~\PLUS{\NT{fix-spec}} \nlsep \KWD{cofix}~\OPT{\NT{ident}} \nlsep \KWD{cofix}~\NT{ident}~\PLUS{\NT{fix-spec}} %% \nlsep \TERM{cut}~\tacconstr \nlsep \TERM{assert}~\tacconstr \nlsep \TERM{assert}~ \TERM{(}~\NT{ident}~\KWD{:}~\taclconstr~\TERM{)} \nlsep \TERM{assert}~ \TERM{(}~\NT{ident}~\KWD{:=}~\taclconstr~\TERM{)} \nlsep \TERM{pose}~\tacconstr \nlsep \TERM{pose}~ \TERM{(}~\NT{ident}~\KWD{:=}~\taclconstr~\TERM{)} \nlsep \TERM{generalize}~\PLUS{\tacconstr} \nlsep \TERM{generalize}~\TERM{dependent}~\tacconstr \nlsep \TERM{set}~\tacconstr~\OPT{\NT{clause}} \nlsep \TERM{set}~ \TERM{(}~\NT{ident}~\KWD{:=}~\taclconstr~\TERM{)}~\OPT{\NT{clause}} \nlsep \TERM{instantiate}~ \TERM{(}~\NT{num}~\TERM{:=}~\taclconstr~\TERM{)}~\OPT{\NT{clause}} %% \nlsep \TERM{specialize}~\OPT{\NT{num}}~\NT{constr-with-bindings} \nlsep \TERM{lapply}~\tacconstr %% \nlsep \TERM{simple}~\TERM{induction}~\NT{quantified-hyp} \nlsep \TERM{induction}~\NT{induction-arg}~\OPT{\NT{with-names}} ~\OPT{\NT{eliminator}} \nlsep \TERM{double}~\TERM{induction}~\NT{quantified-hyp}~\NT{quantified-hyp} \nlsep \TERM{simple}~\TERM{destruct}~\NT{quantified-hyp} \nlsep \TERM{destruct}~\NT{induction-arg}~\OPT{\NT{with-names}} ~\OPT{\NT{eliminator}} \nlsep \TERM{decompose}~\TERM{record}~\tacconstr \nlsep \TERM{decompose}~\TERM{sum}~\tacconstr \nlsep \TERM{decompose}~\TERM{[}~\PLUS{\NT{reference}}~\TERM{]} ~\tacconstr %% \nlsep ... 
\end{rules} \begin{rules} \EXTNT{simple-tactic} \TERM{trivial}~\OPT{\NT{hint-bases}} \nlsep \TERM{auto}~\OPT{\NT{num}}~\OPT{\NT{hint-bases}} %% %%\nlsep \TERM{autotdb}~\OPT{\NT{num}} %%\nlsep \TERM{cdhyp}~\NT{ident} %%\nlsep \TERM{dhyp}~\NT{ident} %%\nlsep \TERM{dconcl} %%\nlsep \TERM{superauto}~\NT{auto-args} \nlsep \TERM{auto}~\OPT{\NT{num}}~\TERM{decomp}~\OPT{\NT{num}} %% \nlsep \TERM{clear}~\PLUS{\NT{ident}} \nlsep \TERM{clearbody}~\PLUS{\NT{ident}} \nlsep \TERM{move}~\NT{ident}~\TERM{after}~\NT{ident} \nlsep \TERM{rename}~\NT{ident}~\TERM{into}~\NT{ident} %% \nlsep \TERM{left}~\OPT{\NT{with-binding-list}} \nlsep \TERM{right}~\OPT{\NT{with-binding-list}} \nlsep \TERM{split}~\OPT{\NT{with-binding-list}} \nlsep \TERM{exists}~\OPT{\NT{binding-list}} \nlsep \TERM{constructor}~\NT{num}~\OPT{\NT{with-binding-list}} \nlsep \TERM{constructor}~\OPT{\NT{tactic}} %% \nlsep \TERM{reflexivity} \nlsep \TERM{symmetry}~\OPTGR{\KWD{in}~\NT{ident}} \nlsep \TERM{transitivity}~\tacconstr %% \nlsep \NT{inversion-kwd}~\NT{quantified-hyp}~\OPT{\NT{with-names}}~\OPT{\NT{clause}} \nlsep \TERM{dependent}~\NT{inversion-kwd}~\NT{quantified-hyp} ~\OPT{\NT{with-names}}~\OPTGR{\KWD{with}~\tacconstr} \nlsep \TERM{inversion}~\NT{quantified-hyp}~\TERM{using}~\tacconstr~\OPT{\NT{clause}} %% \nlsep \NT{red-expr}~\OPT{\NT{clause}} \nlsep \TERM{change}~\NT{conversion}~\OPT{\NT{clause}} \SEPDEF \DEFNT{red-expr} \TERM{red} ~\mid~ \TERM{hnf} ~\mid~ \TERM{compute} \nlsep \TERM{simpl}~\OPT{\NT{pattern-occ}} \nlsep \TERM{cbv}~\PLUS{\NT{red-flag}} \nlsep \TERM{lazy}~\PLUS{\NT{red-flag}} \nlsep \TERM{unfold}~\NT{unfold-occ}~\STARGR{\KWD{,}~\NT{unfold-occ}} \nlsep \TERM{fold}~\PLUS{\tacconstr} \nlsep \TERM{pattern}~\NT{pattern-occ}~\STARGR{\KWD{,}~\NT{pattern-occ}} \SEPDEF \DEFNT{conversion} \NT{pattern-occ}~\KWD{with}~\tacconstr \nlsep \tacconstr \SEPDEF \DEFNT{inversion-kwd} \TERM{inversion} ~\mid~ \TERM{invesion_clear} ~\mid~ \TERM{simple}~\TERM{inversion} \end{rules} Conflicts exists between integers and constrs. 
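Before detailing the auxiliary entries used above, here is, for concreteness,
a small proof script (our own example, relying only on the prelude) that
combines several of these tactics; the \KWD{as} clause is the
$\NT{with-names}$ form defined below:
\begin{verbatim}
   Lemma and_sym_example : forall A B : Prop, A /\ B -> B /\ A.
   Proof.
     intros A B H.
     destruct H as [Ha Hb].
     split.
       exact Hb.
       exact Ha.
   Qed.
\end{verbatim}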
\begin{rules} \DEFNT{quantified-hyp} \NT{int}~\mid~\NT{ident} \SEPDEF \DEFNT{induction-arg} \NT{int}~\mid~\tacconstr \SEPDEF \DEFNT{fix-spec} \KWD{(}~\NT{ident}~\STAR{\NT{binder}}~\OPT{\NT{annot}} ~\KWD{:}~\taclconstr~\KWD{)} \SEPDEF \DEFNT{intro-patterns} \STAR{\NT{intro-pattern}} \SEPDEF \DEFNT{intro-pattern} \NT{name} \nlsep \TERM{[}~\NT{intro-patterns}~\STARGR{\TERMbar~\NT{intro-patterns}} ~\TERM{]} \nlsep \KWD{(}~\NT{intro-pattern}~\STARGR{\KWD{,}~\NT{intro-pattern}} ~\KWD{)} \SEPDEF \DEFNT{with-names} % \KWD{as}~\TERM{[}~\STAR{\NT{ident}}~\STARGR{\TERMbar~\STAR{\NT{ident}}} % ~\TERM{]} \KWD{as}~\NT{intro-pattern} \SEPDEF \DEFNT{eliminator} \TERM{using}~\NT{constr-with-bindings} \SEPDEF \DEFNT{constr-with-bindings} % dangling ``with'' of ``fix'' can conflict with ``with'' \tacconstr~\OPT{\NT{with-binding-list}} \SEPDEF \DEFNT{with-binding-list} \KWD{with}~\NT{binding-list} \SEPDEF \DEFNT{binding-list} \PLUS{\tacconstr} \nlsep \PLUS{\NT{simple-binding}} \SEPDEF \DEFNT{simple-binding} \KWD{(}~\NT{quantified-hyp}~\KWD{:=}~\taclconstr~\KWD{)} \SEPDEF \DEFNT{red-flag} \TERM{beta} ~\mid~ \TERM{iota} ~\mid~ \TERM{zeta} ~\mid~ \TERM{delta} ~\mid~ \TERM{delta}~\OPT{\TERM{-}}~\TERM{[}~\PLUS{\NT{reference}}~\TERM{]} \SEPDEF \DEFNT{clause} \KWD{in}~\TERM{*} \nlsep \KWD{in}~\TERM{*}~\KWD{$\vdash$}~\OPT{\NT{concl-occ}} \nlsep \KWD{in}~\OPT{\NT{hyp-ident-list}} ~\KWD{$\vdash$} ~\OPT{\NT{concl-occ}} \nlsep \KWD{in}~\OPT{\NT{hyp-ident-list}} \SEPDEF \DEFNT{hyp-ident-list} \NT{hyp-ident} \nlsep \NT{hyp-ident}~\KWD{,}~\NT{hyp-ident-list} \SEPDEF \DEFNT{hyp-ident} \NT{ident} \nlsep \KWD{(}~\TERM{type}~\TERM{of}~\NT{ident}~\KWD{)} \nlsep \KWD{(}~\TERM{value}~\TERM{of}~\NT{ident}~\KWD{)} \SEPDEF \DEFNT{concl-occ} \TERM{*} ~\NT{occurrences} \SEPDEF \DEFNT{pattern-occ} \tacconstr ~\NT{occurrences} \SEPDEF \DEFNT{unfold-occ} \NT{reference}~\NT{occurrences} \SEPDEF \DEFNT{occurrences} ~\OPTGR{\KWD{at}~\PLUS{\NT{int}}} \SEPDEF \DEFNT{hint-bases} \KWD{with}~\TERM{*} \nlsep \KWD{with}~\PLUS{\NT{ident}} \SEPDEF \DEFNT{auto-args} \OPT{\NT{num}}~\OPTGR{\TERM{adding}~\TERM{[}~\PLUS{\NT{reference}} ~\TERM{]}}~\OPT{\TERM{destructuring}}~\OPTGR{\TERM{using}~\TERM{tdb}} \end{rules} \subsection{Ltac} %% Currently, there are conflicts with keyword \KWD{in}: in the following, %% has the keyword to be associated to \KWD{let} or to tactic \TERM{simpl} ? 
%% \begin{center} %% \texttt{let x := simpl in ...} %% \end{center} \begin{rules} \DEFNT{tactic} \NT{tactic} ~\KWD{;} ~\NT{tactic} &5 &\RNAME{Then} \nlsep \NT{tactic} ~\KWD{;}~\TERM{[} ~\OPT{\NT{tactic-seq}} ~\TERM{]} &5 &\RNAME{Then-seq} %% \nlsep \TERM{try} ~\NT{tactic} &3R &\RNAME{Try} \nlsep \TERM{do} ~\NT{int-or-var} ~\NT{tactic} \nlsep \TERM{repeat} ~\NT{tactic} \nlsep \TERM{progress} ~\NT{tactic} \nlsep \TERM{info} ~\NT{tactic} \nlsep \TERM{abstract}~\NTL{tactic}{2}~\OPTGR{\TERM{using}~\NT{ident}} %% \nlsep \NT{tactic} ~\TERMbarbar ~\NT{tactic} &2R &\RNAME{Orelse} %% \nlsep \KWD{fun} ~\PLUS{\NT{name}} ~\KWD{$\Rightarrow$} ~\NT{tactic} &1 &\RNAME{Fun-tac} \nlsep \KWD{let} ~\NT{let-clauses} ~\KWD{in} ~\NT{tactic} \nlsep \KWD{let} ~\TERM{rec} ~\NT{rec-clauses} ~\KWD{in} ~\NT{tactic} \nlsep \KWD{match}~\OPT{\TERM{reverse}}~\TERM{goal}~\KWD{with} ~\OPT{\TERMbar}~\OPT{\NT{match-goal-rules}} ~\KWD{end} \nlsep \KWD{match} ~\NT{tactic} ~\KWD{with} ~\OPT{\TERMbar}~\OPT{\NT{match-rules}} ~\KWD{end} \nlsep \TERM{first}~\TERM{[} ~\NT{tactic-seq} ~\TERM{]} \nlsep \TERM{solve}~\TERM{[} ~\NT{tactic-seq} ~\TERM{]} \nlsep \TERM{idtac} \nlsep \TERM{fail} ~\OPT{\NT{num}} ~\OPT{\NT{string}} \nlsep \TERM{constr}~\KWD{:}~\tacconstr \nlsep \TERM{ipattern}~\KWD{:}~\NT{intro-pattern} \nlsep \NT{term-ltac} \nlsep \NT{reference}~\STAR{\NT{tactic-arg}} &&\RNAME{call-tactic} \nlsep \NT{simple-tactic} %% \nlsep \NT{tactic-atom} &0 &\RNAME{atomic} \nlsep \KWD{(} ~\NT{tactic} ~\KWD{)} \SEPDEF \DEFNT{tactic-arg} \TERM{ltac}~\KWD{:}~\NTL{tactic}{0} \nlsep \TERM{ipattern}~\KWD{:}~\NT{intro-pattern} \nlsep \NT{term-ltac} \nlsep \NT{tactic-atom} \nlsep \tacconstr \SEPDEF \DEFNT{term-ltac} \TERM{fresh} ~\OPT{\NT{string}} \nlsep \TERM{context} ~\NT{ident} ~\TERM{[} ~\taclconstr ~\TERM{]} \nlsep \TERM{eval} ~\NT{red-expr} ~\KWD{in} ~\tacconstr \nlsep \TERM{type} ~\tacconstr \SEPDEF \DEFNT{tactic-atom} \NT{reference} \nlsep \TERM{()} \SEPDEF \DEFNT{tactic-seq} \NT{tactic} ~\TERMbar ~\NT{tactic-seq} \nlsep \NT{tactic} \end{rules} \begin{rules} \DEFNT{let-clauses} \NT{let-clause} ~\STARGR{\KWD{with}~\NT{let-clause}} \SEPDEF \DEFNT{let-clause} \NT{ident} ~\STAR{\NT{name}} ~\KWD{:=} ~\NT{tactic} \SEPDEF \DEFNT{rec-clauses} \NT{rec-clause} ~\KWD{with} ~\NT{rec-clauses} \nlsep \NT{rec-clause} \SEPDEF \DEFNT{rec-clause} \NT{ident} ~\PLUS{\NT{name}} ~\KWD{:=} ~\NT{tactic} \SEPDEF \DEFNT{match-goal-rules} \NT{match-goal-rule} \nlsep \NT{match-goal-rule} ~\TERMbar ~\NT{match-goal-rules} \SEPDEF \DEFNT{match-goal-rule} \NT{match-hyps-list} ~\TERM{$\vdash$} ~\NT{match-pattern} ~\KWD{$\Rightarrow$} ~\NT{tactic} \nlsep \KWD{[}~\NT{match-hyps-list} ~\TERM{$\vdash$} ~\NT{match-pattern} ~\KWD{]}~\KWD{$\Rightarrow$} ~\NT{tactic} \nlsep \KWD{_} ~\KWD{$\Rightarrow$} ~\NT{tactic} \SEPDEF \DEFNT{match-hyps-list} \NT{match-hyps} ~\KWD{,} ~\NT{match-hyps-list} \nlsep \NT{match-hyps} \SEPDEF \DEFNT{match-hyps} \NT{name} ~\KWD{:} ~\NT{match-pattern} \SEPDEF \DEFNT{match-rules} \NT{match-rule} \nlsep \NT{match-rule} ~\TERMbar ~\NT{match-rules} \SEPDEF \DEFNT{match-rule} \NT{match-pattern} ~\KWD{$\Rightarrow$} ~\NT{tactic} \nlsep \KWD{_} ~\KWD{$\Rightarrow$} ~\NT{tactic} \SEPDEF \DEFNT{match-pattern} \TERM{context}~\OPT{\NT{ident}} ~\TERM{[} ~\NT{constr-pattern} ~\TERM{]} &&\RNAME{subterm} \nlsep \NT{constr-pattern} \SEPDEF \DEFNT{constr-pattern} \tacconstr \end{rules} \subsection{Other tactics} \begin{rules} \EXTNT{simple-tactic} \TERM{rewrite} ~\NT{orient} ~\NT{constr-with-bindings} ~\OPTGR{\KWD{in}~\NT{ident}} \nlsep \TERM{replace} ~\tacconstr 
~\KWD{with} ~\tacconstr ~\OPTGR{\KWD{in}~\NT{ident}} \nlsep \TERM{replace} ~\OPT{\NT{orient}} ~\tacconstr ~\OPTGR{\KWD{in}~\NT{ident}} \nlsep \TERM{symplify_eq} ~\OPT{\NT{quantified-hyp}} \nlsep \TERM{discriminate} ~\OPT{\NT{quantified-hyp}} \nlsep \TERM{injection} ~\OPT{\NT{quantified-hyp}} \nlsep \TERM{conditional}~\NT{tactic}~\TERM{rewrite}~\NT{orient} ~\NT{constr-with-bindings}~\OPTGR{\KWD{in}~\NT{ident}} \nlsep \TERM{dependent}~\TERM{rewrite}~\NT{orient}~\NT{ident} \nlsep \TERM{cutrewrite}~\NT{orient}~\tacconstr ~\OPTGR{\KWD{in}~\NT{ident}} \nlsep \TERM{absurd} ~\tacconstr \nlsep \TERM{contradiction} \nlsep \TERM{autorewrite}~\NT{hint-bases}~\OPTGR{\KWD{using}~\NT{tactic}} \nlsep \TERM{refine}~\tacconstr \nlsep \TERM{setoid_replace} ~\tacconstr ~\KWD{with} ~\tacconstr \nlsep \TERM{setoid_rewrite} ~\NT{orient} ~\tacconstr \nlsep \TERM{subst} ~\STAR{\NT{ident}} %% eqdecide.mlg \nlsep \TERM{decide}~\TERM{equality} ~\OPTGR{\tacconstr~\tacconstr} \nlsep \TERM{compare}~\tacconstr~\tacconstr %% eauto \nlsep \TERM{eexact}~\tacconstr \nlsep \TERM{eapply}~\NT{constr-with-bindings} \nlsep \TERM{prolog}~\TERM{[}~\STAR{\tacconstr}~\TERM{]} ~\NT{quantified-hyp} \nlsep \TERM{eauto}~\OPT{\NT{quantified-hyp}}~\OPT{\NT{quantified-hyp}} ~\NT{hint-bases} \nlsep \TERM{eautod}~\OPT{\NT{quantified-hyp}}~\OPT{\NT{quantified-hyp}} ~\NT{hint-bases} %% tauto \nlsep \TERM{tauto} \nlsep \TERM{simplif} \nlsep \TERM{intuition}~\OPT{\NTL{tactic}{0}} \nlsep \TERM{linearintuition}~\OPT{\NT{num}} %% plugins/cc \nlsep \TERM{cc} %% plugins/field \nlsep \TERM{field}~\STAR{\tacconstr} %% plugins/firstorder \nlsep \TERM{ground}~\OPT{\NTL{tactic}{0}} \nlsep \TERM{ground}~\OPT{\NTL{tactic}{0}}~\KWD{with}~\PLUS{\NT{reference}} \nlsep \TERM{ground}~\OPT{\NTL{tactic}{0}}~\KWD{using}~\PLUS{\NT{ident}} %%\nlsep \TERM{gtauto} \nlsep \TERM{gintuition}~\OPT{\NTL{tactic}{0}} %% plugins/fourier \nlsep \TERM{fourierZ} %% plugins/funind \nlsep \TERM{functional}~\TERM{induction}~\tacconstr~\PLUS{\tacconstr} %% plugins/jprover \nlsep \TERM{jp}~\OPT{\NT{num}} %% plugins/omega \nlsep \TERM{omega} %% plugins/ring \nlsep \TERM{quote}~\NT{ident}~\OPTGR{\KWD{[}~\PLUS{\NT{ident}}~\KWD{]}} \nlsep \TERM{ring}~\STAR{\tacconstr} \SEPDEF \DEFNT{orient} \KWD{$\rightarrow$}~\mid~\KWD{$\leftarrow$} \end{rules} \section{Grammar of commands} New symbols: $$ \TERM{.} ~~ \TERM{..} ~~ \TERM{\tt >->} ~~ \TERM{:$>$} ~~ \TERM{$<$:} $$ New keyword: $$ \KWD{where} $$ \subsection{Classification of commands} \begin{rules} \DEFNT{vernac} \TERM{Time}~\NT{vernac} &2~~ &\RNAME{Timing} %% \nlsep \NT{gallina}~\TERM{.} &1 \nlsep \NT{command}~\TERM{.} \nlsep \NT{syntax}~\TERM{.} \nlsep \TERM{[}~\PLUS{\NT{vernac}}~\TERM{]}~\TERM{.} %% \nlsep \OPTGR{\NT{num}~\KWD{:}}~\NT{subgoal-command}~\TERM{.} ~~~&0 \SEPDEF \DEFNT{subgoal-command} \NT{check-command} \nlsep %\OPT{\TERM{By}}~ \NT{tactic}~\OPT{\KWD{..}} \end{rules} \subsection{Gallina and extensions} \begin{rules} \DEFNT{gallina} \NT{thm-token}~\NT{ident}~\STAR{\NT{binder-let}}~\KWD{:}~\NT{constr} \nlsep \NT{def-token}~\NT{ident}~\NT{def-body} \nlsep \NT{assum-token}~\NT{assum-list} \nlsep \NT{finite-token}~\NT{inductive-definition} ~\STARGR{\KWD{with}~\NT{inductive-definition}} \nlsep \TERM{Fixpoint}~\NT{fix-decl}~\STARGR{\KWD{with}~\NT{fix-decl}} \nlsep \TERM{CoFixpoint}~\NT{fix-decl}~\STARGR{\KWD{with}~\NT{fix-decl}} \nlsep \TERM{Scheme}~\NT{scheme}~\STARGR{\KWD{with}~\NT{scheme}} %% Extension: record \nlsep \NT{record-tok}~\OPT{\TERM{$>$}}~\NT{ident}~\STAR{\NT{binder-let}} ~\KWD{:}~\NT{constr}~\KWD{:=} 
~\OPT{\NT{ident}}~\KWD{\{}~\NT{field-list}~\KWD{\}} \nlsep \TERM{Ltac}~\NT{ltac-def}~\STARGR{~\TERM{with}~\NT{ltac-def}} \end{rules} \begin{rules} \DEFNT{thm-token} \TERM{Theorem} ~\mid~ \TERM{Lemma} ~\mid~ \TERM{Fact} ~\mid~ \TERM{Remark} \SEPDEF \DEFNT{def-token} \TERM{Definition} ~\mid~ \TERM{Let} ~\mid~ \OPT{\TERM{Local}}~\TERM{SubClass} \SEPDEF \DEFNT{assum-token} \TERM{Hypothesis} ~\mid~ \TERM{Variable} ~\mid~ \TERM{Axiom} ~\mid~ \TERM{Parameter} \SEPDEF \DEFNT{finite-token} \TERM{Inductive} ~\mid~ \TERM{CoInductive} \SEPDEF \DEFNT{record-tok} \TERM{Record} ~\mid~ \TERM{Structure} \end{rules} \begin{rules} \DEFNT{def-body} \STAR{\NT{binder-let}}~\NT{type-cstr}~\KWD{:=} ~\OPT{\NT{reduce}}~\NT{constr} \nlsep \STAR{\NT{binder-let}}~\KWD{:}~\NT{constr} \SEPDEF \DEFNT{reduce} \TERM{Eval}~\NT{red-expr}~\KWD{in} \SEPDEF \DEFNT{ltac-def} \NT{ident}~\STAR{\NT{name}}~\KWD{:=}~\NT{tactic} \SEPDEF \DEFNT{rec-definition} \NT{fix-decl}~\OPT{\NT{decl-notation}} \SEPDEF \DEFNT{inductive-definition} \OPT{\NT{string}}~\NT{ident}~\STAR{\NT{binder-let}}~\KWD{:} ~\NT{constr}~\KWD{:=} ~\OPT{\TERMbar}~\OPT{\NT{constructor-list}} ~\OPT{\NT{decl-notation}} \SEPDEF \DEFNT{constructor-list} \NT{constructor}~\TERMbar~\NT{constructor-list} \nlsep \NT{constructor} \SEPDEF \DEFNT{constructor} \NT{ident}~\STAR{\NT{binder-let}}\OPTGR{\NT{coerce-kwd}~\NT{constr}} \SEPDEF \DEFNT{decl-notation} \TERM{where}~\NT{string}~\TERM{:=}~\NT{constr} \SEPDEF \DEFNT{field-list} \NT{field}~\KWD{;}~\NT{field-list} \nlsep \NT{field} \SEPDEF \DEFNT{field} \NT{ident}~\OPTGR{\NT{coerce-kwd}~\NT{constr}} \nlsep \NT{ident}~\NT{type-cstr-coe}~\KWD{:=}~\NT{constr} \SEPDEF \DEFNT{assum-list} \PLUS{\GR{\KWD{(}~\NT{simple-assum-coe}~\KWD{)}}} \nlsep \NT{simple-assum-coe} \SEPDEF \DEFNT{simple-assum-coe} \PLUS{\NT{ident}}~\NT{coerce-kwd}~\NT{constr} \SEPDEF \DEFNT{coerce-kwd} \TERM{:$>$} ~\mid~ \KWD{:} \SEPDEF \DEFNT{type-cstr-coe} \OPTGR{\NT{coerce-kwd}~\NT{constr}} \SEPDEF \DEFNT{scheme} \NT{ident}~\KWD{:=}~\NT{dep-scheme}~\KWD{for}~\NT{reference} ~\TERM{Sort}~\NT{sort} \SEPDEF \DEFNT{dep-scheme} \TERM{Induction}~\mid~\TERM{Minimality} \end{rules} \subsection{Modules and sections} \begin{rules} \DEFNT{gallina} \TERM{Module}~\NT{ident}~\STAR{\NT{mbinder}}~\OPT{\NT{of-mod-type}} ~\OPTGR{\KWD{:=}~\NT{mod-expr}} \nlsep \TERM{Module}~\KWD{Type}~\NT{ident}~\STAR{\NT{mbinder}} ~\OPTGR{\KWD{:=}~\NT{mod-type}} \nlsep \TERM{Declare}~\TERM{Module}~\NT{ident}~\STAR{\NT{mbinder}} ~\OPT{\NT{of-mod-type}} ~\OPTGR{\KWD{:=}~\NT{mod-expr}} \nlsep \TERM{Section}~\NT{ident} \nlsep \TERM{Chapter}~\NT{ident} \nlsep \TERM{End}~\NT{ident} %% \nlsep \TERM{Require}~\OPT{\NT{export-token}}~\OPT{\NT{specif-token}} ~\PLUS{\NT{reference}} \nlsep \TERM{Require}~\OPT{\NT{export-token}}~\OPT{\NT{specif-token}} ~\NT{string} \nlsep \TERM{Import}~\PLUS{\NT{reference}} \nlsep \TERM{Export}~\PLUS{\NT{reference}} \SEPDEF \DEFNT{export-token} \TERM{Import} ~\mid~ \TERM{Export} \SEPDEF \DEFNT{specif-token} \TERM{Implementation} ~\mid~ \TERM{Specification} \SEPDEF \DEFNT{mod-expr} \NT{reference} \nlsep \NT{mod-expr}~\NT{mod-expr} & L \nlsep \KWD{(}~\NT{mod-expr}~\KWD{)} \SEPDEF \DEFNT{mod-type} \NT{reference} \nlsep \NT{mod-type}~\KWD{with}~\NT{with-declaration} \SEPDEF \DEFNT{with-declaration} %on forcera les ( ) %si exceptionnellemt %un fixpoint ici \TERM{Definition}~\NT{ident}~\KWD{:=}~\NTL{constr}{} %{100} \nlsep \TERM{Module}~\NT{ident}~\KWD{:=}~\NT{reference} \SEPDEF \DEFNT{of-mod-type} \KWD{:}~\NT{mod-type} \nlsep \TERM{$<$:}~\NT{mod-type} \SEPDEF \DEFNT{mbinder} 
\KWD{(}~\PLUS{\NT{ident}}~\KWD{:}~\NT{mod-type}~\KWD{)} \end{rules} \begin{rules} \DEFNT{gallina} \TERM{Transparent}~\PLUS{\NT{reference}} \nlsep \TERM{Opaque}~\PLUS{\NT{reference}} \nlsep \TERM{Canonical}~\TERM{Structure}~\NT{reference}~\OPT{\NT{def-body}} \nlsep \TERM{Coercion}~\OPT{\TERM{Local}}~\NT{reference}~\NT{def-body} \nlsep \TERM{Coercion}~\OPT{\TERM{Local}}~\NT{reference}~\KWD{:} ~\NT{class-rawexpr}~\TERM{$>->$}~\NT{class-rawexpr} \nlsep \TERM{Identity}~\TERM{Coercion}~\OPT{\TERM{Local}}~\NT{ident}~\KWD{:} ~\NT{class-rawexpr}~\TERM{$>->$}~\NT{class-rawexpr} \nlsep \TERM{Implicit}~\TERM{Arguments}~\NT{reference}~\TERM{[}~\STAR{\NT{num}}~\TERM{]} \nlsep \TERM{Implicit}~\TERM{Arguments}~\NT{reference} \nlsep \TERM{Implicit}~\KWD{Type}~\PLUS{\NT{ident}}~\KWD{:}~\NT{constr} \SEPDEF \DEFNT{command} \TERM{Comments}~\STAR{\NT{comment}} \nlsep \TERM{Pwd} \nlsep \TERM{Cd}~\OPT{\NT{string}} \nlsep \TERM{Drop} ~\mid~ \TERM{ProtectedLoop} ~\mid~\TERM{Quit} %% \nlsep \TERM{Load}~\OPT{\TERM{Verbose}}~\NT{ident} \nlsep \TERM{Load}~\OPT{\TERM{Verbose}}~\NT{string} \nlsep \TERM{Declare}~\TERM{ML}~\TERM{Module}~\PLUS{\NT{string}} \nlsep \TERM{Locate}~\NT{locatable} \nlsep \TERM{Add}~\OPT{\TERM{Rec}}~\TERM{LoadPath}~\NT{string}~\OPT{\NT{as-dirpath}} \nlsep \TERM{Remove}~\TERM{LoadPath}~\NT{string} \nlsep \TERM{Add}~\OPT{\TERM{Rec}}~\TERM{ML}~\TERM{Path}~\NT{string} %% \nlsep \KWD{Type}~\NT{constr} \nlsep \TERM{Print}~\NT{printable} \nlsep \TERM{Print}~\NT{reference} \nlsep \TERM{Inspect}~\NT{num} \nlsep \TERM{About}~\NT{reference} %% \nlsep \TERM{Search}~\NT{reference}~\OPT{\NT{in-out-modules}} \nlsep \TERM{SearchPattern}~\NT{constr-pattern}~\OPT{\NT{in-out-modules}} \nlsep \TERM{SearchRewrite}~\NT{constr-pattern}~\OPT{\NT{in-out-modules}} \nlsep \TERM{SearchAbout}~\NT{reference}~\OPT{\NT{in-out-modules}} \nlsep \TERM{SearchAbout}~\TERM{[}~\STAR{\NT{ref-or-string}}~\TERM{]}\OPT{\NT{in-out-modules}} \nlsep \KWD{Set}~\NT{ident}~\OPT{\NT{opt-value}} \nlsep \TERM{Unset}~\NT{ident} \nlsep \KWD{Set}~\NT{ident}~\NT{ident}~\OPT{\NT{opt-value}} \nlsep \KWD{Set}~\NT{ident}~\NT{ident}~\PLUS{\NT{opt-ref-value}} \nlsep \TERM{Unset}~\NT{ident}~\NT{ident}~\STAR{\NT{opt-ref-value}} %% \nlsep \TERM{Print}~\TERM{Table}~\NT{ident}~\NT{ident} \nlsep \TERM{Print}~\TERM{Table}~\NT{ident} \nlsep \TERM{Add}~\NT{ident}~\OPT{\NT{ident}}~\PLUS{\NT{opt-ref-value}} %% \nlsep \TERM{Test}~\NT{ident}~\OPT{\NT{ident}}~\STAR{\NT{opt-ref-value}} %% \nlsep \TERM{Remove}~\NT{ident}~\OPT{\NT{ident}}~\PLUS{\NT{opt-ref-value}} \SEPDEF \DEFNT{check-command} \TERM{Eval}~\NT{red-expr}~\KWD{in}~\NT{constr} \nlsep \TERM{Check}~\NT{constr} \SEPDEF \DEFNT{ref-or-string} \NT{reference} \nlsep \NT{string} \end{rules} \begin{rules} \DEFNT{printable} \TERM{Term}~\NT{reference} \nlsep \TERM{All} \nlsep \TERM{Section}~\NT{reference} \nlsep \TERM{Grammar}~\NT{ident} \nlsep \TERM{LoadPath} \nlsep \TERM{Module}~\OPT{\KWD{Type}}~\NT{reference} \nlsep \TERM{Modules} \nlsep \TERM{ML}~\TERM{Path} \nlsep \TERM{ML}~\TERM{Modules} \nlsep \TERM{Graph} \nlsep \TERM{Classes} \nlsep \TERM{Coercions} \nlsep \TERM{Coercion}~\TERM{Paths}~\NT{class-rawexpr}~\NT{class-rawexpr} \nlsep \TERM{Tables} % \nlsep \TERM{Proof}~\NT{reference} % Obsolete, useful in V6.3 ?? 
\nlsep \TERM{Hint}~\OPT{\NT{reference}} \nlsep \TERM{Hint}~\TERM{*} \nlsep \TERM{HintDb}~\NT{ident} \nlsep \TERM{Scopes} \nlsep \TERM{Scope}~\NT{ident} \nlsep \TERM{Visibility}~\OPT{\NT{ident}} \nlsep \TERM{Implicit}~\NT{reference} \SEPDEF \DEFNT{class-rawexpr} \TERM{Funclass}~\mid~\TERM{Sortclass}~\mid~\NT{reference} \SEPDEF \DEFNT{locatable} \NT{reference} \nlsep \TERM{File}~\NT{string} \nlsep \TERM{Library}~\NT{reference} \nlsep \NT{string} \SEPDEF \DEFNT{opt-value} \NT{ident} ~\mid~ \NT{string} \SEPDEF \DEFNT{opt-ref-value} \NT{reference} ~\mid~ \NT{string} \SEPDEF \DEFNT{as-dirpath} \KWD{as}~\NT{reference} \SEPDEF \DEFNT{in-out-modules} \TERM{inside}~\PLUS{\NT{reference}} \nlsep \TERM{outside}~\PLUS{\NT{reference}} \SEPDEF \DEFNT{comment} \NT{constr} \nlsep \NT{string} \end{rules} \subsection{Other commands} %% TODO: min/maj pas a jour \begin{rules} \EXTNT{command} \TERM{Debug}~\TERM{On} \nlsep \TERM{Debug}~\TERM{Off} %% TODO: vernac \nlsep \TERM{Add}~\TERM{setoid}~\tacconstr~\tacconstr~\tacconstr \nlsep \TERM{Add}~\TERM{morphism}~\tacconstr~\KWD{:}~\NT{ident} \nlsep \TERM{Derive}~\TERM{inversion_clear} ~\OPT{\NT{num}}~\NT{ident}~\NT{ident} \nlsep \TERM{Derive}~\TERM{inversion_clear} ~\NT{ident}~\KWD{with}~\tacconstr~\OPTGR{\TERM{Sort}~\NT{sort}} \nlsep \TERM{Derive}~\TERM{inversion} ~\OPT{\NT{num}}~\NT{ident}~\NT{ident} \nlsep \TERM{Derive}~\TERM{inversion} ~\NT{ident}~\KWD{with}~\tacconstr~\OPTGR{\TERM{Sort}~\NT{sort}} \nlsep \TERM{Derive}~\TERM{dependent}~\TERM{inversion_clear} ~\NT{ident}~\KWD{with}~\tacconstr~\OPTGR{\TERM{Sort}~\NT{sort}} \nlsep \TERM{Derive}~\TERM{dependent}~\TERM{inversion} ~\NT{ident}~\KWD{with}~\tacconstr~\OPTGR{\TERM{Sort}~\NT{sort}} %% Correctness: obsolete ? %\nlsep Correctness %\nlsep Global Variable %% TODO: extraction \nlsep Extraction ... 
%% field \nlsep \TERM{Add}~\TERM{Field}~\tacconstr~\tacconstr~\tacconstr ~\tacconstr~\tacconstr~\tacconstr \nlcont~~~~\tacconstr~\tacconstr~\OPT{\NT{minus-div}} %% funind \nlsep \TERM{Functional}~\TERM{Scheme}~\NT{ident}~\KWD{:=} ~\TERM{Induction}~\KWD{for}~\tacconstr ~\OPTGR{\KWD{with}~\PLUS{\tacconstr}} %% ring \nlsep \TERM{Add}~\TERM{Ring}~\tacconstr~\tacconstr~\tacconstr ~\tacconstr~\tacconstr~\tacconstr \nlcont~~~~\tacconstr~\tacconstr~\KWD{[}~\PLUS{\tacconstr}~\KWD{]} \nlsep \TERM{Add}~\TERM{Semi}~\TERM{Ring}~\tacconstr~\tacconstr~\tacconstr ~\tacconstr~\tacconstr~\tacconstr \nlcont~~~~\tacconstr~\KWD{[}~\PLUS{\tacconstr}~\KWD{]} \nlsep \TERM{Add}~\TERM{Abstract}~\TERM{Ring}~\tacconstr~\tacconstr~\tacconstr ~\tacconstr~\tacconstr~\tacconstr \nlcont~~~~\tacconstr~\tacconstr \nlsep \TERM{Add}~\TERM{Abstract}~\TERM{Semi}~\TERM{Ring}~\tacconstr ~\tacconstr~\tacconstr~\tacconstr~\tacconstr~\tacconstr \nlcont~~~~\tacconstr \nlsep \TERM{Add}~\TERM{Setoid}~\TERM{Ring}~\tacconstr~\tacconstr~\tacconstr ~\tacconstr~\tacconstr~\tacconstr \nlcont~~~~\tacconstr~\tacconstr~\tacconstr~\tacconstr~\tacconstr~\tacconstr ~\tacconstr~\KWD{[}~\PLUS{\tacconstr}~\KWD{]} \nlsep \TERM{Add}~\TERM{Setoid}~\TERM{Semi}~\TERM{Ring}~\tacconstr~\tacconstr ~\tacconstr~\tacconstr~\tacconstr~\tacconstr \nlcont~~~~\tacconstr~\tacconstr~\tacconstr~\tacconstr~\tacconstr ~\KWD{[}~\PLUS{tacconstr}~\KWD{]} \SEPDEF \DEFNT{minus-div} \KWD{with}~\NT{minus-arg}~\NT{div-arg} \nlsep \KWD{with}~\NT{div-arg}~\NT{minus-arg} \SEPDEF \DEFNT{minus-arg} \TERM{minus}~\KWD{:=}~\tacconstr \SEPDEF \DEFNT{div-arg} \TERM{div}~\KWD{:=}~\tacconstr \end{rules} \begin{rules} \EXTNT{command} \TERM{Write}~\TERM{State}~\NT{ident} \nlsep \TERM{Write}~\TERM{State}~\NT{string} \nlsep \TERM{Restore}~\TERM{State}~\NT{ident} \nlsep \TERM{Restore}~\TERM{State}~\NT{string} \nlsep \TERM{Reset}~\NT{ident} \nlsep \TERM{Reset}~\TERM{Initial} \nlsep \TERM{Back}~\OPT{\NT{num}} \end{rules} \subsection{Proof-editing commands} \begin{rules} \EXTNT{command} \TERM{Goal}~\NT{constr} \nlsep \TERM{Proof}~\OPT{\NT{constr}} \nlsep \TERM{Proof}~\KWD{with}~\NT{tactic} \nlsep \TERM{Abort}~\OPT{\TERM{All}} \nlsep \TERM{Abort}~\NT{ident} \nlsep \TERM{Existential}~\NT{num}~\KWD{:=}~\NT{constr-body} \nlsep \TERM{Qed} \nlsep \TERM{Save}~\NT{ident} \nlsep \TERM{Defined}~\OPT{\NT{ident}} \nlsep \TERM{Suspend} \nlsep \TERM{Resume}~\OPT{\NT{ident}} \nlsep \TERM{Restart} \nlsep \TERM{Undo}~\OPT{\NT{num}} \nlsep \TERM{Focus}~\OPT{\NT{num}} \nlsep \TERM{Unfocus} \nlsep \TERM{Show}~\OPT{\NT{num}} \nlsep \TERM{Show}~\TERM{Implicit}~\TERM{Arguments}~\OPT{\NT{num}} \nlsep \TERM{Show}~\TERM{Node} \nlsep \TERM{Show}~\TERM{Existentials} \nlsep \TERM{Show}~\TERM{Tree} \nlsep \TERM{Show}~\TERM{Conjecture} \nlsep \TERM{Show}~\TERM{Proof} \nlsep \TERM{Show}~\TERM{Intro} \nlsep \TERM{Show}~\TERM{Intros} %% Correctness: obsolete ? 
%%\nlsep \TERM{Show}~\TERM{Programs} \nlsep \TERM{Hint}~\OPT{\TERM{Local}}~\NT{hint}~\OPT{\NT{inbases}} %% PrintConstr not documented \end{rules} \begin{rules} \DEFNT{constr-body} \NT{type-cstr}~\KWD{:=}~\NT{constr} \SEPDEF \DEFNT{hint} \TERM{Resolve}~\PLUS{\NTL{constr}{9}} \nlsep \TERM{Immediate}~\PLUS{\NTL{constr}{9}} \nlsep \TERM{Unfold}~\PLUS{\NT{reference}} \nlsep \TERM{Constructors}~\PLUS{\NT{reference}} \nlsep \TERM{Extern}~\NT{num}~\NT{constr}~\KWD{$\Rightarrow$}~\NT{tactic} \nlsep \TERM{Destruct}~\NT{ident}~\KWD{:=}~\NT{num}~\NT{destruct-loc} ~\NT{constr}~\KWD{$\Rightarrow$}~\NT{tactic} \nlsep \TERM{Rewrite}~\NT{orient}~\PLUS{\NTL{constr}{9}} ~\OPTGR{\KWD{using}~\NT{tactic}} \SEPDEF \DEFNT{inbases} \KWD{:}~\PLUS{\NT{ident}} \SEPDEF \DEFNT{destruct-loc} \TERM{Conclusion} \nlsep \OPT{\TERM{Discardable}}~\TERM{Hypothesis} \end{rules} \subsection{Syntax extensions} \begin{rules} \DEFNT{syntax} \TERM{Open}~\TERM{Scope}~\NT{ident} \nlsep \TERM{Close}~\TERM{Scope}~\NT{ident} \nlsep \TERM{Delimit}~\TERM{Scope}~\NT{ident}~\KWD{with}~\NT{ident} \nlsep \TERM{Bind}~\TERM{Scope}~\NT{ident}~\KWD{with}~\PLUS{\NT{class-rawexpr}} \nlsep \TERM{Arguments}~\TERM{Scope}~\NT{reference} ~\TERM{[}~\PLUS{\NT{name}}~\TERM{]} \nlsep \TERM{Infix}~\OPT{\TERM{Local}} %%% ~\NT{prec}~\OPT{\NT{num}} ~\NT{string}~\KWD{:=}~\NT{reference}~\OPT{\NT{modifiers}} ~\OPT{\NT{in-scope}} \nlsep \TERM{Notation}~\OPT{\TERM{Local}}~\NT{string}~\KWD{:=}~\NT{constr} ~\OPT{\NT{modifiers}}~\OPT{\NT{in-scope}} \nlsep \TERM{Notation}~\OPT{\TERM{Local}}~\NT{ident}~\KWD{:=}~\NT{constr} ~\OPT{\KWD{(}\TERM{only~\TERM{parsing}\KWD{)}}} \nlsep \TERM{Reserved}~\TERM{Notation}~\OPT{\TERM{Local}}~\NT{string} ~\OPT{\NT{modifiers}} \nlsep \TERM{Tactic}~\TERM{Notation}~\NT{string}~\STAR{\NT{tac-production}} ~\KWD{:=}~\NT{tactic} \SEPDEF \DEFNT{modifiers} \KWD{(}~\NT{mod-list}~\KWD{)} \SEPDEF \DEFNT{mod-list} \NT{modifier} \nlsep \NT{modifier}~\KWD{,}~\NT{mod-list} \SEPDEF \DEFNT{modifier} \NT{ident}~\KWD{at}~\NT{num} \nlsep \NT{ident}~\STARGR{\KWD{,}~\NT{ident}}~\KWD{at}~\NT{num} \nlsep \KWD{at}~\TERM{next}~\TERM{level} \nlsep \KWD{at}~\TERM{level}~\NT{num} \nlsep \TERM{left}~\TERM{associativity} \nlsep \TERM{right}~\TERM{associativity} \nlsep \TERM{no}~\TERM{associativity} \nlsep \NT{ident}~\NT{syntax-entry} \nlsep \TERM{only}~\TERM{parsing} \nlsep \TERM{format}~\NT{string} \SEPDEF \DEFNT{in-scope} \KWD{:}~\NT{ident} \SEPDEF \DEFNT{syntax-entry} \TERM{ident}~\mid~\TERM{global}~\mid~\TERM{bigint} \SEPDEF \DEFNT{tac-production} \NT{string} \nlsep \NT{ident}~\TERM{(}~\NT{ident}~\TERM{)} %%% \SEPDEF %%% \DEFNT{prec} %%% \TERM{LeftA}~\mid~\TERM{RightA}~\mid~\TERM{NonA} \end{rules} \end{document} coq-8.15.0/dev/doc/archive/versions-history.tex000066400000000000000000000347361417001151100214300ustar00rootroot00000000000000\documentclass[a4paper]{book} \usepackage{fullpage} \usepackage[utf8]{inputenc} \usepackage[T1]{fontenc} \usepackage{amsfonts} \newcommand{\feature}[1]{{\em #1}} \begin{document} \begin{center} \begin{huge} A history of Coq versions \end{huge} \end{center} \bigskip \centerline{\large 1984-1989: The Calculus of Constructions} \bigskip \centerline{\large (see README.V1-V5 for details)} \mbox{}\\ \mbox{}\\ \begin{tabular}{l|l|l} version & date & comments \\ \hline CONSTR V1.10& mention of dates from 6 December & \feature{type-checker for Coquand's Calculus }\\ & 1984 to 13 February 1985 & \feature{of Constructions}, implementation \\ & frozen 22 December 1984 & language is a predecessor of CAML\\ CONSTR V1.11& mention of dates from 6 
December\\ & 1984 to 19 February 1985 (freeze date) &\\ CoC V2.8& dated 16 December 1985 (freeze date)\\ CoC V2.9& & \feature{cumulative hierarchy of universes}\\ CoC V2.13& dated 25 June 1986 (freeze date)\\ CoC V3.1& started summer 1986 & \feature{AUTO tactic}\\ & dated 20 November 1986 & implementation language now named CAML\\ CoC V3.2& dated 27 November 1986\\ CoC V3.3& dated 1 January 1987 & creation of a directory for examples\\ CoC V3.4& dated 1 January 1987 & \feature{lambda and product distinguished in the syntax}\\ CoC V4.1& dated 24 July 1987 (freeze date)\\ CoC V4.2& dated 10 September 1987\\ CoC V4.3& dated 15 September 1987 & \feature{mathematical vernacular toplevel}\\ & frozen November 1987 & \feature{section mechanism}\\ & & \feature{logical vs computational content (sorte Spec)}\\ & & \feature{LCF engine}\\ CoC V4.4& dated 27 January 1988 & \feature{impredicatively encoded inductive types}\\ & frozen March 1988\\ CoC V4.5 and V4.5.5& dated 15 March 1988 & \feature{program extraction}\\ & demonstrated in June 1988\\ CoC V4.6& dated 1 September 1988 & start of LEGO fork\\ CoC V4.7& started 6 September 1988 \\ CoC V4.8& dated 1 December 1988 (release time) & \feature{floating universes}\\ CoC V4.8.5& dated 1 February 1989 & \\ CoC V4.9& dated 1 March 1989 (release date)\\ CoC V4.10 and 4.10.1& dated 1 May 1989 & released with documentation in English\\ \end{tabular} \bigskip \noindent Note: CoC above stands as an abbreviation for {\em Calculus of Constructions}, official name of the system. \bigskip \bigskip \newpage \centerline{\large 1989-now: The Calculus of Inductive Constructions} \mbox{}\\ \centerline{I- RCS archives in Caml and Caml-Light} \mbox{}\\ \mbox{}\\ \begin{tabular}{l|l|l} version & date & comments \\ \hline Coq V5.0 & headers dated 1 January 1990 & internal use \\ & & \feature{inductive types with primitive recursor}\\ Coq V5.1 & ended 12 July 1990 & internal use \\ Coq V5.2 & log dated 4 October 1990 & internal use \\ Coq V5.3 & log dated 12 October 1990 & internal use \\ Coq V5.4 & headers dated 24 October 1990 & internal use, new \feature{extraction} (version 1) [3-12-90]\\ Coq V5.5 & started 6 December 1990 & internal use \\ Coq V5.6 beta & 1991 & first announce of the new Coq based on CIC \\ & & (in May at TYPES?)\\ & & \feature{rewrite tactic}\\ & & use of RCS at least from February 1991\\ Coq V5.6& 7 August 1991 & \\ Coq V5.6 patch 1& 13 November 1991 & \\ Coq V5.6 (last) & mention of 27 November 1992\\ Coq V5.7.0& 1992 & translation to Caml-Light \footnotemark\\ Coq V5.8& 12 February 1993 & \feature{Program} (version 1), \feature{simpl}\\ & & has the xcoq graphical interface\\ & & first explicit notion of standard library\\ & & includes a MacOS 7-9 version\\ Coq V5.8.1& released 28 April 1993 & with xcoq graphical interface and MacOS 7-9 support\\ Coq V5.8.2& released 9 July 1993 & with xcoq graphical interface and MacOS 7-9 support\\ Coq V5.8.3& released 6 December 1993 % Announce on coq-club & with xcoq graphical interface and MacOS 7-9 support\\ & & 3 branches: Lyon (V5.8.x), Ulm (V5.10.x) and Rocq (V5.9)\\ Coq V5.9 alpha& 7 July 1993 & experimental version based on evars refinement \\ & & (merge from experimental ``V6.0'' and some pre-V5.8.3 \\ & & version), not released\\ & March 1994 & \feature{tauto} tactic in V5.9 branch\\ Coq V5.9 & 27 January 1993 & experimental version based on evars refinement\\ & & not released\\ \end{tabular} \bigskip \bigskip \footnotetext{archive lost?} \newpage \centerline{II- Starting with CVS archives in 
Caml-Light} \mbox{}\\ \mbox{}\\ \begin{tabular}{l|l|l} version & date & comments \\ \hline Coq V5.10 ``Murthy'' & 22 January 1994 & introduction of the ``DOPN'' structure\\ & & \feature{eapply/prolog} tactics\\ & & private use of cvs on madiran.inria.fr\\ Coq V5.10.1 ``Murthy''& 15 April 1994 \\ Coq V5.10.2 ``Murthy''& 19 April 1994 & \feature{mutual inductive types, fixpoint} (from Lyon's branch)\\ Coq V5.10.3& 28 April 1994 \\ Coq V5.10.5& dated 13 May 1994 & \feature{inversion}, \feature{discriminate}, \feature{injection} \\ & & \feature{type synthesis of hidden arguments}\\ & & \feature{separate compilation}, \feature{reset mechanism} \\ Coq V5.10.6& dated 30 May 1994\\ Coq Lyon's archive & in 1994 & cvs server set up on woodstock.ens-lyon.fr\\ Coq V5.10.9& announced on 17 August 1994 & % Announced by Catherine Parent on coqdev % Version avec une copie de THEORIES pour les inductifs mutuels \\ Coq V5.10.11& announced on 2 February 1995 & \feature{compute}\\ Coq Rocq's archive & on 16 February 1995 & set up of ``V5.10'' cvs archive on pauillac.inria.fr \\ & & with first dispatch of files over src/* directories\\ Coq V5.10.12& dated 30 January 1995 & on Lyon's cvs\\ Coq V5.10.13& dated 9 June 1995 & on Lyon's cvs\\ Coq V5.10.14.OO& dated 30 June 1995 & on Lyon's cvs\\ Coq V5.10.14.a& announced 5 September 1995 & bug-fix release \\ % Announce on coq-club by BW Coq V5.10.14.b& released 2 October 1995 & bug-fix release\\ & & MS-DOS version released on 30 October 1995\\ % still available at ftp://ftp.ens-lyon.fr/pub/LIP/COQ/V5.10.14.old/ in May 2009 % also known in /net/pauillac/constr archive as ``V5.11 old'' \\ % A copy of Coq V5.10.15 dated 1 January 1996 coming from Lyon's CVS is % known in /net/pauillac/constr archive as ``V5.11 new old'' \\ Coq V5.10.15 & released 20 February 1996 & \feature{Logic, Sorting, new Sets and Relations libraries} \\ % Announce on coq-club by BW % dated 15 February 1996 and bound to pauillac's cvs in /net/pauillac/constr archive & & MacOS 7-9 version released on 1 March 1996 \\ % Announce on coq-club by BW Coq V5.11 & dated 1 March 1996 & not released, not in pauillac's CVS, \feature{eauto} \\ \end{tabular} \bigskip \bigskip \newpage \centerline{III- A CVS archive in Caml Special Light} \mbox{}\\ \mbox{}\\ \begin{tabular}{l|l|l} version & date & comments \\ \hline Coq ``V6'' archive & 20 March 1996 & new cvs repository on pauillac.inria.fr with code ported \\ & & to Caml Special Light (to later become Objective Caml)\\ & & has implicit arguments and coercions\\ & & has coinductive types\\ Coq V6.1beta& released 18 November 1996 & \feature{coercions} [23-5-1996], \feature{user-level implicit arguments} [23-5-1996]\\ & & \feature{omega} [10-9-1996] \\ & & \feature{natural language proof printing} (stopped from Coq V7) [6-9-1996]\\ & & \feature{pattern-matching compilation} [7-10-1996]\\ & & \feature{ring} (version 1, ACSimpl) [11-12-1996]\\ Coq V6.1& released December 1996 & \\ Coq V6.2beta& released 30 January 1998 & % Announced on coq-club 2-2-1998 by CP \feature{SearchIsos} (stopped from Coq V7) [9-11-1997]\\ & & grammar extension mechanism moved to Camlp4 [12-6-1997]\\ & & \feature{refine tactic}\\ & & includes a Windows version\\ Coq V6.2& released 4 May 1998 & % Announced on coq-club 5-5-1998 by CP \feature{ring} (version 2) [7-4-1998] \\ Coq V6.2.1& released 23 July 1998\\ Coq V6.2.2 beta& released 30 January 1998\\ Coq V6.2.2& released 23 September 1998\\ Coq V6.2.3& released 22 December 1998 & \feature{Real numbers library} [from 13-11-1998] \\ Coq 
V6.2.4& released 8 February 1999\\ Coq V6.3& released 27 July 1999 & \feature{autorewrite} [25-3-1999]\\ & & \feature{Correctness} (deprecated in V8, led to Why) [28-10-1997]\\ Coq V6.3.1& released 7 December 1999\\ \end{tabular} \medskip \bigskip \newpage \centerline{IV- New CVS, back to a kernel-centric implementation} \mbox{}\\ \mbox{}\\ \begin{tabular}{l|l|l} version & date & comments \\ \hline Coq ``V7'' archive & August 1999 & new cvs archive based on J.-C. Filliâtre's \\ & & \feature{kernel-centric} architecture \\ & & more care for outside readers\\ & & (indentation, ocaml warning protection)\\ Coq V7.0beta& released 27 December 2000 & \feature{${\mathcal{L}}_{\mathit{tac}}$} \\ Coq V7.0beta2& released 2 February 2001\\ Coq V7.0& released 25 April 2001 & \feature{extraction} (version 2) [6-2-2001] \\ & & \feature{field} (version 1) [19-4-2001], \feature{fourier} [20-4-2001] \\ Coq V7.1& released 25 September 2001 & \feature{setoid rewriting} (version 1) [10-7-2001]\\ Coq V7.2& released 10 January 2002\\ Coq V7.3& released 16 May 2002\\ Coq V7.3.1& released 5 October 2002 & \feature{module system} [2-8-2002]\\ & & \feature{pattern-matching compilation} (version 2) [13-6-2002]\\ Coq V7.4& released 6 February 2003 & \feature{notation}, \feature{scopes} [13-10-2002]\\ \end{tabular} \medskip \bigskip \centerline{V- New concrete syntax} \mbox{}\\ \mbox{}\\ \begin{tabular}{l|l|l} version & date & comments \\ \hline Coq V8.0& released 21 April 2004 & \feature{new concrete syntax}, \feature{Set predicative}, \feature{CoqIDE} [from 4-2-2003]\\ Coq V8.0pl1& released 18 July 2004\\ Coq V8.0pl2& released 22 January 2005\\ Coq V8.0pl3& released 13 January 2006\\ Coq V8.0pl4& released 26 January 2007\\ Coq ``svn'' archive & 6 March 2006 & cvs archive moved to subversion control management\\ Coq V8.1beta& released 12 July 2006 & \feature{bytecode compiler} [20-10-2004] \\ & & \feature{setoid rewriting} (version 2) [3-9-2004]\\ & & \feature{functional induction} [1-2-2006]\\ & & \feature{Strings library} [8-2-2006], \feature{FSets/FMaps library} [15-3-2006] \\ & & \feature{Program} (version 2, Russell) [5-3-2006] \\ & & \feature{declarative language} [20-9-2006]\\ & & \feature{ring} (version 3) [18-11-2005]\\ Coq V8.1gamma& released 7 November 2006 & \feature{field} (version 2) [29-9-2006]\\ Coq V8.1& released 10 February 2007 & \\ Coq V8.1pl1& released 27 July 2007 & \\ Coq V8.1pl2& released 13 October 2007 & \\ Coq V8.1pl3& released 13 December 2007 & \\ Coq V8.1pl4& released 9 October 2008 & \\ Coq V8.2 beta1& released 13 June 2008 & \\ Coq V8.2 beta2& released 19 June 2008 & \\ Coq V8.2 beta3& released 27 June 2008 & \\ Coq V8.2 beta4& released 8 August 2008 & \\ Coq V8.2 & released 17 February 2009 & \feature{type classes} [10-12-2007], \feature{machine words} [11-5-2007]\\ & & \feature{big integers} [11-5-2007], \feature{abstract arithmetics} [9-2007]\\ & & \feature{setoid rewriting} (version 3) [18-12-2007] \\ & & \feature{micromega solving platform} [19-5-2008]\\ & & a first package released on February 11 was incomplete\\ Coq V8.2pl1& released 4 July 2009 & \\ Coq V8.2pl2& released 29 June 2010 & \\ \end{tabular} \medskip \bigskip \newpage \mbox{}\\ \mbox{}\\ \begin{tabular}{l|l|l} Coq V8.3 beta & released 16 February 2010 & \feature{MSets library} [13-10-2009] \\ Coq V8.3 & released 14 October 2010 & \feature{nsatz} [3-6-2010] \\ Coq V8.3pl1& released 23 December 2010 & \\ Coq V8.3pl2& released 19 April 2011 & \\ Coq V8.3pl3& released 19 December 2011 & \\ Coq V8.3pl3& released 26 March 
2012 & \\ Coq V8.3pl5& released 28 September 2012 & \\ Coq V8.4 beta & released 27 December 2011 & \feature{modular arithmetic library} [2010-2012]\\ && \feature{vector library} [10-12-2010]\\ && \feature{structured scripts} [22-4-2010]\\ && \feature{eta-conversion} [20-9-2010]\\ && \feature{new proof engine available} [10-12-2010]\\ Coq V8.4 beta2 & released 21 May 2012 & \\ Coq V8.4 & released 12 August 2012 &\\ Coq V8.4pl1& released 22 December 2012 & \\ Coq V8.4pl2& released 4 April 2013 & \\ Coq V8.4pl3& released 21 December 2013 & \\ Coq V8.4pl4& released 24 April 2014 & \\ Coq V8.4pl5& released 22 October 2014 & \\ Coq V8.4pl6& released 9 April 2015 & \\ Coq V8.5 beta1 & released 21 January 2015 & \feature{computation via compilation to OCaml} [22-1-2013]\\ && \feature{asynchronous evaluation} [8-8-2013]\\ && \feature{new proof engine deployed} [2-11-2013]\\ && \feature{universe polymorphism} [6-5-2014]\\ && \feature{primitive projections} [6-5-2014]\\ && \feature{miscellaneous optimizations}\\ Coq V8.5 beta2 & released 22 April 2015 & \feature{MMaps library} [4-3-2015]\\ Coq V8.5 & released 22 January 2016 & \\ Coq V8.6 beta 1 & released 19 November 2016 & \feature{irrefutable patterns} [15-2-2016]\\ && \feature{Ltac profiling} [14-6-2016]\\ && \feature{warning system} [29-6-2016]\\ && \feature{miscellaneous optimizations}\\ Coq V8.6 & released 14 December 2016 & \\ Coq V8.7 beta 1 & released 6 September 2017 & \feature{bundled with Ssreflect plugin} [6-6-2017]\\ && \feature{cumulative polymorphic inductive types} [19-6-2017]\\ && \feature{further optimizations}\\ Coq V8.7 beta 2 & released 6 October 2017 & \\ Coq V8.7.0 & released 18 October 2017 & \\ Coq V8.7.1 & released 15 December 2017 & \\ Coq V8.7.2 & released 17 February 2018 & \\ Coq V8.8 beta1 & released 19 March 2018 & \\ Coq V8.8.0 & released 17 April 2018 & \feature{reference manual moved to Sphinx} \\ && \feature{effort towards better documented, better structured ML API}\\ && \feature{miscellaneous changes/improvements of existing features}\\ \end{tabular} \medskip \bigskip \newpage \centerline{\large Other important dates} \mbox{}\\ \mbox{}\\ \begin{tabular}{l|l|l} version & date & comments \\ \hline Lechenadec's version in C& mention of \\ & 13 January 1985 on \\ & some vernacular files\\ Set up of the coq-club mailing list & 28 July 1993\\ Coq V6.0 ``evars'' & & experimentation based on evars refinement started \\ & & in 1991 by Gilles from V5.6 beta,\\ & & with work by Hugo in July 1992\\ Coq V6.0 ``evars'' ``light'' & July 1993 & Hugo's port of the first evars-based experimentation \\ & & to Coq V5.7, version from October/November 1992\\ CtCoq & released 25 October 1995 & first beta-version \\ % Announce on coq-club by Janet Proto with explicit substitutions & 1997 &\\ Coq web site & 15 April 1998 & new site designed by David Delahaye \\ Coq web site & January 2004 & web site new style \\ & & designed by Julien Narboux and Florent Kirchner \\ Coq web site & April 2009 & new Drupal-based site \\ & & designed by Jean-Marc Notin and Denis Cousineau \\ \end{tabular} \end{document} coq-8.15.0/dev/doc/archive/whodidwhat/000077500000000000000000000000001417001151100174645ustar00rootroot00000000000000coq-8.15.0/dev/doc/archive/whodidwhat/whodidwhat-8.2update.tex000066400000000000000000000316601417001151100240660ustar00rootroot00000000000000\documentclass{article} \usepackage{fullpage} \usepackage[utf8]{inputenc} \usepackage{t1enc} \begin{document} \title{Who did what in the Coq archive?} \author{The Coq development 
team} \maketitle \centerline{(updated for Coq 8.2)} \section{The Calculus of Inductive Constructions} \begin{itemize} \item The Calculus of Constructions \begin{itemize} \item Core type-checker: Gérard Huet and Thierry Coquand with optimizations by Chet Murthy, Bruno Barras \item Head reduction functions: Gérard Huet, Christine Paulin, Bruno Barras \end{itemize} \item Conversion and reduction \begin{itemize} \item Lazy conversion machine: Bruno Barras \item Transparency/opacity: Bruno Barras \item Bytecode-based conversion: Benjamin Grégoire \item Binary-words retroknowledge: Arnaud Spiwack \end{itemize} \item The universe hierarchy \begin{itemize} \item Floating universes: Gérard Huet, with contributions from Bruno Barras \item Algebraic universes: Hugo Herbelin \end{itemize} \item Mutual inductive types and recursive definitions \begin{itemize} \item Type-checking: Christine Paulin \item Positivity condition: Christine Paulin \item Guardness condition for fixpoints: Christine Paulin; extensions by Eduardo Gimenez and Bruno Barras \item Recursively non-uniform parameters: Christine Paulin \item Sort-polymorphism of inductive types: Hugo Herbelin \end{itemize} \item Local definitions: Hugo Herbelin \item Mutual coinductive types and corecursive definitions: Eduardo Gimenez \item Module system \begin{itemize} \item Core system: Jacek Chrz\k{a}szcz \item Inlining: Claudio Sacerdoti Coen and Élie Soubiran \item Module inclusion: Élie Soubiran \item Functorial signature application: Élie Soubiran \item Transparent name space: Élie Soubiran \item Resolution of qualified names: Hugo Herbelin \end{itemize} \item Minimalist stand-alone type-checker (\texttt{coqchk}): Bruno Barras \end{itemize} \section{Specification language} \begin{itemize} \item Sections: Gilles Dowek with extra contributions by Gérard Huet, Chet Murthy, Hugo Herbelin \item The \texttt{Russell} specifications language, proof obligations (\texttt{Program}): Matthieu Sozeau \item Type inference: Chet Murthy, with extra contributions by Bruno Barras, Hugo Herbelin and Matthieu Sozeau \item Pattern-matching: Hugo Herbelin on top of a first version by Cristina Cornes \item Implicit arguments: Amokrane Saïbi, with extensions by Hugo Herbelin and Matthieu Sozeau \item Coercions: Amokrane Saïbi \item Records: Amokrane Saïbi with extensions by Arnaud Spiwack and Matthieu Sozeau \item Canonical structures: Amokrane Saïbi \item Type classes: Matthieu Sozeau \item Functional schemes (\texttt{Function}, \texttt{Functional Scheme}, ...): Julien Forest and Pierre Courtieu (preliminary version by Yves Bertot) \item Generation of induction schemes: Christine Paulin, Vincent Siles, Matthieu Sozeau \end{itemize} \section{Tactics} \subsection{General tactic support} \begin{itemize} \item Proof engine: Chet Murthy (first version by Thierry Coquand) \item Ltac: David Delahaye, with extensions by Hugo Herbelin, Bruno Barras, ... \item Tactic notations: Hugo Herbelin (first version by Chet Murthy) \item Main tactic unification procedure: Chet Murthy with contributions from Hugo Herbelin and Matthieu Sozeau \item Mathematical-style language (C-Zar): Pierre Corbineau \item Communication with external tools (\texttt{external}): Hugo Herbelin \end{itemize} \subsection{Predefined tactics} \begin{itemize} \item Basic tactics (\texttt{intro}, \texttt{apply}, \texttt{assumption}, \texttt{exact}): Thierry Coquand, with further collective extensions \item Reduction tactics: Christine Paulin (\texttt{simpl}), Bruno Barras (\texttt{cbv}, \texttt{lazy}), ... 
\item Tacticals: Thierry Coquand, Chet Murthy, Eduardo Gimenez, ... \item Induction: Christine Paulin (\texttt{elim}, \texttt{case}), Hugo Herbelin (\texttt{induction}, \texttt{destruct} \item Refinement (\texttt{refine}): Jean-Christophe Filliâtre \item Introduction patterns: Eduardo Gimenez with collective extensions \item Forward reasoning: Hugo Herbelin (\texttt{assert}, \texttt{apply in}), Pierre Letouzey (\texttt{specialize}, initial version by Amy Felty) \item Rewriting tactics (\texttt{rewrite}): basic version by Christine Paulin, extensions by Jean-Christophe Filliâtre and Pierre Letouzey \item Tactics about equivalence properties (\texttt{reflexivity}, \texttt{symmetry}, \texttt{transitivity}): Christine Paulin (?), \item Equality tactics (\texttt{injection}/\texttt{discriminate}): Cristina Cornes \item Inversion tactics (\texttt{inversion}): Cristina Cornes, Chet Murthy \item Setoid rewriting: Matthieu Sozeau (first version by Clément Renard, second version by Claudio Sacerdoti Coen), contributions from Nicolas Tabareau \item Decision of equality: Eduardo Gimenez \item Basic Ltac-level tactics: Pierre Letouzey, Matthieu Sozeau, Evgeny Makarov \end{itemize} \subsection{General automation tactics} \begin{itemize} \item Resolution (\texttt{auto}, \texttt{trivial}): Christine Paulin with extensions from Chet Murthy, Eduardo Gimenez, Patrick Loiseleur (hint bases), Matthieu Sozeau \item Resolution with existential variables (\texttt{eauto}): Chet Murthy, Jean-Christophe Filliâtre, with extensions from Matthieu Sozeau \item Automatic rewriting (\texttt{autorewrite}): David Delahaye \end{itemize} \subsection{Domain-specific decision tactics} \begin{itemize} \item Congruence closure (\texttt{cc}): Pierre Corbineau \item Decision of first-order logic (\texttt{firstorder}): Pierre Corbineau \item Simplification of polynomial fractions (\texttt{field}): Laurent Théry and Benjamin Grégoire (first version by David Delahaye and Micaela Mayero) \item Simplification of polynomial expressions (\texttt{ring}): Assia Mahboubi, Bruno Barras and Benjamin Grégoire (first version by Samuel Boutin, second version by Patrick Loiseleur) \item Decision of systems of linear inequations: Frédéric Besson (\texttt{psatzl}); Loïc Pottier (\texttt{fourier}) \item Decision of systems of linear inequations over integers: Frédéric Besson (\texttt{lia}); Pierre Crégut (\texttt{omega} and \texttt{romega}) \item (Partial) decision of systems of polynomical inequations (\texttt{sos}, \texttt{psatz}): Frédéric Besson, with generalization over arbitrary rings by Evgeny Makarov; uses HOL-Light interface to \texttt{csdp} by John Harrisson \item Decision/simplification of intuitionistic propositional logic: David Delahaye (\texttt{tauto}, \texttt{intuition}, first version by Cesar Mu\~noz, second version by Chet Murthy), with contributions from Judicaël Courant; Pierre Corbineau (\texttt{rtauto}) \item Decision/simplification of intuition first-order logic: Pierre Corbineau (\texttt{firstorder}) \end{itemize} \section{Extra tools} \begin{itemize} \item Program extraction: Pierre Letouzey (first implementation by Benjamin Werner, second by Jean-Christophe Filliâtre) \item Export of context to external communication tools (\texttt{dp}): Nicolas Ayache and Jean-Christophe Filliâtre, with contributions by Claude Marché \item Export of terms and environments to XML format: Claudio Sacerdoti Coen, with extensions from Cezary Kaliszyk \end{itemize} \section{Environment management} \begin{itemize} \item Separate compilation: 
initiated by Chet Murthy \item Import/Export: initiated by Chet Murthy \item Options management: Hugo Herbelin with contributions by Arnaud Spiwack \item Resetting and backtracking: Chet Murthy with contributions from Pierre Courtieu \item Searching: Hugo Herbelin, Yves Bertot \item Whelp support: Hugo Herbelin \end{itemize} \section{Parsing and printing} \begin{itemize} \item General parsing support: Chet Murthy, Bruno Barras, Daniel de Rauglaudre \item General printing support: Chet Murthy, Jean-Christophe Filliâtre \item Lexing: Daniel de Rauglaudre \item Support for UTF-8: Hugo Herbelin, with contributions from Alexandre Miquel \item Numerical notations: Hugo Herbelin, Patrick Loiseleur, Micaela Mayero \item String notations: Hugo Herbelin \item New ``V8'' syntax: Bruno Barras, Hugo Herbelin with contributions by Olivier Desmettre \item Abbreviations: Chet Murthy \item Notations: Chet Murthy, Hugo Herbelin \end{itemize} \section{Libraries} \begin{itemize} \item Init: collective (initiated by Christine Paulin and Gérard Huet) \item Arith: collective (initiated by Christine Paulin) \item ZArith: collective (initiated by Pierre Crégut) \item Bool: collective (initiated by Christine Paulin) \item NArith: Hugo Herbelin, Pierre Letouzey, Evgeny Makarov (out of initial contibution by Pierre Crégut) \item Lists: Pierre Letouzey, Jean-Marc Notin (initiated by Christine Paulin) \item Reals: Micaela Mayero (axiomatization and main properties), Olivier Desmettre (convergence, derivability, integrals, trigonometric functions), contributions from Russell O'Connor and Cezary Kaliszyk \item Relations: Bruno Barras, Cristina Cornes with contributions from Pierre Castéran \item Wellfounded: Bruno Barras, Cristina Cornes \item FSets: Pierre Letouzey, from initial work with Jean-Christophe Filliâtre, decision tactic for FSets by Aaron Bohannon \item Logic: Christine Paulin, Hugo Herbelin, Bruno Barras \item Numbers: Evgeny Makarov (abstractions), Laurent Théry and Benjamin Grégoire (big numbers), Arnaud Spiwack and Pierre Letouzey (word-based arithmetic) \item Classes: Matthieu Sozeau \item QArith: Pierre Letouzey, with contributions from Russell O'Connor \item Setoid: Matthieu Sozeau (first version by Clément Renard, second version by Claudio Sacerdoti Coen) \item Sets: Gilles Kahn and Gérard Huet \item Sorting: Gérard Huet \item Strings: Laurent Théry \item Program: Matthieu Sozeau \item Unicode: Claude Marché \end{itemize} \section{Commands} \begin{itemize} \item Batch compiler (\texttt{coqc}): Chet Murthy (?) \item Compilation dependency calculator (\texttt{coqdep}): Jean-Christophe Filliâtre \item Statistic tool (\texttt{coqwc}): Jean-Christophe Filliâtre \item Simple html presentation tool (\texttt{gallina}) (deprecated): Jean-Christophe Filliâtre \item Auto-maker (\texttt{coq\_makefile}): Jean-Christophe Filliâtre, with contributions from Judicaël Courant \item LaTeX presentation tool (\texttt{coq-tex}): Jean-Christophe Filliâtre \item Multi-purpose presentation tool (\texttt{coqdoc}): Jean-Christophe Filliâtre with extensions from Matthieu Sozeau, Jean-Marc Notin, Hugo Herbelin \item Interactive toplevel (\texttt{coqtop}): Jean-Christophe Filliâtre (?) \item Custom toplevel builder (\texttt{coqmktop}): Jean-Christophe Filliâtre (?) 
\end{itemize} \section{Graphical interfaces} \begin{itemize} \item Support for {\em PCoq}: Yves Bertot with contributions by Laurence Rideau and Loïc Pottier; additional support for {\em TmEgg} by Lionel Mamane \item Support for {\em Proof General}: Pierre Courtieu \item {\em CoqIDE}: Benjamin Monate with contributions from Jean-Christophe Filliâtre, Claude Marché, Pierre Letouzey, Julien Narboux, Hugo Herbelin, Pierre Corbineau; uses the Cameleon library by Maxence Guesdon \end{itemize} \section{Architecture} \begin{itemize} \item Functional-kernel-based architecture: Jean-Christophe Filliâtre \item Extensible objects and summaries: Chet Murthy \item Hash-consing: Bruno Barras \item Error locations: Jean-Christophe Filliâtre, Bruno Barras, Hugo Herbelin \item Existential variables engine: Chet Murthy with a revision by Bruno Barras and extensions by Clément Renard and Hugo Herbelin \end{itemize} \section{Development tools} \begin{itemize} \item Makefile's: Chet Murthy, Jean-Christophe Filliâtre, Judicaël Courant, Lionel Mamane, Pierre Corbineau, Pierre Letouzey \item Debugging: Jean-Christophe Filliâtre with contributions from Jacek Chrz\k{a}szcz, Hugo Herbelin, Bruno Barras, ... \item ML quotations: David Delahaye and Daniel de Rauglaudre \item ML tactic and vernacular extensions: Hugo Herbelin (first version by Chet Murthy) \item Test suite: collective content, initiated by Jean-Christophe Filliâtre with further extensions by Hugo Herbelin, Jean-Marc Notin \end{itemize} \section{Documentation} \begin{itemize} \item Reference Manual: collective, layout by Patrick Loiseleur, Claude Marché (former User's Guide in 1991 by Gilles Dowek, Amy Felty, Hugo Herbelin, Gérard Huet, Christine Paulin, Benjamin Werner; initial documentation in 1989 by Thierry Coquand, Gilles Dowek, Gérard Huet, Christine Paulin), \item Basic tutorial: Gérard Huet, Gilles Kahn, Christine Paulin \item Tutorial on recursive types: Eduardo Gimenez with updates by Pierre Castéran \item FAQ: Hugo Herbelin, Julien Narboux, Florent Kirchner \end{itemize} \section{Features discontinued by lack of support} \begin{itemize} \item Searching modulo isomorphism: David Delahaye \item Explanation of proofs in pseudo-natural language: Yann Coscoy \end{itemize} Errors may have been inopportunely introduced, please report them to Hugo~\verb=.=~Herbelin~\verb=@=~inria~\verb=.=~fr \end{document} coq-8.15.0/dev/doc/archive/whodidwhat/whodidwhat-8.3update.tex000066400000000000000000000333731417001151100240720ustar00rootroot00000000000000\documentclass{article} \usepackage{fullpage} \usepackage[utf8]{inputenc} \usepackage{t1enc} \begin{document} \title{Who did what in the Coq archive?} \author{The Coq development team} \maketitle \centerline{(updated for Coq 8.3)} \section{The Calculus of Inductive Constructions} \begin{itemize} \item The Calculus of Constructions \begin{itemize} \item Core type-checker: Gérard Huet and Thierry Coquand with optimizations by Chet Murthy, Bruno Barras \item Head reduction functions: Gérard Huet, Christine Paulin, Bruno Barras \end{itemize} \item Conversion and reduction \begin{itemize} \item Lazy conversion machine: Bruno Barras \item Transparency/opacity: Bruno Barras \item Bytecode-based conversion: Benjamin Grégoire \item Binary-words retroknowledge: Arnaud Spiwack \end{itemize} \item The universe hierarchy \begin{itemize} \item Floating universes: Gérard Huet, with contributions from Bruno Barras \item Algebraic universes: Hugo Herbelin \end{itemize} \item Mutual inductive types and recursive definitions 
\begin{itemize} \item Type-checking: Christine Paulin \item Positivity condition: Christine Paulin \item Guardness condition for fixpoints: Christine Paulin; extensions by Eduardo Gimenez and Bruno Barras \item Recursively non-uniform parameters: Christine Paulin \item Sort-polymorphism of inductive types: Hugo Herbelin \end{itemize} \item Local definitions: Hugo Herbelin \item Mutual coinductive types and corecursive definitions: Eduardo Gimenez \item Module system \begin{itemize} \item Core system: Jacek Chrz\k{a}szcz \item Inlining: Claudio Sacerdoti Coen and Élie Soubiran \item Module inclusion: Élie Soubiran \item Functorial signature application: Élie Soubiran \item Transparent name space: Élie Soubiran \item Resolution of qualified names: Hugo Herbelin \item Operator for nested functor application: Élie Soubiran and Pierre Letouzey \end{itemize} \item Minimalist stand-alone type-checker (\texttt{coqchk}): Bruno Barras, with extra support for modules by Élie Soubiran \end{itemize} \section{Specification language} \begin{itemize} \item Sections: Gilles Dowek with extra contributions by Gérard Huet, Chet Murthy, Hugo Herbelin \item The \texttt{Russell} specifications language, proof obligations (\texttt{Program}): Matthieu Sozeau \item Type inference: Chet Murthy, with extra contributions by Bruno Barras, Hugo Herbelin and Matthieu Sozeau \item Pattern-matching: Hugo Herbelin on top of a first version by Cristina Cornes \item Implicit arguments: Amokrane Saïbi, with extensions by Hugo Herbelin and Matthieu Sozeau \item Coercions: Amokrane Saïbi \item Records: Amokrane Saïbi with extensions by Arnaud Spiwack and Matthieu Sozeau \item Canonical structures: Amokrane Saïbi \item Type classes: Matthieu Sozeau \item Functional schemes (\texttt{Function}, \texttt{Functional Scheme}, ...): Julien Forest and Pierre Courtieu (preliminary version by Yves Bertot) \item Generation of induction schemes: Christine Paulin, Vincent Siles, Matthieu Sozeau \end{itemize} \section{Tactics} \subsection{General tactic support} \begin{itemize} \item Proof engine: Chet Murthy (first version by Thierry Coquand) \item Ltac: David Delahaye, with extensions by Hugo Herbelin, Bruno Barras, ... \item Tactic notations: Hugo Herbelin (first version by Chet Murthy) \item Main tactic unification procedure: Chet Murthy with contributions from Hugo Herbelin and Matthieu Sozeau \item Mathematical-style language (C-Zar): Pierre Corbineau \item Communication with external tools (\texttt{external}): Hugo Herbelin \end{itemize} \subsection{Predefined tactics} \begin{itemize} \item Basic tactics (\texttt{intro}, \texttt{apply}, \texttt{assumption}, \texttt{exact}): Thierry Coquand, with further collective extensions \item Reduction tactics: Christine Paulin (\texttt{simpl}), Bruno Barras (\texttt{cbv}, \texttt{lazy}), ... \item Tacticals: Thierry Coquand, Chet Murthy, Eduardo Gimenez, ... 
\item Induction: Christine Paulin (\texttt{elim}, \texttt{case}), Hugo Herbelin (\texttt{induction}, \texttt{destruct}, {\tt e}-variants of them), Matthieu Sozeau ({\tt dependent destruction}, {\tt dependent induction}) \item Refinement (\texttt{refine}): Jean-Christophe Filliâtre \item Introduction patterns: Eduardo Gimenez with collective extensions \item Forward reasoning: Hugo Herbelin (\texttt{assert}, \texttt{apply in}), Pierre Letouzey (\texttt{specialize}, initial version by Amy Felty) \item Rewriting tactics (\texttt{rewrite}): basic version by Christine Paulin, extensions by Jean-Christophe Filliâtre ({\tt subst}), Pierre Letouzey (\verb=!=, \verb=?= modifiers) and Matthieu Sozeau (\verb=*=) \item Tactics about equivalence properties (\texttt{reflexivity}, \texttt{symmetry}, \texttt{transitivity}): Christine Paulin (?), {\tt e}-variants by Hugo Herbelin, type-classes-based generalization to arbitrary appropriate relations by Matthieu Sozeau \item Equality tactics (\texttt{injection}/\texttt{discriminate}): Cristina Cornes \item Inversion tactics (\texttt{inversion}): Cristina Cornes, Chet Murthy \item Setoid rewriting: Matthieu Sozeau (first version by Clément Renard, second version by Claudio Sacerdoti Coen), contributions from Nicolas Tabareau \item Decision of equality: Eduardo Gimenez \item Basic Ltac-level tactics: Pierre Letouzey, Matthieu Sozeau, Evgeny Makarov, Hugo Herbelin \end{itemize} \subsection{General automation tactics} \begin{itemize} \item Resolution (\texttt{auto}, \texttt{trivial}): Christine Paulin with extensions from Chet Murthy, Eduardo Gimenez, Patrick Loiseleur (hint bases), Matthieu Sozeau \item Resolution with existential variables (\texttt{eauto}): Chet Murthy, Jean-Christophe Filliâtre, with extensions from Matthieu Sozeau \item Automatic rewriting (\texttt{autorewrite}): David Delahaye \end{itemize} \subsection{Domain-specific decision tactics} \begin{itemize} \item Congruence closure (\texttt{cc}): Pierre Corbineau \item Decision of first-order logic (\texttt{firstorder}): Pierre Corbineau \item Simplification of polynomial fractions (\texttt{field}): Laurent Théry and Benjamin Grégoire (first version by David Delahaye and Micaela Mayero) \item Simplification of polynomial expressions (\texttt{ring}): Assia Mahboubi, Bruno Barras and Benjamin Grégoire (first version by Samuel Boutin, second version by Patrick Loiseleur) \item Decision of systems of polynomial equations: Loïc Pottier (\texttt{nsatz}) \item Decision of systems of linear inequations: Frédéric Besson (\texttt{psatzl}); Loïc Pottier (\texttt{fourier}) \item Decision of systems of linear inequations over integers: Frédéric Besson (\texttt{lia}); Pierre Crégut (\texttt{omega} and \texttt{romega}) \item (Partial) decision of systems of polynomical inequations (\texttt{sos}, \texttt{psatz}): Frédéric Besson, with generalization over arbitrary rings by Evgeny Makarov; uses HOL-Light interface to \texttt{csdp} by John Harrisson \item Decision/simplification of intuitionistic propositional logic: David Delahaye (\texttt{tauto}, \texttt{intuition}, first version by Cesar Mu\~noz, second version by Chet Murthy), with contributions from Judicaël Courant; Pierre Corbineau (\texttt{rtauto}) \item Decision/simplification of intuition first-order logic: Pierre Corbineau (\texttt{firstorder}) \item Reification ({\tt quote}): Patrick Loiseleur, with generalization by Stéphane Glondu \end{itemize} \section{Extra tools} \begin{itemize} \item Program extraction: Pierre Letouzey (first implementation by 
Benjamin Werner, second by Jean-Christophe Filliâtre) \item Export of context to external communication tools (\texttt{dp}): Nicolas Ayache and Jean-Christophe Filliâtre, with contributions by Claude Marché \item Export of terms and environments to XML format: Claudio Sacerdoti Coen, with extensions from Cezary Kaliszyk \end{itemize} \section{Environment management} \begin{itemize} \item Separate compilation: initiated by Chet Murthy \item Import/Export: initiated by Chet Murthy \item Options management: Hugo Herbelin with contributions by Arnaud Spiwack \item Resetting and backtracking: Chet Murthy with contributions from Pierre Courtieu \item Searching: Hugo Herbelin and Yves Bertot with extensions by Matthias Puech \item Whelp support: Hugo Herbelin \end{itemize} \section{Parsing and printing} \begin{itemize} \item General parsing support: Chet Murthy, Bruno Barras, Daniel de Rauglaudre \item General printing support: Chet Murthy, Jean-Christophe Filliâtre \item Lexing: Daniel de Rauglaudre \item Support for UTF-8: Hugo Herbelin, with contributions from Alexandre Miquel and Yann Régis-Gianas \item Numerical notations: Hugo Herbelin, Patrick Loiseleur, Micaela Mayero \item String notations: Hugo Herbelin \item New ``V8'' syntax: Bruno Barras, Hugo Herbelin with contributions by Olivier Desmettre \item Abbreviations: Chet Murthy \item Notations: Chet Murthy, Hugo Herbelin \end{itemize} \section{Libraries} \begin{itemize} \item Init: collective (initiated by Christine Paulin and Gérard Huet) \item Arith: collective (initiated by Christine Paulin) \item ZArith: collective (initiated by Pierre Crégut) \item Bool: collective (initiated by Christine Paulin) \item NArith: Hugo Herbelin, Pierre Letouzey, Evgeny Makarov (out of initial contibution by Pierre Crégut) \item Lists: Pierre Letouzey, Jean-Marc Notin (initiated by Christine Paulin) \item Reals: Micaela Mayero (axiomatization and main properties), Olivier Desmettre (convergence, derivability, integrals, trigonometric functions), contributions from Russell O'Connor, Cezary Kaliszyk, Guillaume Melquiond \item Relations: Bruno Barras, Cristina Cornes with contributions from Pierre Castéran \item Wellfounded: Bruno Barras, Cristina Cornes \item FSets: Pierre Letouzey, from initial work with Jean-Christophe Filliâtre, decision tactic for FSets by Aaron Bohannon \item MSets: Pierre Letouzey \item Logic: Christine Paulin, Hugo Herbelin, Bruno Barras \item Numbers: Evgeny Makarov (abstractions), Laurent Théry and Benjamin Grégoire (big numbers), Arnaud Spiwack and Pierre Letouzey (word-based arithmetic), further extensions by Pierre Letouzey \item Classes: Matthieu Sozeau \item QArith: Pierre Letouzey, with contributions from Russell O'Connor \item Setoid: Matthieu Sozeau (first version by Clément Renard, second version by Claudio Sacerdoti Coen) \item Sets: Gilles Kahn and Gérard Huet \item Sorting: Gérard Huet with revisions by Hugo Herbelin \item Strings: Laurent Théry \item Program: Matthieu Sozeau \item Unicode: Claude Marché \end{itemize} \section{Commands} \begin{itemize} \item Batch compiler (\texttt{coqc}): Chet Murthy (?) 
\item Compilation dependency calculator (\texttt{coqdep}): Jean-Christophe Filliâtre \item Statistic tool (\texttt{coqwc}): Jean-Christophe Filliâtre \item Simple html presentation tool (\texttt{gallina}) (deprecated): Jean-Christophe Filliâtre \item Auto-maker (\texttt{coq\_makefile}): Jean-Christophe Filliâtre, with contributions from Judicaël Courant \item LaTeX presentation tool (\texttt{coq-tex}): Jean-Christophe Filliâtre \item Multi-purpose presentation tool (\texttt{coqdoc}): Jean-Christophe Filliâtre with extensions from Matthieu Sozeau, Jean-Marc Notin, Hugo Herbelin \item Interactive toplevel (\texttt{coqtop}): Jean-Christophe Filliâtre (?) \item Custom toplevel builder (\texttt{coqmktop}): Jean-Christophe Filliâtre (?) \end{itemize} \section{Graphical interfaces} \begin{itemize} \item Support for {\em PCoq}: Yves Bertot with contributions by Laurence Rideau and Loïc Pottier; additional support for {\em TmEgg} by Lionel Mamane \item Support for {\em Proof General}: Pierre Courtieu \item {\em CoqIDE}: Benjamin Monate with contributions from Jean-Christophe Filliâtre, Claude Marché, Pierre Letouzey, Julien Narboux, Hugo Herbelin, Pierre Corbineau, Vincent Gross; uses the Cameleon library by Maxence Guesdon \end{itemize} \section{Architecture} \begin{itemize} \item Functional-kernel-based architecture: Jean-Christophe Filliâtre \item Extensible objects and summaries: Chet Murthy \item Hash-consing: Bruno Barras \item Error locations: Jean-Christophe Filliâtre, Bruno Barras, Hugo Herbelin \item Existential variables engine: Chet Murthy with revisions by Bruno Barras and Arnaud Spiwack and extensions by Clément Renard and Hugo Herbelin \end{itemize} \section{Development tools} \begin{itemize} \item Makefile's: Chet Murthy, Jean-Christophe Filliâtre, Judicaël Courant, Lionel Mamane, Pierre Corbineau, Pierre Letouzey \item Debugging: Jean-Christophe Filliâtre with contributions from Jacek Chrz\k{a}szcz, Hugo Herbelin, Bruno Barras, ... 
\item ML quotations: David Delahaye and Daniel de Rauglaudre \item ML tactic and vernacular extensions: Hugo Herbelin (first version by Chet Murthy) \item Test suite: collective content, initiated by Jean-Christophe Filliâtre with further extensions by Hugo Herbelin, Jean-Marc Notin \end{itemize} \section{Documentation} \begin{itemize} \item Reference Manual: collective, layout by Patrick Loiseleur, Claude Marché (former User's Guide in 1991 by Gilles Dowek, Amy Felty, Hugo Herbelin, Gérard Huet, Christine Paulin, Benjamin Werner; initial documentation in 1989 by Thierry Coquand, Gilles Dowek, Gérard Huet, Christine Paulin), \item Basic tutorial: Gérard Huet, Gilles Kahn, Christine Paulin \item Tutorial on recursive types: Eduardo Gimenez with updates by Pierre Castéran \item FAQ: Hugo Herbelin, Julien Narboux, Florent Kirchner \end{itemize} \section{Features discontinued by lack of support} \begin{itemize} \item Searching modulo isomorphism: David Delahaye \item Explanation of proofs in pseudo-natural language: Yann Coscoy \end{itemize} Errors may have been inopportunely introduced, please report them to Hugo~\verb=.=~Herbelin~\verb=@=~inria~\verb=.=~fr \end{document} coq-8.15.0/dev/doc/archive/whodidwhat/whodidwhat-8.4update.tex000066400000000000000000000372261417001151100240740ustar00rootroot00000000000000\documentclass{article} \usepackage{fullpage} \usepackage[utf8]{inputenc} \usepackage{t1enc} \usepackage{hyperref} \begin{document} \title{Who did what in the Coq archive?} \author{The Coq development team} \maketitle \centerline{(updated for Coq 8.4)} \section{The Calculus of Inductive Constructions} \begin{itemize} \item The Calculus of Constructions \begin{itemize} \item Core type-checker: Gérard Huet and Thierry Coquand with optimizations by Chet Murthy, Bruno Barras \item Head reduction functions: Gérard Huet, Christine Paulin, Bruno Barras \end{itemize} \item Conversion and reduction \begin{itemize} \item Lazy conversion machine: Bruno Barras \item Transparency/opacity: Bruno Barras \item Bytecode-based conversion: Benjamin Grégoire \item Binary-words retroknowledge: Arnaud Spiwack \end{itemize} \item The universe hierarchy \begin{itemize} \item Floating universes: Gérard Huet, with contributions from Bruno Barras and Pierre Letouzey \item Algebraic universes: Hugo Herbelin \end{itemize} \item Mutual inductive types and recursive definitions \begin{itemize} \item Type-checking: Christine Paulin \item Positivity condition: Christine Paulin \item Guardness condition for fixpoints: Christine Paulin; extensions by Eduardo Gimenez, Bruno Barras, Pierre Boutillier \item Recursively non-uniform parameters: Christine Paulin \item Sort-polymorphism of inductive types: Hugo Herbelin \end{itemize} \item Local definitions: Hugo Herbelin \item Mutual coinductive types and corecursive definitions: Eduardo Gimenez \item Module system \begin{itemize} \item Core system: Jacek Chrz\k{a}szcz \item Inlining: Claudio Sacerdoti Coen and Élie Soubiran \item Module inclusion: Élie Soubiran \item Functorial signature application: Élie Soubiran \item Transparent name space: Élie Soubiran \item Resolution of qualified names: Hugo Herbelin \item Operator for nested functor application: Élie Soubiran and Pierre Letouzey \end{itemize} \item Minimalist stand-alone type-checker (\texttt{coqchk}): Bruno Barras, with extra support for modules by Élie Soubiran and Pierre Letouzey \item Eta-conversion: Hugo Herbelin, with contributions from Stéphane Glondu, Benjamin Grégoire \end{itemize} \section{Specification 
language} \begin{itemize} \item Sections: Gilles Dowek with extra contributions by Gérard Huet, Chet Murthy, Hugo Herbelin \item The \texttt{Russell} specifications language, proof obligations (\texttt{Program}): Matthieu Sozeau \item Type inference: Chet Murthy, with extra contributions by Bruno Barras, Hugo Herbelin, Matthieu Sozeau, Enrico Tassi \item Pattern-matching: Hugo Herbelin on top of a first version by Cristina Cornes \item Implicit arguments: Amokrane Saïbi, with extensions by Hugo Herbelin, Matthieu Sozeau, Pierre Boutillier \item Synthetic {\tt Arguments} command: Enrico Tassi \item Coercions: Amokrane Saïbi \item Records: Amokrane Saïbi with extensions by Arnaud Spiwack and Matthieu Sozeau \item Canonical structures: Amokrane Saïbi \item Type classes: Matthieu Sozeau \item Functional schemes (\texttt{Function}, \texttt{Functional Scheme}, ...): Julien Forest and Pierre Courtieu (preliminary version by Yves Bertot) \item Generation of induction schemes: Christine Paulin, Vincent Siles, Matthieu Sozeau \end{itemize} \section{Tactics} \subsection{General tactic support} \begin{itemize} \item Proof engine: Arnaud Spiwack (first version by Thierry Coquand, second version by Chet Murthy) \item Ltac: David Delahaye, with extensions by Hugo Herbelin, Bruno Barras, ... \item Tactic notations: Hugo Herbelin (first version by Chet Murthy) \item Main tactic unification procedure: Chet Murthy with contributions from Hugo Herbelin and Matthieu Sozeau \item Mathematical-style language (C-Zar): Pierre Corbineau \item Communication with external tools (\texttt{external}): Hugo Herbelin \item Proof structuring (bullets and brackets): Arnaud Spiwack \end{itemize} \subsection{Predefined tactics} \begin{itemize} \item Basic tactics (\texttt{intro}, \texttt{apply}, \texttt{assumption}, \texttt{exact}): Thierry Coquand, with further collective extensions \item Reduction tactics: Christine Paulin (\texttt{simpl}), Bruno Barras (\texttt{cbv}, \texttt{lazy}), with contributions from Hugo Herbelin, Enrico Tassi, ... \item Tacticals: Thierry Coquand, Chet Murthy, Eduardo Gimenez, ...; new versions of {\tt info} and {\tt Show Script} by Pierre Letouzey; {\tt timeout} by Pierre Letouzey \item Induction: Christine Paulin (\texttt{elim}, \texttt{case}), Hugo Herbelin (\texttt{induction}, \texttt{destruct} \item Refinement (\texttt{refine}): Jean-Christophe Filliâtre \item Introduction patterns: Eduardo Gimenez with collective extensions \item Forward reasoning: Hugo Herbelin (\texttt{assert}, \texttt{apply in}), Pierre Letouzey (\texttt{specialize}, initial version by Amy Felty) \item Rewriting tactics (\texttt{rewrite}): basic version by Christine Paulin, extensions by Jean-Christophe Filliâtre and Pierre Letouzey \item Tactics about equivalence properties (\texttt{reflexivity}, \texttt{symmetry}, \texttt{transitivity}): Christine Paulin (?), \item Equality tactics (\texttt{injection}/\texttt{discriminate}): Cristina Cornes \item Inversion tactics (\texttt{inversion}): Cristina Cornes, Chet Murthy \item Setoid rewriting: Matthieu Sozeau (first version by Clément Renard, second version by Claudio Sacerdoti Coen), contributions from Nicolas Tabareau \item Decision of equality: Eduardo Gimenez \item Basic Ltac-level tactics: Pierre Letouzey, Matthieu Sozeau, Evgeny Makarov \item Tactics about existential variables: Clément Renard, Pierre Corbineau, Stéphane Glondu, Arnaud Spiwack, ... 
\end{itemize} \subsection{General automation tactics} \begin{itemize} \item Resolution (\texttt{auto}, \texttt{trivial}): Christine Paulin with extensions from Chet Murthy, Eduardo Gimenez, Patrick Loiseleur (hint bases), Matthieu Sozeau \item Resolution with existential variables (\texttt{eauto}): Chet Murthy, Jean-Christophe Filliâtre, with extensions from Matthieu Sozeau \item Automatic rewriting (\texttt{autorewrite}): David Delahaye \end{itemize} \subsection{Domain-specific decision tactics} \begin{itemize} \item Congruence closure (\texttt{cc}): Pierre Corbineau \item Decision of first-order logic (\texttt{firstorder}): Pierre Corbineau \item Simplification of polynomial fractions (\texttt{field}): Laurent Théry and Benjamin Grégoire (first version by David Delahaye and Micaela Mayero) \item Simplification of polynomial expressions (\texttt{ring}): Assia Mahboubi, Bruno Barras and Benjamin Grégoire (first version by Samuel Boutin, second version by Patrick Loiseleur) \item Decision of systems of polynomial equations: Loïc Pottier (\texttt{nsatz}) \item Decision of systems of linear inequations: Frédéric Besson (\texttt{psatzl}); Loïc Pottier (\texttt{fourier}) \item Decision of systems of linear inequations over integers: Frédéric Besson (\texttt{lia}); Pierre Crégut (\texttt{omega} and \texttt{romega}) \item (Partial) decision of systems of polynomical inequations (\texttt{sos}, \texttt{psatz}): Frédéric Besson, with generalization over arbitrary rings by Evgeny Makarov; uses HOL-Light interface to \texttt{csdp} by John Harrisson \item Decision/simplification of intuitionistic propositional logic: David Delahaye (\texttt{tauto}, \texttt{intuition}, first version by Cesar Mu\~noz, second version by Chet Murthy), with contributions from Judicaël Courant; Pierre Corbineau (\texttt{rtauto}) \item Decision/simplification of intuition first-order logic: Pierre Corbineau (\texttt{firstorder}) \end{itemize} \section{Extra tools} \begin{itemize} \item Program extraction: Pierre Letouzey (first implementation by Benjamin Werner, second by Jean-Christophe Filliâtre) \item Export of context to external communication tools (\texttt{dp}): Nicolas Ayache and Jean-Christophe Filliâtre, with contributions by Claude Marché \item Export of terms and environments to XML format: Claudio Sacerdoti Coen, with extensions from Cezary Kaliszyk \end{itemize} \section{Environment management} \begin{itemize} \item Separate compilation: initiated by Chet Murthy \item Import/Export: initiated by Chet Murthy \item Options management: Hugo Herbelin with contributions by Arnaud Spiwack \item Resetting and backtracking: Chet Murthy with contributions from Pierre Courtieu \item Searching: Hugo Herbelin and Yves Bertot with extensions by Matthias Puech \item Whelp support: Hugo Herbelin \end{itemize} \section{Parsing and printing} \begin{itemize} \item General parsing support: Chet Murthy, Bruno Barras, Daniel de Rauglaudre \item General printing support: Chet Murthy, Jean-Christophe Filliâtre \item Lexing: Daniel de Rauglaudre \item Support for UTF-8: Hugo Herbelin, with contributions from Alexandre Miquel and Yann Régis-Gianas \item Numerical notations: Hugo Herbelin, Patrick Loiseleur, Micaela Mayero \item String notations: Hugo Herbelin \item New ``V8'' syntax: Bruno Barras, Hugo Herbelin with contributions by Olivier Desmettre \item Abbreviations: Chet Murthy \item Notations: Chet Murthy, Hugo Herbelin \end{itemize} \section{Libraries} \begin{itemize} \item Init: collective (initiated by Christine Paulin and 
Gérard Huet) \item Arith: collective (initiated by Christine Paulin) \item ZArith: collective (initiated by Pierre Crégut) \item Bool: collective (initiated by Christine Paulin) \item NArith: Hugo Herbelin, Pierre Letouzey, Evgeny Makarov (out of initial contibution by Pierre Crégut) \item Lists: Pierre Letouzey, Jean-Marc Notin (initiated by Christine Paulin) \item Vectors: Pierre Boutillier \item Reals: Micaela Mayero (axiomatization and main properties), Olivier Desmettre (convergence, derivability, integrals, trigonometric functions), contributions from Russell O'Connor, Cezary Kaliszyk, Guillaume Melquiond, Yves Bertot, Guillaume Allais \item Relations: Bruno Barras, Cristina Cornes with contributions from Pierre Castéran \item Wellfounded: Bruno Barras, Cristina Cornes \item FSets: Pierre Letouzey, from initial work with Jean-Christophe Filliâtre, decision tactic for FSets by Aaron Bohannon, red-black trees by Andrew Appel and Pierre Letouzey \item MSets: Pierre Letouzey \item Logic: Christine Paulin, Hugo Herbelin, Bruno Barras \item Numbers: Evgeny Makarov (abstractions), Laurent Théry and Benjamin Grégoire (big numbers), Arnaud Spiwack and Pierre Letouzey (word-based arithmetic), further extensions by Pierre Letouzey; integration of Arith and ZArith to Numbers by Pierre Letouzey \item Classes: Matthieu Sozeau \item QArith: Pierre Letouzey, with contributions from Russell O'Connor \item Setoid: Matthieu Sozeau (first version by Clément Renard, second version by Claudio Sacerdoti Coen) \item Sets: Gilles Kahn and Gérard Huet \item Sorting: Gérard Huet with revisions by Hugo Herbelin \item Strings: Laurent Théry \item Program: Matthieu Sozeau \item Unicode: Claude Marché \end{itemize} \section{Commands} \begin{itemize} \item Batch compiler (\texttt{coqc}): Chet Murthy (?) \item Compilation dependency calculator (\texttt{coqdep}): Jean-Christophe Filliâtre \item Statistic tool (\texttt{coqwc}): Jean-Christophe Filliâtre \item Simple html presentation tool (\texttt{gallina}) (deprecated): Jean-Christophe Filliâtre \item Auto-maker (\texttt{coq\_makefile}): Jean-Christophe Filliâtre, with contributions from Judicaël Courant, updated by Pierre Boutillier \item LaTeX presentation tool (\texttt{coq-tex}): Jean-Christophe Filliâtre \item Multi-purpose presentation tool (\texttt{coqdoc}): Jean-Christophe Filliâtre with extensions from Matthieu Sozeau, Jean-Marc Notin, Hugo Herbelin and contributions from Adam Chlipala \item Interactive toplevel (\texttt{coqtop}): Jean-Christophe Filliâtre (?) \item Custom toplevel builder (\texttt{coqmktop}): Jean-Christophe Filliâtre (?) 
\end{itemize} \section{Graphical interfaces} \begin{itemize} \item Support for {\em PCoq}: Yves Bertot with contributions by Laurence Rideau and Loïc Pottier; additional support for {\em TmEgg} by Lionel Mamane \item Support for {\em Proof General}: Pierre Courtieu with contributions from Arnaud Spiwack \item {\em CoqIDE}: Benjamin Monate with contributions from Jean-Christophe Filliâtre, Claude Marché, Pierre Letouzey, Julien Narboux, Hugo Herbelin, Pierre Corbineau, Pierre Boutillier, Pierre-Marie Pédrot; processus-based communication protocol by Vincent Gross with contributions from Pierre Letouzey, Pierre Boutillier, Pierre-Marie Pédrot; backtracking revised by Pierre Letouzey; uses the Cameleon library by Maxence Guesdon; \end{itemize} \section{Architecture} \begin{itemize} \item Functional-kernel-based architecture: Jean-Christophe Filliâtre \item Extensible objects and summaries: Chet Murthy \item Hash-consing: Bruno Barras \item Error locations: Jean-Christophe Filliâtre, Bruno Barras, Hugo Herbelin, with contributions from Arnaud Spiwack \item Existential variables engine: Chet Murthy with revisions by Bruno Barras and Arnaud Spiwack and extensions by Clément Renard and Hugo Herbelin \end{itemize} \section{Development tools} \begin{itemize} \item Makefile's: Chet Murthy, Jean-Christophe Filliâtre, Judicaël Courant, Lionel Mamane, Pierre Corbineau, Pierre Letouzey with contributions from Stéphane Glondu, Hugo Herbelin, ... \item Debugging: Jean-Christophe Filliâtre with contributions from Jacek Chrz\k{a}szcz, Hugo Herbelin, Bruno Barras, ... \item ML quotations: David Delahaye and Daniel de Rauglaudre \item ML tactic and vernacular extensions: Hugo Herbelin (first version by Chet Murthy) \item Test suite: collective content, initiated by Jean-Christophe Filliâtre with further extensions by Hugo Herbelin, Jean-Marc Notin \end{itemize} \section{Maintenance and system engineering} \begin{itemize} %\item General maintenance in version 8.0: Bruno Barras, Hugo Herbelin %\item General maintenance in version 8.1: Bruno Barras, Hugo Herbelin, Jean-Marc Notin %\item General maintenance in version 8.2: Hugo Herbelin, Pierre Letouzey, Jean-Marc Notin, %\item General maintenance in version 8.3: Hugo Herbelin, Pierre % Letouzey \item General maintenance in version 8.4: Pierre Letouzey, Hugo Herbelin, Pierre Boutillier, Matthieu Sozeau, Stéphane Glondu with contributions from Guillaume Melquiond, Julien Narboux and Pierre-Marie Pédrot \item Team coordination: Gérard Huet, Christine Paulin, Hugo Herbelin, with various other contributions \item Packaging tools: Henri Laulhere, David Delahaye, Julien Narboux, Pierre Letouzey, Enrico Tassi (Windows); Damien Doligez, Hugo Herbelin, Pierre Boutillier (MacOS); Jean-Christophe Filliâtre, Judicaël Courant, Hugo Herbelin, Stéphane Glondu (Linux) \end{itemize} \section{Documentation} \begin{itemize} \item Reference Manual: collective, layout by Patrick Loiseleur, Claude Marché (former User's Guide in 1991 by Gilles Dowek, Amy Felty, Hugo Herbelin, Gérard Huet, Christine Paulin, Benjamin Werner; initial documentation in 1989 by Thierry Coquand, Gilles Dowek, Gérard Huet, Christine Paulin), \item Basic tutorial: Gérard Huet, Gilles Kahn, Christine Paulin \item Tutorial on recursive types: Eduardo Gimenez with updates by Pierre Castéran \item FAQ: Hugo Herbelin, Julien Narboux, Florent Kirchner \end{itemize} \section{Features discontinued by lack of support} \begin{itemize} \item Searching modulo isomorphism: David Delahaye \item Explanation of proofs in 
pseudo-natural language: Yann Coscoy \item Dp: Jean-Christophe Filliâtre, Nicolas Ayache with contributions from Claude Marché (now integrated to \href{http://why3.lri.fr/}{Why3}) \end{itemize} For oversights or accidental errors, please report to Hugo~\verb=.=~Herbelin~\verb=@=~inria~\verb=.=~fr \end{document} coq-8.15.0/dev/doc/archive/whodidwhat/whodidwhat-8.5update.tex000066400000000000000000000376661417001151100241050ustar00rootroot00000000000000\documentclass{article} \usepackage{fullpage} \usepackage[utf8]{inputenc} \usepackage{t1enc} \begin{document} \title{Who did what in the Coq archive?} \author{The Coq development team} \maketitle \centerline{(updated for Coq 8.5)} \section{The Calculus of Inductive Constructions} \begin{itemize} \item The Calculus of Constructions \begin{itemize} \item Core type-checker: Gérard Huet and Thierry Coquand with optimizations by Chet Murthy, Bruno Barras \item Head reduction functions: Gérard Huet, Christine Paulin, Bruno Barras \end{itemize} \item Conversion and reduction \begin{itemize} \item Lazy conversion machine: Bruno Barras \item Transparency/opacity: Bruno Barras \item Bytecode-based conversion: Benjamin Grégoire \item Binary-words retroknowledge: Arnaud Spiwack \item Native code based conversion: Maxime Dénès, Benjamin Grégoire \end{itemize} \item The universe hierarchy \begin{itemize} \item Floating universes: Gérard Huet, with contributions from Bruno Barras \item Algebraic universes: Hugo Herbelin \end{itemize} \item Mutual inductive types and recursive definitions \begin{itemize} \item Type-checking: Christine Paulin \item Positivity condition: Christine Paulin \item Guardness condition for fixpoints: Christine Paulin; extensions by Eduardo Gimenez, Bruno Barras, Pierre Boutillier; fixes by Bruno Barras, Maxime Dénès \item Recursively non-uniform parameters: Christine Paulin \item Sort-polymorphism of inductive types: Hugo Herbelin \end{itemize} \item Local definitions: Hugo Herbelin \item Mutual coinductive types and corecursive definitions: Eduardo Gimenez \item Module system \begin{itemize} \item Core system: Jacek Chrz\k{a}szcz \item Inlining: Claudio Sacerdoti Coen and Élie Soubiran \item Module inclusion: Élie Soubiran \item Functorial signature application: Élie Soubiran \item Transparent name space: Élie Soubiran \item Resolution of qualified names: Hugo Herbelin \item Operator for nested functor application: Élie Soubiran and Pierre Letouzey \end{itemize} \item Minimalist stand-alone type-checker (\texttt{coqchk}): Bruno Barras, with extra support for modules by Élie Soubiran and Pierre Letouzey \item Eta-conversion: Hugo Herbelin, with contributions from Stéphane Glondu, Benjamin Grégoire \end{itemize} \section{Specification language} \begin{itemize} \item Sections: Gilles Dowek with extra contributions by Gérard Huet, Chet Murthy, Hugo Herbelin \item The \texttt{Russell} specifications language, proof obligations (\texttt{Program}): Matthieu Sozeau \item Type inference: Chet Murthy, with extra contributions by Bruno Barras, Hugo Herbelin, Matthieu Sozeau, Enrico Tassi \item Pattern-matching: Hugo Herbelin on top of a first version by Cristina Cornes, contributions by Arnaud Spiwack \item Implicit arguments: Amokrane Saïbi, with extensions by Hugo Herbelin, Matthieu Sozeau, Pierre Boutillier \item Synthetic {\tt Arguments} command: Enrico Tassi \item Coercions: Amokrane Saïbi \item Records \begin{itemize} \item Core implementation: Amokrane Saïbi with extensions by Matthieu Sozeau \item Extension to inductive and 
co-inductive records: Arnaud Spiwack \item Non-recursive variants: Arnaud Spiwack \end{itemize} \item Canonical structures: Amokrane Saïbi \item Type classes: Matthieu Sozeau \item Function (\texttt{Function}, \texttt{functional induction}...): Julien Forest (preliminary versions by Pierre Courtieu (\texttt{Functional Schemes}) and Yves Bertot (\texttt{Recursive Definition})) \item Generation of induction schemes: Christine Paulin, Vincent Siles, Matthieu Sozeau \end{itemize} \section{Tactics} \subsection{General tactic support} \begin{itemize} \item Proof engine: Arnaud Spiwack (first version by Thierry Coquand, second version by Chet Murthy) \item Ltac: David Delahaye, with extensions by Hugo Herbelin, Bruno Barras, ... Evolution to the new proof engine by Arnaud Spiwack, Pierre-Marie P\'edrot \item Tactic notations: Hugo Herbelin (first version by Chet Murthy) \item Main tactic unification procedure: Chet Murthy with contributions from Hugo Herbelin and Matthieu Sozeau \item Mathematical-style language (C-Zar): Pierre Corbineau \item Communication with external tools (\texttt{external}): Hugo Herbelin \item Proof structuring (bullets and brackets): Arnaud Spiwack \end{itemize} \subsection{Predefined tactics} \begin{itemize} \item Basic refinement tactic (\texttt{refine}): Arnaud Spiwack (previous non-basic version by Jean-Christophe Filliâtre) \item Core tactics (\texttt{intro}, \texttt{apply}, \texttt{assumption}, \texttt{exact}): Thierry Coquand, with further collective extensions \item Reduction tactics: Christine Paulin (\texttt{simpl}), Bruno Barras (\texttt{cbv}, \texttt{lazy}), Pierre Boutillier (\texttt{cbn}) with contributions from Hugo Herbelin, Enrico Tassi, ... \item Tacticals: Thierry Coquand, Chet Murthy, Eduardo Gimenez, ...; new versions of {\tt info} and {\tt Show Script} by Pierre Letouzey; {\tt timeout} by Pierre Letouzey; backtracking-related tacticals by Arnaud Spiwack \item Generic tactic traces ({\tt Info}) by Arnaud Spiwack (based on the former {\tt info} tactical) \item Induction: Christine Paulin (\texttt{elim}, \texttt{case}), Hugo Herbelin (\texttt{induction}, \texttt{destruct}) \item Introduction patterns: Eduardo Gimenez with collective extensions \item Forward reasoning: Hugo Herbelin (\texttt{assert}, \texttt{enough}, \texttt{apply in}), Pierre Letouzey (\texttt{specialize}, initial version by Amy Felty) \item Rewriting tactics (\texttt{rewrite}): basic version by Christine Paulin, extensions by Jean-Christophe Filliâtre and Pierre Letouzey \item Setoid rewriting: Matthieu Sozeau (first version by Clément Renard, second version by Claudio Sacerdoti Coen), contributions from Nicolas Tabareau \item Tactics about equivalence properties (\texttt{reflexivity}, \texttt{symmetry}, \texttt{transitivity}): Christine Paulin (?) \item Equality tactics (\texttt{injection}/\texttt{discriminate}): Cristina Cornes, extensions by Hugo Herbelin \item Inversion tactics (\texttt{inversion}): Cristina Cornes, Chet Murthy \item Decision of equality: Eduardo Gimenez \item Basic Ltac-level tactics: Pierre Letouzey, Matthieu Sozeau, Evgeny Makarov \item Tactics about existential variables: Clément Renard, Pierre Corbineau, Stéphane Glondu, Arnaud Spiwack, ...
\end{itemize} \subsection{General automation tactics} \begin{itemize} \item Resolution (\texttt{auto}, \texttt{trivial}): Christine Paulin with extensions from Chet Murthy, Eduardo Gimenez, Patrick Loiseleur (hint bases), Matthieu Sozeau \item Resolution with existential variables (\texttt{eauto}): Chet Murthy, Jean-Christophe Filliâtre, with extensions from Matthieu Sozeau \item Automatic rewriting (\texttt{autorewrite}): David Delahaye \end{itemize} \subsection{Domain-specific decision tactics} \begin{itemize} \item Congruence closure (\texttt{cc}): Pierre Corbineau \item Decision of first-order logic (\texttt{firstorder}): Pierre Corbineau \item Simplification of polynomial fractions (\texttt{field}): Laurent Théry and Benjamin Grégoire (first version by David Delahaye and Micaela Mayero) \item Simplification of polynomial expressions (\texttt{ring}): Assia Mahboubi, Bruno Barras and Benjamin Grégoire (first version by Samuel Boutin, second version by Patrick Loiseleur) \item Decision of systems of polynomial equations: Loïc Pottier (\texttt{nsatz}) \item Decision of systems of linear inequations: Frédéric Besson (\texttt{psatzl}); Loïc Pottier (\texttt{fourier}) \item Decision of systems of linear inequations over integers: Frédéric Besson (\texttt{lia}); Pierre Crégut (\texttt{omega} and \texttt{romega}) \item (Partial) decision of systems of polynomial inequations (\texttt{sos}, \texttt{psatz}): Frédéric Besson, with generalization over arbitrary rings by Evgeny Makarov; uses HOL-Light interface to \texttt{csdp} by John Harrison \item Decision/simplification of intuitionistic propositional logic: David Delahaye (\texttt{tauto}, \texttt{intuition}, first version by Cesar Mu\~noz, second version by Chet Murthy), with contributions from Judicaël Courant; Pierre Corbineau (\texttt{rtauto}) \item Decision/simplification of intuitionistic first-order logic: Pierre Corbineau (\texttt{firstorder}) \end{itemize} \section{Extra tools} \begin{itemize} \item Program extraction: Pierre Letouzey (first implementation by Benjamin Werner, second by Jean-Christophe Filliâtre) \end{itemize} \section{Environment management} \begin{itemize} \item Separate compilation: initiated by Chet Murthy \item Import/Export: initiated by Chet Murthy \item Options management: Hugo Herbelin with contributions by Arnaud Spiwack \item Resetting and backtracking: Chet Murthy with contributions from Pierre Courtieu \item Searching: Hugo Herbelin and Yves Bertot with extensions by Matthias Puech \item Whelp support: Hugo Herbelin \end{itemize} \section{Parsing and printing} \begin{itemize} \item General parsing support: Chet Murthy, Bruno Barras, Daniel de Rauglaudre \item General printing support: Chet Murthy, Jean-Christophe Filliâtre \item Lexing: Daniel de Rauglaudre \item Support for UTF-8: Hugo Herbelin, with contributions from Alexandre Miquel and Yann Régis-Gianas \item Numerical notations: Hugo Herbelin, Patrick Loiseleur, Micaela Mayero \item String notations: Hugo Herbelin \item New ``V8'' syntax: Bruno Barras, Hugo Herbelin with contributions by Olivier Desmettre \item Abbreviations: Chet Murthy \item Notations: Chet Murthy, Hugo Herbelin \end{itemize} \section{Libraries} \begin{itemize} \item Init: collective (initiated by Christine Paulin and Gérard Huet) \item Arith: collective (initiated by Christine Paulin) \item ZArith: collective (initiated by Pierre Crégut) \item Bool: collective (initiated by Christine Paulin) \item NArith: Hugo Herbelin, Pierre Letouzey, Evgeny Makarov (out of initial contribution by
Pierre Crégut) \item Lists: Pierre Letouzey, Jean-Marc Notin (initiated by Christine Paulin) \item Vectors: Pierre Boutillier \item Reals: Micaela Mayero (axiomatization and main properties), Olivier Desmettre (convergence, derivability, integrals, trigonometric functions), contributions from Russell O'Connor, Cezary Kaliszyk, Guillaume Melquiond, Yves Bertot, Guillaume Allais \item Relations: Bruno Barras, Cristina Cornes with contributions from Pierre Castéran \item Wellfounded: Bruno Barras, Cristina Cornes \item FSets: Pierre Letouzey, from initial work with Jean-Christophe Filliâtre, decision tactic for FSets by Aaron Bohannon, red-black trees by Andrew Appel and Pierre Letouzey \item MSets: Pierre Letouzey \item Logic: Christine Paulin, Hugo Herbelin, Bruno Barras, contributions by Arnaud Spiwack \item Numbers: Evgeny Makarov (abstractions), Laurent Théry and Benjamin Grégoire (big numbers), Arnaud Spiwack and Pierre Letouzey (word-based arithmetic), further extensions by Pierre Letouzey; integration of Arith and ZArith to Numbers by Pierre Letouzey \item Classes: Matthieu Sozeau \item QArith: Pierre Letouzey, with contributions from Russell O'Connor \item Setoid: Matthieu Sozeau (first version by Clément Renard, second version by Claudio Sacerdoti Coen) \item Sets: Gilles Kahn and Gérard Huet \item Sorting: Gérard Huet with revisions by Hugo Herbelin \item Strings: Laurent Théry \item Program: Matthieu Sozeau \item Unicode: Claude Marché \end{itemize} \section{Commands} \begin{itemize} \item Batch compiler (\texttt{coqc}): Chet Murthy (?) \item Compilation dependency calculator (\texttt{coqdep}): Jean-Christophe Filliâtre \item Statistic tool (\texttt{coqwc}): Jean-Christophe Filliâtre \item Simple html presentation tool (\texttt{gallina}) (deprecated): Jean-Christophe Filliâtre \item Auto-maker (\texttt{coq\_makefile}): Jean-Christophe Filliâtre, with contributions from Judicaël Courant, updated by Pierre Boutillier \item LaTeX presentation tool (\texttt{coq-tex}): Jean-Christophe Filliâtre \item Multi-purpose presentation tool (\texttt{coqdoc}): Jean-Christophe Filliâtre with extensions from Matthieu Sozeau, Jean-Marc Notin, Hugo Herbelin and contributions from Adam Chlipala \item Interactive toplevel (\texttt{coqtop}): Jean-Christophe Filliâtre (?) \item Custom toplevel builder (\texttt{coqmktop}): Jean-Christophe Filliâtre (?) 
\end{itemize} \section{Graphical interfaces} \begin{itemize} \item Support for {\em Proof General}: Pierre Courtieu with contributions from Arnaud Spiwack \item {\em CoqIDE}: Benjamin Monate with contributions from Jean-Christophe Filliâtre, Claude Marché, Pierre Letouzey, Julien Narboux, Hugo Herbelin, Pierre Corbineau, Pierre Boutillier, Pierre-Marie Pédrot; process-based communication protocol by Vincent Gross with contributions from Pierre Letouzey, Pierre Boutillier, Pierre-Marie Pédrot; backtracking revised by Pierre Letouzey; uses the Cameleon library by Maxence Guesdon \end{itemize} \section{Architecture} \begin{itemize} \item Functional-kernel-based architecture: Jean-Christophe Filliâtre \item Extensible objects and summaries: Chet Murthy \item Hash-consing: Bruno Barras \item Error locations: Jean-Christophe Filliâtre, Bruno Barras, Hugo Herbelin, with contributions from Arnaud Spiwack \item Existential variables engine: Chet Murthy with revisions by Bruno Barras and Arnaud Spiwack and extensions by Clément Renard and Hugo Herbelin \end{itemize} \section{Development tools} \begin{itemize} \item Makefile's: Chet Murthy, Jean-Christophe Filliâtre, Judicaël Courant, Lionel Mamane, Pierre Corbineau, Pierre Letouzey with contributions from Stéphane Glondu, Hugo Herbelin, ... \item Debugging: Jean-Christophe Filliâtre with contributions from Jacek Chrz\k{a}szcz, Hugo Herbelin, Bruno Barras, ... \item ML quotations: David Delahaye and Daniel de Rauglaudre \item ML tactic and vernacular extensions: Hugo Herbelin (first version by Chet Murthy) \item Test suite: collective content, initiated by Jean-Christophe Filliâtre with further extensions by Hugo Herbelin, Jean-Marc Notin \end{itemize} \section{Maintenance and system engineering} \begin{itemize} \item General bug support: Gérard Huet, Christine Paulin, Chet Murthy, Jean-Christophe Filliâtre, Hugo Herbelin, Bruno Barras, Pierre Letouzey with contributions at some time from Benjamin Werner, Jean-Marc Notin, Pierre Boutillier, ...
\item Team coordination: Gérard Huet, Christine Paulin, Hugo Herbelin, with various other contributions \item Packaging tools: Henri Laulhere, David Delahaye, Julien Narboux, Pierre Letouzey, Enrico Tassi (Windows); Damien Doligez, Hugo Herbelin, Pierre Boutillier (MacOS); Jean-Christophe Filliâtre, Judicaël Courant, Hugo Herbelin, Stéphane Glondu (Linux) \end{itemize} \section{Documentation} \begin{itemize} \item Reference Manual: collective, layout by Patrick Loiseleur, Claude Marché (former User's Guide in 1991 by Gilles Dowek, Amy Felty, Hugo Herbelin, Gérard Huet, Christine Paulin, Benjamin Werner; initial documentation in 1989 by Thierry Coquand, Gilles Dowek, Gérard Huet, Christine Paulin), \item Basic tutorial: Gérard Huet, Gilles Kahn, Christine Paulin \item Tutorial on recursive types: Eduardo Gimenez with updates by Pierre Castéran \item FAQ: Hugo Herbelin, Julien Narboux, Florent Kirchner \end{itemize} \section{Features discontinued by lack of support} \begin{itemize} \item Searching modulo isomorphism: David Delahaye \item Explanation of proofs in pseudo-natural language: Yann Coscoy \item Export of context to external communication tools (\texttt{dp}): Nicolas Ayache and Jean-Christophe Filliâtre, with contributions by Claude Marché \item Support for {\em PCoq}: Yves Bertot with contributions by Laurence Rideau and Loïc Pottier; additional support for {\em TmEgg} by Lionel Mamane \item Export of terms and environments to XML format: Claudio Sacerdoti Coen, with extensions from Cezary Kaliszyk \end{itemize} For probable oversights or accidental errors, please report to Hugo~\verb=.=~Herbelin~\verb=@=~inria~\verb=.=~fr \end{document} coq-8.15.0/dev/doc/build-system.dev.txt000066400000000000000000000145001417001151100176400ustar00rootroot00000000000000HISTORY: ------- * July 2007 (Pierre Corbineau & Lionel Elie Mamane). Inclusion of a build system with 3 explicit phases: - Makefile.stage1: ocamldep, sed, camlp4 without Coq grammar extension - Makefile.stage2: camlp4 with grammar.cma or q_constr.cmo - Makefile.stage3: coqdep (.vo) * March 2010 (Pierre Letouzey). Revised build system. In particular, no more stage1,2,3 : - Stage3 was removed some time ago when coqdep was split into coqdep_boot and full coqdep. - Stage1,2 were replaced by brutal inclusion of all .d at the start of Makefile.build, without trying to guess what could be done at what time. Some initial inclusions hence _fail_, but "make" tries again later and succeed. - Btw, .ml4 are explicitly turned into .ml files, which stay after build. By default, they are in binary ast format, see READABLE_ML4 option. * February 2014 (Pierre Letouzey). Another revision of the build system. We avoid relying on the awkward include-which-fails-but-works-finally-after-a-retry feature of gnu make. This was working, but was quite hard to understand. Instead, we reuse the idea of two explicit phases, but in a lighter way than in 2007. The main Makefile calls Makefile.build twice : - first for building grammar.cma (and q_constr.cmo), with a restricted set of .ml4 (see variable BUILDGRAMMAR). - then on the true target asked by the user. * June 2016 (Pierre Letouzey) The files in grammar/ are now self-contained, we could compile grammar.cma (and q_constr.cmo) directly, no need for a separate subcall to make nor awkward include-failed-and-retry. * February - September 2018 (Emilio Jesús Gallego Arias) Dune support added. 
The build setup is mostly vanilla for the OCaml part, however the `.v` to `.vo` compilation relies on `coq_dune` a `coqdep` wrapper that will generate the necessary `dune` files. As a developer, you should not have to deal with Dune configuration files on a regular basis unless adding a new library or plugin. The vanilla setup declares all the Coq libraries and binaries [we must respect proper containment/module implementation rules as to allow packing], and we build custom preprocessors (based on `camlp5` and `coqpp`) that will process the `ml4`/`mlg` files. This suffices to build `coqtop` and `coqide`, all that remains to handle is `.vo` compilation. To teach Dune about the `.vo`, we use a small utility `coq_dune`, that will generate a `dune` file for each directory in `plugins` and `theories`. The code is pretty straightforward and declares build and install rules for each `.v` straight out of `coqdep`. Thus, our build strategy looks like this: 1. Use `dune` to build `coqdep` and `coq_dune`. 2. Use `coq_dune` to generate `dune` files for each directory with `.v` files. 3. ? 4. Profit! [Seriously, at this point Dune has all the information to build Coq] --------------------------------------------------------------------------- This file documents internals of the implementation of the make-based build system. For what a Coq developer needs to know about the build system, see build-system.txt and build-system.dune.md . .ml4 files ---------- .ml4 are converted to .ml by camlp5. By default, they are produced in the binary ast format understood by ocamlc/ocamlopt/ocamldep. Pros: - faster than parsing clear-text source file. - no risk of editing them by mistake instead of the .ml4 - the location in the binary .ml are those of the initial .ml4, hence errors are properly reported in the .ml4. Cons: - This format may depend on your ocaml version, they should be cleaned if you change your build environment. - Unreadable in case you want to inspect this generated code. For that, use make with the READABLE_ML4=1 option to switch to clear-text generated .ml. Makefiles hierarchy ------------------- The Makefile is separated in several files : - Makefile: wrapper that triggers a call to Makefile.build, except for clean and a few other little things doable without dependency analysis. - Makefile.common : variable definitions (mostly lists of files or directories) - Makefile.build : contains compilation rules, and the "include" of dependencies - Makefile.doc : specific rules for compiling the documentation. FIND_SKIP_DIRS -------------- The recommended style of using FIND_SKIP_DIRS is for example find . $(FIND_SKIP_DIRS) '(' -name '*.example' ')' -print find . $(FIND_SKIP_DIRS) '(' -name '*.example' -or -name '*.foo' ')' -print 1) The parentheses even in the one-criteria case is so that if one adds other conditions, e.g. change the first example to the second find . $(FIND_SKIP_DIRS) '(' -name '*.example' -and -not -name '*.bak.example' ')' -print one is not tempted to write find . $(FIND_SKIP_DIRS) -name '*.example' -and -not -name '*.bak.example' -print because this will not necessarily work as expected; $(FIND_SKIP_DIRS) ends with an -or, and how it combines with what comes later depends on operator precedence and all that. Much safer to override it with parentheses. In short, it protects against the -or one doesn't see. 2) As to the -print at the end, yes it is necessary. Here's why. You are used to write: find . -name '*.example' and it works fine. But the following will not: find . 
$(FIND_SKIP_DIRS) -name '*.example'
it will also list things directly matched by FIND_SKIP_DIRS (directories we want to prune, in which we don't want to find anything). This is subtle... There is indeed an implicit -print at the end, which is why the usual command without -print works fine, but as soon as other commands are introduced into the mix (the -prune of FIND_SKIP_DIRS), things get trickier because of how the parenthesization around that implicit -print interacts with the parenthesization of the recommended usage form. If we make the -print and the implicit parentheses explicit, this becomes:
find . '(' '(' '(' -name .git -or -name debian ')' -prune ')' -or \
       '(' -name '*.example' ')' ')' -print
The -print acts on EVERYTHING that precedes it, that is both on what matches "'(' -name .git -or -name debian ')'" AND on what matches "'(' -name '*.example' ')'", whereas adding the explicit -print changes this into
find . '(' '(' -name .git -or -name debian ')' -prune ')' -or \
       '(' '(' -name '*.example' ')' -print ')'
The -print now only acts on what matches "'(' -name '*.example' ')'"
coq-8.15.0/dev/doc/build-system.dune.md000066400000000000000000000267501417001151100176070ustar00rootroot00000000000000This file documents what a Coq developer needs to know about the Dune-based build system. If you want to enhance the build system itself (or are curious about its implementation details), see build-system.dev.txt, and in particular its initial HISTORY section. About Dune ========== Coq can now be built using [Dune](https://github.com/ocaml/dune). ## Quick Start Usually, using the latest version of Dune is recommended; see the first line of the `dune-project` file for the minimum required version. If you set `COQ_USE_DUNE=1`, then you don't need to explicitly add `-f Makefile.dune` in any of the commands below. However, you will then need an explicit `-f Makefile.make` if you want to use one of the legacy targets. It is strongly recommended that you use the helper targets available in `Makefile.dune`; `make -f Makefile.dune` will display help. Note that dune will call configure for you if needed, so no need to call `./configure` in the regular development workflow. Four commonly used targets are: - `make -f Makefile.dune check` : build all ml targets as fast as possible - `make -f Makefile.dune world` : build a complete Coq distribution - `dune exec -- dev/shim/coqtop-prelude` : build and launch coqtop + prelude [equivalent to `make states`]. - `dune build $target`: where `$target` can refer to the build directory or the source directory [but will be placed under `_build`] `dune build @install` will build all the public Coq artifacts; `dune build` will build all the targets in the workspace, including tests and documentation (so this is usually not what you want). Dune puts build artifacts in a separate directory `_build/$context`; the usual `context` is `default`; dune also produces an "install" layout under `_build/install/$context/`. Depending on whether you want to refer to the source layout or to the install layout, you may refer to targets in one or the other directory. It will also generate an `.install` file so files can be properly installed by package managers. Dune doesn't allow leftovers of object files it may generate in-tree [as to avoid conflicts], so please be sure your tree is clean from object files generated by the make-based system or from manual compilation.
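As a concrete illustration of the `_build` layout described above (the artifact paths below are only indicative and may vary between Coq versions):

```
# build a single artifact; it ends up under _build/default/
dune build theories/Init/Prelude.vo
ls _build/default/theories/Init/Prelude.vo

# build everything that is meant to be installed, then inspect the install layout
dune build @install
ls _build/install/default/bin
```
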
Contrary to other systems, Dune doesn't use a global `Makefile` but local build files named `dune` which are later composed to form a global build, for example `plugins/ltac/dune` or `kernel/dune`. As a developer, Dune should take care of all OCaml-related build tasks including library management, `merlin` setup, linking order, etc... You should not have to modify `dune` files in regular workflow unless you are adding a new binary, library, or plugin, or want to tweak some low-level option. ## Per-User Custom Settings Dune will read the file `~/.config/dune/config`; see `man dune-config`. Among others, you can set in this file the custom number of build threads `(jobs N)` and display options `(display _mode_)`. ## Running binaries [coqtop / coqide] Running `coqtop` directly with `dune exec -- coqtop` won't in general work well unless you are using `dune exec -- coqtop -noinit`. The `coqtop` binary doesn't depend itself on Coq's prelude, so plugins / vo files may go stale if you rebuild only `coqtop`. Instead, you should use the provided "shims" for running `coqtop` and `coqide` in a fast build. In order to use them, do: ``` $ dune exec -- dev/shim/coqtop-prelude ``` or `quickide` / `dev/shim/coqide-prelude` for CoqIDE, etc.... See `dev/shim/dune` for a complete list of targets. These targets enjoy quick incremental compilation thanks to `-opaque` so they tend to be very fast while developing. Note that for a fast developer build of ML files, the `check` target is faster, as it doesn't link the binaries and uses the non-optimizing compiler. If you built the full standard library with the `world` target, then you can run the commands in the `_build/install/default/bin` directories (including `coq_makefile`). ## Targets The default dune target is `dune build` (or `dune build @install`), which will scan all sources in the Coq tree and then build the whole project, creating an "install" overlay in `_build/install/default`. You can build some other target by doing `dune build $TARGET`, where `$TARGET` can be a `.cmxa`, a binary, a file that Dune considers a target, an alias, etc... In order to build a single package, you can do `dune build $PACKAGE.install`. A very useful target is `dune build @check`, that will compile all the ml files in quick mode. Dune also provides targets for documentation, testing, and release builds, please see below. ## Testing and documentation targets There are two ways to run the test suite using Dune: - After building Coq with `make -f Makefile.dune world`, you can run the test-suite in place, generating output files in the source tree by running `make -C test-suite` from the top directory of the source tree (equivalent to running `make test-suite` from the `test-suite` directory). This permits incremental usage since output files will be preserved. - You can also run the test suite in a hygienic way using `make -f Makefile.dune test-suite` or `dune runtest`. This is convenient for full runs from scratch, for instance in CI. Since `dune` still invokes the test-suite makefile, the environment variable `NJOBS` is used to set the `-j` option that is passed to make (for example, with the command `NJOBS=8 dune runtest`). This use of `NJOBS` will be removed when the test suite is fully ported to Dune. There is preliminary support to build the API documentation and reference manual in HTML format, use `dune build {@doc,@refman-html}` to generate them. So far these targets will build the documentation artifacts, however no install rules are generated yet. 
## Developer shell You can create a developer shell with `dune utop $library`, where `$library` can be any directory in the current workspace. For example, `dune utop engine` or `dune utop plugins/ltac` will launch `utop` with the right libraries already loaded. Note that you must invoke the `#rectypes;;` toplevel flag in order to use Coq libraries. The provided `.ocamlinit` file does this automatically. ## ocamldebug You can use [ocamldebug](https://ocaml.org/learn/tutorials/debug.html#The-OCaml-debugger) with Dune; after a build, do: ``` dune exec -- dev/dune-dbg coqc foo.v (ocd) source dune_db ``` to start `coqc.byte foo.v`, other targets are `{checker,coqide,coqtop}`: ``` dune exec -- dev/dune-dbg checker foo.vo (ocd) source dune_db ``` Unfortunately, dependency handling is not fully refined / automated, you may find the occasional hiccup due to libraries being renamed, etc... Please report any issue. For running in emacs, use `coqdev-ocamldebug` from `coqdev.el`. **Note**: If you are using OCaml >= 4.08 you need to use ``` (ocd) source dune_db_408 ``` or ``` (ocd) source dune_db_409 ``` depending on your OCaml version. This is due to several factors: - OCaml >= 4.08 doesn't allow doubly-linking modules, however `source` is not re entrant and seems to doubly-load in the default setup, see https://github.com/coq/coq/issues/8952 - OCaml >= 4.09 comes with `dynlink` already linked in so we need to modify the list of modules loaded. ### Debugging hints - To debug a failure/error/anomaly, add a breakpoint in `Vernacinterp.interp_gen` (in `vernac/vernacinterp.ml`) at the with clause of the "try ... with ..." block, then go "back" a few steps to find where the failure/error/anomaly has been raised - Alternatively, for an error or an anomaly, add breakpoints where it was raised (eg in `user_err` or `anomaly` in `lib/cErrors.ml`, or the functions in `pretyping/pretype_errors.ml`, or other raises depending on the error) - If there is a linking error (eg from "source dune_db"), do a "dune build coq-core.install" and try again. - If you build Coq with an OCaml version earlier than 4.06, and have the OCAMLRUNPARAM environment variable set, Coq may hang on startup when run from the debugger. If this happens, unset the variable, re-start Emacs, and run the debugger again. ## Dropping from coqtop: The following commands should work: ``` dune exec -- dev/shim/coqbyte-prelude > Drop. # #directory "dev";; # #use "include";; ``` ## Compositionality, developer and release modes. By default [in "developer mode"], Dune will compose all the packages present in the tree and perform a global build. That means that for example you could drop the `ltac2` folder under `plugins` and get a build using `ltac2`, that will use the current Coq version. This is very useful to develop plugins and Coq libraries as your plugin will correctly track dependencies and rebuild incrementally as needed. However, it is not always desirable to go this way. For example, the current Coq source tree contains two packages [Coq and CoqIDE], and in the OPAM CoqIDE package we don't want to build CoqIDE against the local copy of Coq. For this purpose, Dune supports the `-p` option, so `dune build -p coqide` will build CoqIDE against the system-installed version of Coq libs, and use a "release" profile that for example enables stronger compiler optimizations. ## OPAM file generation `.opam` files are automatically generated by Dune from the package descriptions in the `dune-project` file; see Dune's manual for more details. 
## Stanzas

`dune` files contain the so-called "stanzas", that may declare:

- libraries,
- executables,
- documentation,
- arbitrary blobs.

The concrete options for each stanza can be seen in the Dune manual, but usually the default setup will work well with the current Coq sources. Note that declaring a library or an executable won't make it installed by default; for that, you need to provide a "public name".

## Workspaces and Profiles

Dune provides support for tree workspaces so the developer can set global options --- such as flags --- on all packages, or build Coq with different OPAM switches simultaneously [for example to test compatibility]; for more information, please refer to the Dune manual.

## Inlining reports

The `ireport` profile will produce standard OCaml [inlining reports](https://caml.inria.fr/pub/docs/manual-ocaml/flambda.html#sec488). These are to be found under `_build/default/$lib/$lib.objs/$module.$round.inlining.org` and are in Emacs `org-mode` format.

Note that due to https://github.com/ocaml/dune/issues/1401 , we must perform a full rebuild each time as otherwise Dune will remove the files. We hope to solve this in the future.

## Planned and Advanced features

Dune supports or will support extra functionality that may prove very useful to Coq; some examples are:

- Cross-compilation.
- Automatic Generation of OPAM files.
- Multi-directory libraries.

## FAQ

- I get "Error: Dynlink error: Interface mismatch": You are likely running a partial build which doesn't include implicitly loaded plugins / vo files. See the "Running binaries [coqtop / coqide]" section above as to how to correctly call Coq's binaries.

## Dune cheat sheet

- `dune build` build all targets in the current workspace
- `dune build @check` build all ML targets as fast as possible, setup merlin
- `dune utop $dir` open a shell for libraries in `$dir`
- `dune exec -- $file` build and execute binary `$file`, which can be in the path or be a specific name
- `dune build _build/$context/$foo` build target `$foo` in `$context`, with build dir layout
- `dune build _build/install/$context/foo` build target `$foo` in `$context`, with install dir layout

### packaging:

- `dune subst` generate metadata for a package to be installed / distributed, necessary for opam
- `dune build -p $pkg` build a package in release mode

coq-8.15.0/dev/doc/build-system.txt

This file documents what a Coq developer needs to know about the build system. If you want to enhance the build system itself (or are curious about its implementation details), see build-system.dev.txt, and in particular its initial HISTORY section.

FAQ: special features used in this Makefile
-------------------------------------------

* Order-only dependencies: |
  Dependencies placed after a bar (|) should be built before the current rule, but having one of them out-of-date does not trigger a rebuild of the current rule.
  See http://www.gnu.org/software/make/manual/make.html#Prerequisite-Types

* Annotation before commands: +/-/@
  a command starting with - is always successful (errors are ignored)
  a command starting with + is run even if option -n is given to make
  a command starting with @ is not echoed before being run

* Custom functions
  Definition via "define foo" followed by commands (arg is $(1) etc)
  Call via "$(call foo,arg1)"

* Useful builtin functions
  $(subst ...), $(patsubst ...), $(shell ...), $(foreach ...), $(if ...)
* Behavior of -include If the file given to -include doesn't exist, make tries to build it, and even retries again if necessary, but doesn't care if this build finally fails. We used to rely on this "feature", but this should not be the case anymore. We kept "-include" instead of "include" for avoiding warnings about initially non-existent files. Changes (for old-timers) ------------------------ The contents of the old Makefile has been mostly split into: - variable declarations for file lists in Makefile.common. These declarations are now static (for faster Makefile execution), so their definitions are order-dependent. - actual building rules and compiler flags variables in Makefile.build The handling of globals is now: the globals of FOO.v are in FOO.glob and the global glob.dump is created by concatenation of all .glob files. In particular, .glob files are now always created. See also section "cleaning targets" Reducing build system overhead ------------------------------ When you are actively working on a file in a "make a change, make to test, make a change, make to test", etc mode, here are a few tips to save precious time: - Always ask for what you want directly (e.g. bin/coqtop, foo/bar.cmo, ...), don't do "make world" and interrupt it when it has done what you want. For example, if you only want to test whether bin/coqtop still builds (and eventually start it to test your bugfix or new feature), use "make bin/coqtop" or "make coqbinaries" or something like that. - To disable all dependency recalculation, use the NO_RECALC_DEPS=1 option. It disables REcalculation of dependencies, not calculation of dependencies. In other words, if a .d file does not exist, it is still created, but it is not updated every time the source file (e.g. .ml) is changed. Dependencies ------------ There are no dependencies in the archive anymore, they are always bootstrapped. The dependencies of a file FOO are in FOO.d . This enables partial recalculation of dependencies (only the dependencies of changed files are recomputed). If you add a dependency to a Coq camlp5 extension (grammar.cma or q_constr.cmo), then see sections ".mlg files" and "new files". Cleaning Targets ---------------- Targets for cleaning various parts: - distclean: clean everything; must leave only what should end up in distribution tarball and/or is in a svn checkout. - clean: clean everything except effect of "./configure" and documentation. - cleanconfig: clean effect of "./configure" only - archclean: remove all architecture-dependent generated files - indepclean: remove all architecture-independent generated files (not documentation) - objclean: clean all generated files, but not Makefile meta-data (e.g. dependencies), nor debugging/development information nor other cruft (e.g. editor backup files), nor documentation - docclean: clean documentation .mlg/.mlp files --------------- There is now two kinds of preprocessed files : - a .mlp do not need grammar.cma (they are in grammar/) - a .mlg is now always preprocessed with grammar.cma (and q_constr.cmo), except coqide_main.mlg and its specific rule This classification replaces the old mechanism of declaring the use of a grammar extension via a line of the form: (*i camlp4deps: "grammar.cma q_constr.cmo" i*) The use of (*i camlp4use: ... i*) to mention uses of standard extension such as IFDEF has also been discontinued, the Makefile now always calls camlp5 with pa_macros.cmo and a few others by default. 
For debugging a Coq grammar extension, it could be interesting to use the READABLE_ML4=1 option, otherwise the generated .ml are in an internal binary format (see build-system.dev.txt). New files --------- For a new file, in most cases, you just have to add it to the proper file list(s): - For .ml, in the corresponding .mllib (e.g. kernel/kernel.mllib) Be careful with order, duplicated entries, whitespace errors, and do not mention .mli there. If module B depends on module A, then B should be after A in the .mllib file. - For .v, in the corresponding vo.itarget (e.g theories/Init/vo.itarget) - The definitions in Makefile.common might have to be adapted too. - If your file needs a specific rule, add it to Makefile.build The list of all ml4 files is not handled manually anymore. coq-8.15.0/dev/doc/case-repr.md000066400000000000000000000107001417001151100161020ustar00rootroot00000000000000## Case representation Starting from Coq 8.14, the term representation of pattern-matching uses a so-called *compact form*. Compared to the previous representation, the major difference is that all type and term annotations on lambda and let abstractions that were present in branches and return clause of pattern-matchings were removed. In order to keep the ability to construct the old expanded form out of the new compact form, the case node also makes explicit data that was stealthily present in the expanded return clause, namely universe instances and parameters of the inductive type being eliminated. ### ML Representation The case node now looks like ``` Case of case_info * Instance.t * (* universe instances of the inductive *) constr array * (* parameters of the inductive *) case_return * (* erased return clause *) case_invert * (* SProp inversion data *) constr * (* scrutinee *) case_branch array (* erased branches *) ``` where ``` type case_branch = Name.t binder_annot array * constr type case_return = Name.t binder_annot array * types ``` For comparison, pre-8.14 case nodes were defined as follows. ``` Case of case_info * constr * (* annotated return clause *) case_invert * (* SProp inversion data *) constr * (* scrutinee *) constr array (* annotated branches *) ``` ### Typing Rules and Invariants Disregarding the `case_info` cache and the SProp inversion, the typing rules for the case node can be given as follows. Provided - Γ ⊢ c : Ind@{u} pms Indices - Inductive Ind@{i} Δ : forall Θ, Type := cᵢ : forall Ξᵢ, Ind Δ Aᵢ - Γ, Θ@{i := u}{Δ := pms} ⊢ p : Type - Γ, Ξᵢ@{i := u}{Δ := pms} ⊢ snd brᵢ : p{Θ := Aᵢ{Δ := pms}} Then Γ ⊢ Case (_, u, pms, ( _, p), _, c, br) : p{Θ := Indices} In particular, this implies that Γ ⊢ pms : Δ@{i := u}. Parameters are stored in the same order as in the application node. The u universe instance must be a valid instance for the corresponding inductive type, in particular their length must coincide. The `Name.t binder_annot array` appearing both in the return clause and in the branches must satisfy these invariants: - For branches, it must have the same length as the corresponding Ξᵢ context (including let-ins) - For the return clause, it must have the same length as the context Θ, self : Ind@{u} pms Θ (including let-ins). The last variable appears as the term being destructed and corresponds to the variable introduced by the "as" clause of the user-facing syntax. - The relevance annotations must match with the corresponding sort of the variable from the context. Note that the annotated variable array is reversed w.r.t. the context, i.e. 
variables appear left to right as in standard practice.

Let-bindings can appear in Δ, Θ or Ξᵢ, since they are arbitrary contexts. As a general rule, let bindings appear as binders but not as instances. That is, they MUST appear in the variable array, but they MUST NOT appear in the parameter array. Example:

```
Inductive foo (X := tt) : forall (Y := X), Type := Foo : forall (Z := X), foo.

Definition case (x : foo) : unit := match x as x₀ in foo with Foo _ z => z end
```

The case node of the `case` function is represented as

```
Case (
  _,
  Instance.empty,
  [||],
  ([|(Y, Relevant); (x₀, Relevant)|], unit), (* let (Y := tt) in fun (x₀ : foo) => unit *)
  NoInvert,
  #1,
  [|
    ([|(z, Relevant)|], #1) (* let z := tt in z *)
  |]
)
```

This choice of representation for let-bindings requires access to the environment in some cases, e.g. to compute branch reduction. There is a fast-path for non-let-containing inductive types though, which are the vast majority.

### Porting plugins

The conversion functions from and to the expanded form are:

- `[Inductive, EConstr].expand_case` which goes from the compact to the expanded form and cannot fail (assuming the term was well-typed)
- `[Inductive, EConstr].contract_case` which goes the other way and will raise anomalies if the expanded forms are not fully eta-expanded.

As such, it is always painless to convert to the old representation. Converting the other way, you must ensure that all the terms you provide the compatibility function with are fully eta-expanded, **including let-bindings**. This works as expected for the common case with eta-expanded branches but will fail for plugins that generate non-eta-expanded branches.

Some other useful variants of these functions are:

- `Inductive.expand_case_specif`
- `EConstr.annotate_case`
- `EConstr.expand_branch`

coq-8.15.0/dev/doc/changes.md

## Changes between Coq 8.14 and Coq 8.15

### XML protocol

See xmlprotocol.md for details.

- Added 4 new "db_*" messages to support the Ltac debugger
- Modified the "add" request (not backward compatible), adding 3 additional parameters to the request giving the buffer offset of the added statement. The parameters are Loc.bp, Loc.line_nb and Loc.bol_pos, which are needed so the debugger gets back a buffer-relative Loc.t rather than a sentence-relative Loc.t. For other use cases, these can be set to 0.

### Internal representation of the type of constructors

The type of constructors in fields `mind_user_lc` and `mind_nf_lc` of an inductive packet (see `declarations.ml`) now directly refers to the inductive type rather than to a `Rel` pointing into a context made of the declarations of the inductive types of the block. Thus, instead of `Rel n`, one finds `Ind((mind,ntypes-n),u)` where `ntypes` is the number of types in the block and `u` is the canonical instance of polymorphic universes (i.e. `Level.Var 0` ... `Level.Var (nbuniv-1)`).

In general, code can be adapted by:

- either removing a substitution `Rel`->`Ind` if such a substitution was applied
- or inserting a call to `Inductive.abstract_constructor_type_relatively_to_inductive_types_context` to restore `Rel`s in place of `Ind`s if `Rel`s were expected.

### Universes

- Type `Univ.UContext` now embeds universe user names, generally resulting in more concise code.
- Renaming `Univ.Constraint` into `Univ.Constraints` - Renaming `LSet` into `Level.Set` and `LMap` into `Level.Map` ### Concrete syntax - Explicit nodes `CProj` and `GProj` have been added for the syntax of projections `t.(f)` in `constr_expr` and `glob_constr`, while they were previously encoded in the `CApp` and `GApp` nodes. There may be a need for adding a new case in pattern-matching. The types of `CApp` and `CAppExpl` have been simplified accordingly. ### Functions manipulating contexts A few functions in Vars, Context, Termops, EConstr have moved. The deprecation warning tells what to do. ### Build system and infrastructure - The Windows installer CI build has been moved from the custom workers based on Inria cloud to a standard Github Action, see https://github.com/coq/coq/pull/12425 . Fixes https://github.com/coq/coq/issues/6807 https://github.com/coq/coq/issues/7428 https://github.com/coq/coq/issues/8046 https://github.com/coq/coq/issues/8622 https://github.com/coq/coq/issues/9401 https://github.com/coq/coq/issues/11073 . - Location of Coq's runtime environment and files is now handled by a new library, `coq-core.boot`, which provides a more uniform and centralized API to locate files. ## Changes between Coq 8.13 and Coq 8.14 ### Build system and library infrastructure - ocamlfind library names `coq.*` have been renamed to `coq-core.*`. - Dune is now used to build the OCaml parts of Coq, thus: + ML object files live now in `_build`, as standard in Dune world + you can build object files using `make _build/install/default/bin/coqc`, thanks to our implementation of a make-Dune bridge + .vo files live now in `_build_vo/` + `_build_vo` follows a standard "Coq install layout", that is to say: * `_build_vo/default/bin`: coq-core binaries * `_build_vo/default/lib/coq-core`: coq-core libraries * `_build_vo/default/lib/coq`: coq libraries, such as stdlib This greatly simplifies layout as tooling can assume that `_build_vo/default` has the structure of an installed Coq, thus making the `-local` flag obsolete. + Some developer targets have changed or have been removed in favor of Dune's counterparts, for example `byte` and `install-byte` are no longer needed. For the large majority of developers, we recommend using the full dune build, which is accessible by `make -f Makefile.dune` or by setting the `COQ_USE_DUNE` environment variable. - As a consequence of the above, the packing of plugins has changed. Plugins are now packed using modules aliases which is in general safer w.r.t. scoping, as the container module is just a regular OCaml module. ### Gramlib - A few functions change their interfaces to take benefit of a new abstraction level `LStream` for streams with location function. - Grammar extensions now require specifying whether they create a level or they reuse an existing one. In addition to the Gramlib API changes, GRAMMAR EXTEND stanzas may need a few tweaks. Their grammar was changed so that level and associativity arguments that would have been ignored are now forbidden. Furthermore, extensions without an explicit position now expect the entry to be empty. If it is not the case, the extension will fail at runtime with an assertion failure located near the offending entry. To recover the old behaviour, one needs to explicitly add the new TOP position to the extension. This position expects the entry to be non-empty and populates the topmost defined level with the provided rules. 
Note that this differs from FIRST, which creates a new level and prepends it to the list of levels of the entry. ## Changes between Coq 8.12 and Coq 8.13 ### Code formatting - The automatic code formatting tool `ocamlformat` has been disabled and its git hook removed. If desired, automatic formatting can be achieved by calling the `fmt` target of the dune build system. ### ML API Abstract syntax of tactic: - TacGeneric now takes an argument to tell if it comes from a notation. Use `None` if not and `Some foo` to tell to print such TacGeneric surrounded with `foo:( )`. Printing functions: - `Pp.h` does not take a `int` argument anymore (the argument was not used). In general, where `h n` for `n` non zero was used, `hv n` was instead intended. If cancelling the breaking role of cuts in the box was intended, turn `h n c` into `h c`. Grammar entries: - `Prim.pattern_identref` is deprecated, use `Prim.pattern_ident` which now returns a located identifier. Generic arguments: - Generic arguments: `wit_var` is deprecated, use `wit_hyp`. Dumpglob: - The function `Dumpglob.pause` and `Dumpglob.continue` are replaced by `Dumpglob.push_output` and `Dumpglob.pop_output`. This allows plugins to temporarily change/pause the output of Dumpglob, and then restore it to the original setting. Glob_term: - Removing useless `binding_kind` argument of `GLocalDef` in `extended_glob_local_binder`. ## Changes between Coq 8.11 and Coq 8.12 ### Code formatting - The automatic code formatting tool `ocamlformat` is enabled now for the micromega codebase. Version 0.13.0 is required. See `ocalmformat`'s documentation for more details on integration with your editor. ### ML API Proof state and constant declaration: - A large consolidation of the API handling interactive and non-interactive constant has been performed; low-level APIs are no longer available, and the functionality of the `Proof_global` module has been merged into `Declare`. Notations: - Most operators on numerals have moved to file numTok.ml. - Types `precedence`, `parenRelation`, `tolerability` in `notgram_ops.ml` have been reworked. See `entry_level` and `entry_relative_level` in `constrexpr.ml`. Exception handling: - Coq's custom `Backtrace` module has been removed in favor of OCaml's native backtrace implementation. Please use the functions in `Exninfo.capture` and `iraise` when re-raising inside an exception handler. - Registration of exception printers now follows more closely OCaml's API, thus: + printers are of type `exn -> Pp.t option` [`None` == not handled] + it is forbidden for exception printers to raise. - Refiner.catchable_exception is deprecated, use instead CErrors.noncritical in try-with block. Note that nothing is needed in tclORELSE block since the exceptions there are supposed to be non-critical by construction. Printers: - Functions such as Printer.pr_lconstr_goal_style_env have been removed, use instead functions such as pr_lconstr with label `goal_concl_style:true`. Functions such as Constrextern.extern_constr which were taking a boolean argument for the goal style now take instead a label. Implicit arguments: - The type `Impargs.implicit_kind` was removed in favor of `Glob_term.binding_kind`. ## Changes between Coq 8.10 and Coq 8.11 ### ML API - Function UnivGen.global_of_constr has been removed. - Functions and types deprecated in 8.10 have been removed in Coq 8.11. - Type Decl_kinds.locality has been restructured, see commit message. Main change to do generally is to change the flag "Global" to "Global ImportDefaultBehavior". 
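As an illustration of the last point, a declaration that used to pass the bare `Global` flag now also passes an import status. The snippet below is only a sketch: `my_declare` is a hypothetical wrapper standing for whatever declaration function a plugin actually calls, and only the constructor spelling comes from the change described above.

```ocaml
(* Hypothetical porting sketch for the Decl_kinds.locality change;
   [my_declare] stands for the plugin's own declaration entry point. *)
let declare_globally my_declare name body =
  (* 8.10: my_declare ~local:Decl_kinds.Global name body *)
  (* 8.11: the [Global] constructor now carries an import status *)
  my_declare ~local:Decl_kinds.(Global ImportDefaultBehavior) name body
```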
Proof state: Proofs that are attached to a top-level constant (such as lemmas) are represented by `Lemmas.t`, as they do contain additional information related to the constant declaration. Some functions have been renamed from `start_proof` to `start_lemma`.

Plugins that require access to the information about currently opened lemmas can add one of the `![proof]` attributes to their `mlg` entry, which will refine the type accordingly. See documentation in `vernacentries` for more information.

Proof `terminators` have been removed in favor of a principled proof-saving path. This should not affect the regular API user, but if plugin writers need special handling of the proof term they should now work with Coq upstream to ensure the provided API does work and is principled. Closing `hooks` are still available for simple registration on the constant save path, and essentially they do provide the same power as terminators, but we don't encourage their use other than for simple tasks [such as adding a constant to a database].

Additionally, the API for proof/lemma handling has been refactored: triples have been split into named arguments, and a few bits of duplicated information among layers have been cleaned up. Most proof information is now represented in a direct style, as opposed to it living inside closures in previous Coq versions; thus, proof manipulation possibilities have been improved.

## Changes between Coq 8.9 and Coq 8.10

### ML4 Pre Processing

- Support for `.ml4` files, processed by camlp5, has been removed in favor of `.mlg` files processed by `coqpp`. Porting is usually straightforward, and involves renaming the `file.ml4` file to `file.mlg` and adding a few brackets. See "Transitioning away from Camlp5" below for update instructions.

### ML API

SProp was added, see

General deprecation

- All functions marked [@@ocaml.deprecated] in 8.8 have been removed. Please, make sure your plugin is warning-free in 8.8 before trying to port it over 8.9.

Warnings

- Coq now builds plugins with `-warn-error` enabled by default. The amount of dangerous warnings in plugin code was very high, so we now require plugins in the CI to adhere to the Coq warning policy. We _strongly_ recommend against disabling the default set of warnings. If you have special needs, see the documentation of your build system and/or OCaml for further help.

Names

- Kernel names no longer contain a section path. They now have only two components (module path and label), which led to some changes in the API:
  KerName.make takes only 2 components
  KerName.repr returns only 2 components
  KerName.make2 is now KerName.make
  Constant.make3 has been removed, use Constant.make2
  Constant.repr3 has been removed, use Constant.repr2
- `Names.transparent_state` has been moved to its own module `TransparentState`. This module gathers utility functions that used to be defined in several places.

Coqlib:

- Most functions from the `Coqlib` module have been deprecated in favor of `register_ref` and `lib_ref`. The first one is available through the vernacular `Register` command; it binds a name to a constant. The second command then enables to locate the registered constant through its name. The name resolution is dynamic.

Proof state:

- Handling of proof state has been fully functionalized, thus it is not possible to call global functions such as `get_current_context ()`. The main type for functions that need to handle proof state is `Proof_global.t`.
Unfortunately, this change was not possible to do in a backwards-compatible way, but in most case the api changes are straightforward, with functions taking and returning an extra argument. Macros: - The RAW_TYPED AS and GLOB_TYPED AS stanzas of the ARGUMENT EXTEND macro are deprecated. Use TYPED AS instead. - coqpp (.mlg) based VERNAC EXTEND accesses attributes through a `#[ x = att ]` syntax, where `att : 'a Attributes.attribute` and `x` will be bound with type `'a` in the expression, unlike the old system where `atts : Vernacexpr.vernac_flags` was bound in the expression and had to be manually parsed. - `PRINTED BY` now binds `env` and `sigma`, and expects printers which take as parameters term printers parametrized by an environment and an `evar_map`. Printers - `Ppconstr.pr_constr_expr`, `Ppconstr.lconstr_expr`, `Ppconstr.pr_constr_pattern_expr` and `Ppconstr.pr_lconstr_pattern_expr` now all take an environment and an `evar_map`. Libobject - A Higher-level API for objects with fixed scope was introduced. It supports the following kinds of objects: * Local objects, meaning that objects cannot be imported from outside. * Global objects, meaning that they can be imported (by importing the module that contains the object). * Superglobal objects, meaning that objects survive to closing a module, and are imported when the file which contains them is Required (even without Import). * Objects that survive section closing or don't (see `nodischarge` variants, however we discourage defining such objects) This API is made of the following functions: * `Libobject.local_object` * `Libobject.local_object_nodischarge` * `Libobject.global_object` * `Libobject.global_object_nodischarge` * `Libobject.superglobal_object` * `Libobject.superglobal_object_nodischarge` AST - Minor changes in the AST have been performed, for example https://github.com/coq/coq/pull/9165 Implicit Arguments - `Impargs.declare_manual_implicits` is restricted to only support declaration of implicit binders at constant declaration time. `Impargs.set_implicits` should be used for redeclaration of implicit arguments. ## Changes between Coq 8.8 and Coq 8.9 ### ML API Names - In `Libnames`, the type `reference` and its two constructors `Qualid` and `Ident` have been removed in favor of `qualid`. `Qualid` is now the identity, `Ident` can be replaced by `qualid_of_ident`. Matching over `reference` can be replaced by a test using `qualid_is_ident`. Extracting the `ident` part of a `qualid` can be done using `qualid_basename`. Misctypes - Syntax for universe sorts and kinds has been moved from `Misctypes` to `Glob_term`, as these are turned into kernel terms by `Pretyping`. Proof engine - More functions have been changed to use `EConstr`, notably the functions in `Evd`, and in particular `Evd.define`. Note that the core function `EConstr.to_constr` now _enforces_ by default that the resulting term is ground, that is to say, free of Evars. This is usually what you want, as open terms should be of type `EConstr.t` to benefit from the invariants the `EConstr` API is meant to guarantee. In case you'd like to violate this API invariant, you can use the `abort_on_undefined_evars` flag to `EConstr.to_constr`, but note that setting this flag to false is deprecated so it is only meant to be used as to help port pre-EConstr code. - A few type alias have been deprecated, in all cases the message should indicate what the canonical form is. An important change is the move of `Globnames.global_reference` to `Names.GlobRef.t`. 
- Unification API returns `evar_map option` instead of `bool * evar_map` with the guarantee that the `evar_map` was unchanged if the boolean was false. ML Libraries used by Coq - Introduction of a `Smart` module for collecting `smart*` functions, e.g. `Array.Smart.map`. - Uniformization of some names, e.g. `Array.Smart.fold_left_map` instead of `Array.smartfoldmap`. Printer.ml API - The mechanism in `Printer` that allowed dynamically overriding `pr_subgoals`, `pr_subgoal` and `pr_goal` was removed to simplify the code. It was earlier used by PCoq. Kernel - The following renamings happened: - `Context.Rel.t` into `Constr.rel_context` - `Context.Named.t` into `Constr.named_context` - `Context.Compacted.t` into `Constr.compacted_context` - `Context.Rel.Declaration.t` into `Constr.rel_declaration` - `Context.Named.Declaration.t` into `Constr.named_declaration` - `Context.Compacted.Declaration.t` into `Constr.compacted_declaration` Source code organization - We have eliminated / fused some redundant modules and relocated a few interfaces files. The `intf` folder is gone, and now for example `Constrexpr` is located in `interp/`, `Vernacexpr` in `vernac/` and so on. Changes should be compatible, but in a few cases stricter layering requirements may mean that functions have moved. In all cases adapting is a matter of changing the module name. Vernacular commands - The implementation of vernacular commands has been refactored so it is self-contained now, including the parsing and extension mechanisms. This involves a couple of non-backward compatible changes due to layering issues, where some functions have been moved from `Pcoq` to `Pvernac` and from `Vernacexpr` to modules in `tactics/`. In all cases adapting is a matter of changing the module name. Primitive number parsers - For better modularity, the primitive parsers for `positive`, `N` and `Z` have been split over three files (`plugins/syntax/positive_syntax.ml`, `plugins/syntax/n_syntax.ml`, `plugins/syntax/z_syntax.ml`). Parsing - Manual uses of the `Pcoq.Gram` module have been deprecated. Wrapper modules `Pcoq.Entry` and `Pcoq.Parsable` were introduced to replace it. Termops - Internal printing functions have been placed under the `Termops.Internal` namespace. ### Unit testing The test suite now allows writing unit tests against OCaml code in the Coq code base. Those unit tests create a dependency on the OUnit test framework. ### Transitioning away from Camlp5 In an effort to reduce dependency on camlp5, the use of several grammar macros is discouraged. Coq is now shipped with its own preprocessor, called coqpp, which serves the same purpose as camlp5. To perform the transition to coqpp macros, one first needs to change the extension of a macro file from `.ml4` to `.mlg`. Not all camlp5 macros are handled yet. Due to parsing constraints, the syntax of the macros is slightly different, but updating the source code is mostly a matter of straightforward search-and-replace. The main differences are summarized below. #### OCaml code Every piece of toplevel OCaml code needs to be wrapped into braces. 
For instance, code of the form ``` let myval = 0 ``` should be turned into ``` { let myval = 0 } ``` #### TACTIC EXTEND Steps to perform: - replace the brackets enclosing OCaml code in actions with braces - if not there yet, add a leading `|` to the first rule For instance, code of the form: ``` TACTIC EXTEND my_tac [ "tac1" int_or_var(i) tactic(t) ] -> [ mytac1 ist i t ] | [ "tac2" tactic(t) ] -> [ mytac2 t ] END ``` should be turned into ``` TACTIC EXTEND my_tac | [ "tac1" int_or_var(i) tactic(t) ] -> { mytac1 ist i t } | [ "tac2" tactic(t) ] -> { mytac2 t } END ``` #### VERNAC EXTEND Steps to perform: - replace the brackets enclosing OCaml code in actions and rule classifiers with braces - if not there yet, add a leading `|̀ to the first rule Handwritten classifiers declared through the `CLASSIFIED BY` statement are considered OCaml code, so they also need to be wrapped in braces. For instance, code of the form: ``` VERNAC COMMAND EXTEND my_command CLASSIFIED BY classifier [ "foo" int(i) ] => [ classif' ] -> [ cmd1 i ] | [ "bar" ] -> [ cmd2 ] END ``` should be turned into ``` VERNAC COMMAND EXTEND my_command CLASSIFIED BY { classifier } | [ "foo" int(i) ] => { classif' } -> { cmd1 i } | [ "bar" ] -> { cmd2 } END ``` #### ARGUMENT EXTEND Steps to perform: - replace the brackets enclosing OCaml code in actions with braces - if not there yet, add a leading `|` to the first rule - syntax of `TYPED AS` has been restricted not to accept compound generic arguments as a literal, e.g. `foo_opt` should be rewritten into `foo option` and similarly `foo_list` into `foo list`. - parenthesis around pair types in `TYPED AS` are now mandatory - `RAW_TYPED AS` and `GLOB_TYPED AS` clauses need to be removed `BY` clauses are considered OCaml code, and thus need to be wrapped in braces, but not the `TYPED AS` clauses. For instance, code of the form: ``` ARGUMENT EXTEND my_arg TYPED AS int_opt PRINTED BY printer INTERPRETED BY interp_f GLOBALIZED BY glob_f SUBSTITUTED BY subst_f RAW_TYPED AS int_opt RAW_PRINTED BY raw_printer GLOB_TYPED AS int_opt GLOB_PRINTED BY glob_printer [ "foo" int(i) ] -> [ my_arg1 i ] | [ "bar" ] -> [ my_arg2 ] END ``` should be turned into ``` ARGUMENT EXTEND my_arg TYPED AS { int_opt } PRINTED BY { printer } INTERPRETED BY { interp_f } GLOBALIZED BY { glob_f } SUBSTITUTED BY { subst_f } RAW_PRINTED BY { raw_printer } GLOB_PRINTED BY { glob_printer } | [ "foo" int(i) ] -> { my_arg1 i } | [ "bar" ] -> { my_arg2 } END ``` #### GEXTEND Most plugin writers do not need this low-level interface, but for the sake of completeness we document it. Steps to perform are: - replace `GEXTEND` with `GRAMMAR EXTEND` - wrap every occurrence of OCaml code in actions into braces `{ }` For instance, code of the form ``` GEXTEND Gram GLOBAL: my_entry; my_entry: [ [ x = bar; y = qux -> do_something x y | "("; z = LIST0 my_entry; ")" -> do_other_thing z ] ]; END ``` should be turned into ``` GRAMMAR EXTEND Gram GLOBAL: my_entry; my_entry: [ [ x = bar; y = qux -> { do_something x y } | "("; z = LIST0 my_entry; ")" -> { do_other_thing z } ] ]; END ``` Caveats: - No `GLOBAL` entries mean that they are all local, while camlp5 special-cases this as a shorthand for all global entries. Solution: always define a `GLOBAL` section. - No complex patterns allowed in token naming. Solution: match on it inside the OCaml quotation. ## Changes between Coq 8.7 and Coq 8.8 ### Bug tracker As of 18/10/2017, Coq uses [GitHub issues](https://github.com/coq/coq/issues) as bug tracker. 
Old bug reports were migrated from Bugzilla to GitHub issues using [this migration script](https://gist.github.com/Zimmi48/d923e52f64fe17c72852d9c148bfcdc6#file-bugzilla2github) as detailed in [this blog post](https://www.theozimmermann.net/2017/10/bugzilla-to-github/). All the bugs with a number below 1154 had to be renumbered, you can find a correspondence table [here](/dev/bugzilla2github_stripped.csv). All the other bugs kept their number. ### ML API General deprecation - All functions marked `[@@ocaml.deprecated]` in 8.7 have been removed. Please, make sure your plugin is warning-free in 8.7 before trying to port it over 8.8. Proof engine - Due to the introduction of `EConstr` in 8.7, it is not necessary to track "goal evar normal form status" anymore, thus the type `'a Proofview.Goal.t` loses its ghost argument. This may introduce some minor incompatibilities at the typing level. Code-wise, things should remain the same. We removed the following functions: - `Universes.unsafe_constr_of_global`: use `Global.constr_of_global_in_context` instead. The returned term contains De Bruijn universe variables. If you don't depend on universes being instantiated, simply drop the context. - `Universes.unsafe_type_of_global`: same as above with `Global.type_of_global_in_context` We changed the type of the following functions: - `Global.body_of_constant_body`: now also returns the abstract universe context. The returned term contains De Bruijn universe variables. - `Global.body_of_constant`: same as above. - `Constrinterp.*`: generally, many functions that used to take an `evar_map ref` have now been switched to functions that will work in a functional way. The old style of passing `evar_map`s as references is not supported anymore. Changes in the abstract syntax tree: - The practical totality of the AST has been nodified using `CAst.t`. This means that all objects coming from parsing will be indeed wrapped in a `CAst.t`. `Loc.located` is on its way to deprecation. Some minor interfaces changes have resulted from this. We have changed the representation of the following types: - `Lib.object_prefix` is now a record instead of a nested tuple. Some tactics and related functions now support static configurability, e.g.: - `injectable`, `dEq`, etc. take an argument `~keep_proofs` which, - if `None`, tells to behave as told with the flag `Keep Proof Equalities` - if `Some b`, tells to keep proof equalities iff `b` is true Declaration of printers for arguments used only in vernac command - It should now use `declare_extra_vernac_genarg_pprule` rather than `declare_extra_genarg_pprule`, otherwise, a failure at runtime might happen. An alternative is to register the corresponding argument as a value, using `Geninterp.register_val0 wit None`. Types Alias deprecation and type relocation. - A few type alias have been deprecated, in all cases the message should indicate what the canonical form is. ### STM API The STM API has seen a general overhaul. The main change is the introduction of a "Coq document" type, which all operations now take as a parameter. This effectively functionalize the STM API and will allow in the future to handle several documents simultaneously. The main remarkable point is that key implicit global parameters such as load-paths and required modules are now arguments to the document creation function. This helps enforcing some key invariants. 
### XML IDE Protocol - Before 8.8, `Query` only executed the first command present in the `query` string; starting with 8.8, the caller may include several statements. This is useful for instance for temporarily setting an option and then executing a command. ## Changes between Coq 8.6 and Coq 8.7 ### Ocaml Coq is compiled with `-safe-string` enabled and requires plugins to do the same. This means that code using `String` in an imperative way will fail to compile now. They should switch to `Bytes.t` Configure supports passing flambda options, use `-flambda-opts OPTS` with a flambda-enabled Ocaml to tweak the compilation to your taste. ### ML API - Added two functions for declaring hooks to be executed in reduction functions when some given constants are traversed: * `declare_reduction_effect`: to declare a hook to be applied when some constant are visited during the execution of some reduction functions (primarily `cbv`). * `set_reduction_effect`: to declare a constant on which a given effect hook should be called. - We renamed the following functions: ``` Context.Rel.Declaration.fold -> Context.Rel.Declaration.fold_constr Context.Named.Declaration.fold -> Context.Named.Declaration.fold_constr Printer.pr_var_list_decl -> Printer.pr_compacted_decl Printer.pr_var_decl -> Printer.pr_named_decl Nameops.lift_subscript -> Nameops.increment_subscript ``` - We removed the following functions: * `Termops.compact_named_context_reverse`: practical substitute is `Termops.compact_named_context`. * `Namegen.to_avoid`: equivalent substitute is `Names.Id.List.mem`. - We renamed the following modules: * `Context.ListNamed` -> `Context.Compacted` - The following type aliases where removed * `Context.section_context`: it was just an alias for `Context.Named.t` which is still available. - The module `Constrarg` was merged into `Stdarg`. - The following types have been moved and modified: * `local_binder` -> `local_binder_expr` * `glob_binder` merged with `glob_decl` - The following constructors have been renamed: ``` LocalRawDef -> CLocalDef LocalRawAssum -> CLocalAssum LocalPattern -> CLocalPattern ``` - In `Constrexpr_ops`: Deprecating `abstract_constr_expr` in favor of `mkCLambdaN`, and `prod_constr_expr` in favor of `mkCProdN`. Note: the first ones were interpreting `(x y z:_)` as `(x:_) (y:_) (z:_)` while the second ones were preserving the original sharing of the type. - In `Nameops`: The API has been made more uniform. New combinators added in the `Name` space name. Function `out_name` now fails with `IsAnonymous` rather than with `Failure "Nameops.out_name"`. - Location handling and AST attributes: Location handling has been reworked. First, `Loc.ghost` has been removed in favor of an option type, all objects carrying an optional source code location have been switched to use `Loc.t option`. Storage of location information has been also refactored. The main datatypes representing Coq AST (`constrexpr`, `glob_expr`) have been switched to a generic "node with attributes" representation `'a CAst.ast`, which is a record of the form: ```ocaml type 'a ast = private { v : 'a; loc : Loc.t option; ... } ``` consumers of AST nodes are recommended to use accessor-based pattern matching `{ v; loc }` to destruct `ast` object. Creation is done with `CAst.make ?loc obj`, where the attributes are optional. Some convenient combinators are provided in the module. 
A typical match: ```ocaml | CCase(loc, a1) -> CCase(loc, f a1) ``` is now done as: ```ocaml | { v = CCase(a1); loc } -> CAst.make ?loc @@ CCase(f a1) ``` or even better, if plan to preserve the attributes you can wrap your top-level function in `CAst.map` to have: ```ocaml | CCase(a1) -> CCase(f a1) ``` This scheme based on records enables easy extensibility of the AST node type without breaking compatibility. Not all objects carrying a location have been converted to the generic node representation, some of them may be converted in the future, for some others the abstraction is not just worth it. Thus, we still maintain a `'a Loc.located == Loc.t option * a'`, tuple type which should be treated as private datatype (ok to match against, but forbidden to manually build), and it is mandatory to use it for objects that carry a location. This policy has been implemented in the whole code base. Matching a located object hasn't changed, however, `Loc.tag ?loc obj` must be used to build one. - In `GOption`: Support for non-synchronous options has been removed. Now all options are handled as a piece of normal document state, and thus passed to workers, etc... As a consequence, the field `Goptions.optsync` has been removed. - In `Coqlib` / reference location: We have removed from Coqlib functions returning `constr` from names. Now it is only possible to obtain references, that must be processed wrt the particular needs of the client. We have changed in constrintern the functions returnin `constr` as well to return global references instead. Users of `coq_constant/gen_constant` can do `Universes.constr_of_global (find_reference dir r)` _however_ note the warnings in the `Universes.constr_of_global` in the documentation. It is very likely that you were previously suffering from problems with polymorphic universes due to using `Coqlib.coq_constant` that used to do this. You must rather use `pf_constr_of_global` in tactics and `Evarutil.new_global` variants when constructing terms in ML (see univpoly.txt for more information). ### Tactic API - `pf_constr_of_global` now returns a tactic instead of taking a continuation. Thus it only generates one instance of the global reference, and it is the caller's responsibility to perform a focus on the goal. - `pf_global`, `construct_reference`, `global_reference`, `global_reference_in_absolute_module` now return a `global_reference` instead of a `constr`. - The `tclWEAK_PROGRESS` and `tclNOTSAMEGOAL` tacticals were removed. Their usecase was very specific. Use `tclPROGRESS` instead. - New (internal) tactical `tclINDEPENDENTL` that combined with enter_one allows to iterate a non-unit tactic on all goals and access their returned values. - The unsafe flag of the `Refine.refine` function and its variants has been renamed and dualized into typecheck and has been made mandatory. ### Ltac API Many Ltac specific API has been moved in its own ltac/ folder. Amongst other important things: - `Pcoq.Tactic` -> `Pltac` - `Constrarg.wit_tactic` -> `Tacarg.wit_tactic` - `Constrarg.wit_ltac` -> `Tacarg.wit_ltac` - API below `ltac/` that accepted a *`_tactic_expr` now accept a *`_generic_argument` instead - Some printing functions were moved from `Pptactic` to `Pputils` - A part of `Tacexpr` has been moved to `Tactypes` - The `TacFun` tactic expression constructor now takes a `Name.t list` for the variable list rather than an `Id.t option list`. The folder itself has been turned into a plugin. 
This does not change much, but because it is a packed plugin, it may wreak havoc for third-party plugins depending on any module defined in the `ltac/` directory. Namely, even if everything looks OK at compile time, a plugin can fail to load at link time because it mistakenly looks for a module `Foo` instead of `Ltac_plugin.Foo`, with an error of the form: ``` Error: while loading myplugin.cmxs, no implementation available for Foo. ``` In particular, most `EXTEND` macros will trigger this problem even if they seemingly do not use any Ltac module, as their expansion do. The solution is simple, and consists in adding a statement `open Ltac_plugin` in each file using a Ltac module, before such a module is actually called. An alternative solution would be to fully qualify Ltac modules, e.g. turning any call to Tacinterp into `Ltac_plugin.Tacinterp`. Note that this solution does not work for `EXTEND` macros though. ### Additional changes in tactic extensions Entry `constr_with_bindings` has been renamed into `open_constr_with_bindings`. New entry `constr_with_bindings` now uses type classes and rejects terms with unresolved holes. ### Error handling - All error functions now take an optional parameter `?loc:Loc.t`. For functions that used to carry a suffix `_loc`, such suffix has been dropped. - `errorlabstrm` and `error` has been removed in favor of `user_err`. - The header parameter to `user_err` has been made optional. ### Pretty printing Some functions have been removed, see pretty printing below for more details. #### Pretty Printing and XML protocol The type `std_cmdpps` has been reworked and made the canonical "Coq rich document type". This allows for a more uniform handling of printing (specially in IDEs). The main consequences are: - Richpp has been confined to IDE use. Most of previous uses of the `richpp` type should be replaced now by `Pp.std_cmdpps`. Main API has been updated. - The XML protocol will send a new message type of `pp`, which should be rendered client-wise. - `Set Printing Width` is deprecated, now width is controlled client-side. - `Pp_control` has removed. The new module `Topfmt` implements console control for the toplevel. - The impure tag system in `Pp` has been removed. This also does away with the printer signatures and functors. Now printers tag unconditionally. - The following functions have been removed from `Pp`: ```ocaml val stras : int * string -> std_ppcmds val tbrk : int * int -> std_ppcmds val tab : unit -> std_ppcmds val pifb : unit -> std_ppcmds val comment : int -> std_ppcmds val comments : ((int * int) * string) list ref val eval_ppcmds : std_ppcmds -> std_ppcmds val is_empty : std_ppcmds -> bool val t : std_ppcmds -> std_ppcmds val hb : int -> std_ppcmds val vb : int -> std_ppcmds val hvb : int -> std_ppcmds val hovb : int -> std_ppcmds val tb : unit -> std_ppcmds val close : unit -> std_ppcmds val tclose : unit -> std_ppcmds val open_tag : Tag.t -> std_ppcmds val close_tag : unit -> std_ppcmds val msg_with : ... module Tag ``` ### Stm API - We have streamlined the `Stm` API, now `add` and `query` take a `coq_parsable` instead a `string` so clients can have more control over their input stream. As a consequence, their types have been modified. - The main parsing entry point has also been moved to the `Stm`. Parsing is considered a synchronous operation so it will either succeed or raise an exception. - `Feedback` is now only emitted for asynchronous operations. 
As a consequence, it always carries a valid stateid and the type has changed to accommodate that. - A few unused hooks were removed due to cleanups, no clients known. ### Toplevel and Vernacular API - The components related to vernacular interpretation have been moved to their own folder `vernac/` whereas toplevel now contains the proper toplevel shell and compiler. - Coq's toplevel has been ported to directly use the common `Stm` API. The signature of a few functions has changed as a result. ### XML Protocol - The legacy `Interp` call has been turned into a noop. - The `query` call has been modified, now it carries a mandatory `route_id` integer parameter, that associated the result of such query with its generated feedback. ## Changes between Coq 8.5 and Coq 8.6 ### Parsing `Pcoq.parsable` now takes an extra optional filename argument so as to bind locations to a file name when relevant. ### Files To avoid clashes with OCaml's compiler libs, the following files were renamed: ``` kernel/closure.ml{,i} -> kernel/cClosure.ml{,i} lib/errors.ml{,i} -> lib/cErrors.ml{,i} toplevel/cerror.ml{,i} -> toplevel/explainErr.mli{,i} ``` All IDE-specific files, including the XML protocol have been moved to `ide/` ### Reduction functions In `closure.ml`, we introduced the more precise reduction flags `fMATCH`, `fFIX`, `fCOFIX`. We renamed the following functions: ``` Closure.betadeltaiota -> Closure.all Closure.betadeltaiotanolet -> Closure.allnolet Reductionops.beta -> Closure.beta Reductionops.zeta -> Closure.zeta Reductionops.betaiota -> Closure.betaiota Reductionops.betaiotazeta -> Closure.betaiotazeta Reductionops.delta -> Closure.delta Reductionops.betalet -> Closure.betazeta Reductionops.betadelta -> Closure.betadeltazeta Reductionops.betadeltaiota -> Closure.all Reductionops.betadeltaiotanolet -> Closure.allnolet Closure.no_red -> Closure.nored Reductionops.nored -> Closure.nored Reductionops.nf_betadeltaiota -> Reductionops.nf_all Reductionops.whd_betadelta -> Reductionops.whd_betadeltazeta Reductionops.whd_betadeltaiota -> Reductionops.whd_all Reductionops.whd_betadeltaiota_nolet -> Reductionops.whd_allnolet Reductionops.whd_betadelta_stack -> Reductionops.whd_betadeltazeta_stack Reductionops.whd_betadeltaiota_stack -> Reductionops.whd_all_stack Reductionops.whd_betadeltaiota_nolet_stack -> Reductionops.whd_allnolet_stack Reductionops.whd_betadelta_state -> Reductionops.whd_betadeltazeta_state Reductionops.whd_betadeltaiota_state -> Reductionops.whd_all_state Reductionops.whd_betadeltaiota_nolet_state -> Reductionops.whd_allnolet_state Reductionops.whd_eta -> Reductionops.shrink_eta Tacmach.pf_whd_betadeltaiota -> Tacmach.pf_whd_all Tacmach.New.pf_whd_betadeltaiota -> Tacmach.New.pf_whd_all ``` And removed the following ones: ``` Reductionops.whd_betaetalet Reductionops.whd_betaetalet_stack Reductionops.whd_betaetalet_state Reductionops.whd_betadeltaeta_stack Reductionops.whd_betadeltaeta_state Reductionops.whd_betadeltaeta Reductionops.whd_betadeltaiotaeta_stack Reductionops.whd_betadeltaiotaeta_state Reductionops.whd_betadeltaiotaeta ``` In `intf/genredexpr.mli`, `fIota` was replaced by `FMatch`, `FFix` and `FCofix`. Similarly, `rIota` was replaced by `rMatch`, `rFix` and `rCofix`. ### Notation_ops Use `Glob_ops.glob_constr_eq` instead of `Notation_ops.eq_glob_constr`. ### Logging and Pretty Printing * Printing functions have been removed from `Pp.mli`, which is now a purely pretty-printing interface. 
Functions affected are: ```` ocaml val pp : std_ppcmds -> unit val ppnl : std_ppcmds -> unit val pperr : std_ppcmds -> unit val pperrnl : std_ppcmds -> unit val pperr_flush : unit -> unit val pp_flush : unit -> unit val flush_all : unit -> unit val msg : std_ppcmds -> unit val msgnl : std_ppcmds -> unit val msgerr : std_ppcmds -> unit val msgerrnl : std_ppcmds -> unit val message : string -> unit ```` which are no more available. Users of `Pp.pp msg` should now use the proper `Feedback.msg_*` function. Clients also have no control over flushing, the back end takes care of it. Also, the `msg_*` functions now take an optional `?loc` parameter for relaying location to the client. * Feedback related functions and definitions have been moved to the `Feedback` module. `message_level` has been renamed to level. Functions moved from `Pp` to `Feedback` are: ```` ocaml val set_logger : logger -> unit val std_logger : logger val emacs_logger : logger val feedback_logger : logger ```` * Changes in the Feedback format/Protocol. - The `Message` feedback type now carries an optional location, the main payload is encoded using the richpp document format. - The `ErrorMsg` feedback type is thus unified now with `Message` at level `Error`. * We now provide several loggers, `log_via_feedback` is removed in favor of `set_logger feedback_logger`. Output functions are: ```` ocaml val with_output_to_file : string -> ('a -> 'b) -> 'a -> 'b val msg_error : ?loc:Loc.t -> Pp.std_ppcmds -> unit val msg_warning : ?loc:Loc.t -> Pp.std_ppcmds -> unit val msg_notice : ?loc:Loc.t -> Pp.std_ppcmds -> unit val msg_info : ?loc:Loc.t -> Pp.std_ppcmds -> unit val msg_debug : ?loc:Loc.t -> Pp.std_ppcmds -> unit ```` with the `msg_*` functions being just an alias for `logger $Level`. * The main feedback functions are: ```` ocaml val set_feeder : (feedback -> unit) -> unit val feedback : ?id:edit_or_state_id -> ?route:route_id -> feedback_content -> unit val set_id_for_feedback : ?route:route_id -> edit_or_state_id -> unit ```` Note that `feedback` doesn't take two parameters anymore. After refactoring the following function has been removed: ```` ocaml val get_id_for_feedback : unit -> edit_or_state_id * route_id ```` ### Kernel API changes - The interface of the `Context` module was changed. Related types and functions were put in separate submodules. 
The mapping from old identifiers to new identifiers is the following: ``` Context.named_declaration ---> Context.Named.Declaration.t Context.named_list_declaration ---> Context.NamedList.Declaration.t Context.rel_declaration ---> Context.Rel.Declaration.t Context.map_named_declaration ---> Context.Named.Declaration.map_constr Context.map_named_list_declaration ---> Context.NamedList.Declaration.map Context.map_rel_declaration ---> Context.Rel.Declaration.map_constr Context.fold_named_declaration ---> Context.Named.Declaration.fold Context.fold_rel_declaration ---> Context.Rel.Declaration.fold Context.exists_named_declaration ---> Context.Named.Declaration.exists Context.exists_rel_declaration ---> Context.Rel.Declaration.exists Context.for_all_named_declaration ---> Context.Named.Declaration.for_all Context.for_all_rel_declaration ---> Context.Rel.Declaration.for_all Context.eq_named_declaration ---> Context.Named.Declaration.equal Context.eq_rel_declaration ---> Context.Rel.Declaration.equal Context.named_context ---> Context.Named.t Context.named_list_context ---> Context.NamedList.t Context.rel_context ---> Context.Rel.t Context.empty_named_context ---> Context.Named.empty Context.add_named_decl ---> Context.Named.add Context.vars_of_named_context ---> Context.Named.to_vars Context.lookup_named ---> Context.Named.lookup Context.named_context_length ---> Context.Named.length Context.named_context_equal ---> Context.Named.equal Context.fold_named_context ---> Context.Named.fold_outside Context.fold_named_list_context ---> Context.NamedList.fold Context.fold_named_context_reverse ---> Context.Named.fold_inside Context.instance_from_named_context ---> Context.Named.to_instance Context.extended_rel_list ---> Context.Rel.to_extended_list Context.extended_rel_vect ---> Context.Rel.to_extended_vect Context.fold_rel_context ---> Context.Rel.fold_outside Context.fold_rel_context_reverse ---> Context.Rel.fold_inside Context.map_rel_context ---> Context.Rel.map_constr Context.map_named_context ---> Context.Named.map_constr Context.iter_rel_context ---> Context.Rel.iter Context.iter_named_context ---> Context.Named.iter Context.empty_rel_context ---> Context.Rel.empty Context.add_rel_decl ---> Context.Rel.add Context.lookup_rel ---> Context.Rel.lookup Context.rel_context_length ---> Context.Rel.length Context.rel_context_nhyps ---> Context.Rel.nhyps Context.rel_context_tags ---> Context.Rel.to_tags ``` - Originally, rel-context was represented as: ```ocaml type Context.rel_context = Names.Name.t * Constr.t option * Constr.t ``` Now it is represented as: ```ocaml type Context.Rel.Declaration.t = LocalAssum of Names.Name.t * Constr.t | LocalDef of Names.Name.t * Constr.t * Constr.t ``` - Originally, named-context was represented as: ```ocaml type Context.named_context = Names.Id.t * Constr.t option * Constr.t ``` Now it is represented as: ```ocaml type Context.Named.Declaration.t = LocalAssum of Names.Id.t * Constr.t | LocalDef of Names.Id.t * Constr.t * Constr.t ``` - The various `EXTEND` macros do not handle specially the Coq-defined entries anymore. Instead, they just output a name that have to exist in the scope of the ML code. The parsing rules (`VERNAC`) `ARGUMENT EXTEND` will look for variables `$name` of type `Gram.entry`, while the parsing rules of (`VERNAC COMMAND` | `TACTIC`) `EXTEND`, as well as the various `TYPED AS` clauses will look for variables `wit_$name` of type `Genarg.genarg_type`. The small DSL for constructing compound entries still works over this scheme. 
Note that in the case of (`VERNAC`) `ARGUMENT EXTEND`, the name of the argument entry is bound in the parsing rules, so beware of recursive calls. For example, to get `wit_constr` you must `open Constrarg` at the top of the file. - `Evarutil` was split in two parts. The new `Evardefine` file exposes functions `define_evar_`* mostly used internally in the unification engine. - The `Refine` module was moved out of `Proofview`. ``` Proofview.Refine.* ---> Refine.* ``` - A statically monotonic evarmap type was introduced in `Sigma`. Not all the API has been converted, so that the user may want to use compatibility functions `Sigma.to_evar_map` and `Sigma.Unsafe.of_evar_map` or `Sigma.Unsafe.of_pair` when needed. Code can be straightforwardly adapted in the following way: ```ocaml let (sigma, x1) = ... in ... let (sigma, xn) = ... in (sigma, ans) ``` should be turned into: ```ocaml open Sigma.Notations let Sigma (x1, sigma, p1) = ... in ... let Sigma (xn, sigma, pn) = ... in Sigma (ans, sigma, p1 +> ... +> pn) ``` Examples of `Sigma.Unsafe.of_evar_map` include: ``` Evarutil.new_evar env (Tacmach.project goal) ty ----> Evarutil.new_evar env (Sigma.Unsafe.of_evar_map (Tacmach.project goal)) ty ``` - The `Proofview.Goal.`*`enter` family of functions now takes a polymorphic continuation given as a record as an argument. ```ocaml Proofview.Goal.enter begin fun gl -> ... end ``` should be turned into ```ocaml open Proofview.Notations Proofview.Goal.enter { enter = begin fun gl -> ... end } ``` - `Tacexpr.TacDynamic(Loc.dummy_loc, Pretyping.constr_in c)` ---> `Tacinterp.Value.of_constr c` - `Vernacexpr.HintsResolveEntry(priority, poly, hnf, path, atom)` ---> `Vernacexpr.HintsResolveEntry(Vernacexpr.({hint_priority = priority; hint_pattern = None}), poly, hnf, path, atom)` - `Pretyping.Termops.mem_named_context` ---> `Engine.Termops.mem_named_context_val` - `Global.named_context` ---> `Global.named_context_val` - `Context.Named.lookup` ---> `Environ.lookup_named_val` ### Search API The main search functions now take a function iterating over the results. This allows for clients to use streaming or more economic printing. ### XML Protocol - In several places, flat text wrapped in `` tags now appears as structured text inside `` tags. - The "errormsg" feedback has been replaced by a "message" feedback which contains `` tag, with a message_level attribute of "error". ## Changes between Coq 8.4 and Coq 8.5 ### Refactoring : more mli interfaces and simpler grammar.cma - A new directory intf/ now contains mli-only interfaces : * `Constrexpr` : definition of `constr_expr`, was in `Topconstr` * `Decl_kinds` : now contains `binding_kind = Explicit | Implicit` * `Evar_kinds` : type `Evar_kinds.t` was previously `Evd.hole_kind` * `Extend` : was `parsing/extend.mli` * `Genredexpr` : regroup `Glob_term.red_expr_gen` and `Tacexpr.glob_red_flag` * `Glob_term` : definition of `glob_constr` * `Locus` : definition of occurrences and stuff about clauses * `Misctypes` : `intro_pattern_expr`, `glob_sort`, `cast_type`, `or_var`, ... 
* `Notation_term` : contains `notation_constr`, was `Topconstr.aconstr` * `Pattern` : contains `constr_pattern` * `Tacexpr` : was `tactics/tacexpr.ml` * `Vernacexpr` : was `toplevel/vernacexpr.ml` - Many files have been divided : * vernacexpr: vernacexpr.mli + Locality * decl_kinds: decl_kinds.mli + Kindops * evd: evar_kinds.mli + evd * tacexpr: tacexpr.mli + tacops * glob_term: glob_term.mli + glob_ops + genredexpr.mli + redops * topconstr: constrexpr.mli + constrexpr_ops + notation_expr.mli + notation_ops + topconstr * pattern: pattern.mli + patternops * libnames: libnames (qualid, reference) + globnames (global_reference) * egrammar: egramml + egramcoq - New utility files : miscops (cf. misctypes.mli) and redops (cf genredexpr.mli). - Some other directory changes : * grammar.cma and the source files specific to it are now in grammar/ * pretty-printing files are now in printing/ - Inner-file changes : * aconstr is now notation_constr, all constructors for this type now start with a N instead of a A (e.g. NApp instead of AApp), and functions about aconstr may have been renamed (e.g. match_aconstr is now match_notation_constr). * occurrences (now in Locus.mli) is now an algebraic type, with - AllOccurrences instead of all_occurrences_expr = (false,[]) - (AllOccurrencesBut l) instead of (all_occurrences_expr_but l) = (false,l) - NoOccurrences instead of no_occurrences_expr = (true,[]) - (OnlyOccurrences l) instead of (no_occurrences_expr_but l) = (true,l) * move_location (now in Misctypes) has two new constructors MoveFirst and MoveLast replacing (MoveToEnd false) and (MoveToEnd true) - API of pretyping.ml and constrintern.ml has been made more uniform * Parametrization of understand_* functions is now made using "inference flags" * Functions removed: - interp_constr_judgment (inline its former body if really needed) - interp_casted_constr, interp_type: use instead interp_constr with expected_type set to OfType or to IsType - interp_gen: use any of interp_constr, interp_casted_constr, interp_type - interp_open_constr_patvar - interp_context: use interp_context_evars (with a "evar_map ref") and call solve_remaining_evars afterwards with a failing flag (e.g. all_and_fail_flags) - understand_type, understand_gen: use understand with appropriate parameters * Change of semantics: - Functions interp_*_evars_impls have a different interface and do not any longer check resolution of evars by default; use check_evars_are_solved explicitly to check that evars are solved. See also the corresponding commit log. - Tactics API: new_induct -> induction; new_destruct -> destruct; letin_pat_tac do not accept a type anymore - New file find_subterm.ml for gathering former functions `subst_closed_term_occ_modulo`, `subst_closed_term_occ_decl` (which now take and outputs also an `evar_map`), and `subst_closed_term_occ_modulo`, `subst_closed_term_occ_decl_modulo` (now renamed into `replace_term_occ_modulo` and `replace_term_occ_decl_modulo`). - API of Inductiveops made more uniform (see commit log or file itself). - API of intros_pattern style tactic changed; "s" is dropped in "intros_pattern" and "intros_patterns" is not anymore behaving like tactic "intros" on the empty list. - API of cut tactics changed: for instance, cut_intro should be replaced by "assert_after Anonymous" - All functions taking an env and a sigma (or an evdref) now takes the env first. 
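To make the last convention concrete, here is a minimal, hedged sketch of the env-before-sigma argument order; `check_conv` is a made-up helper name (only the parameter order is the point), and it assumes `Reductionops.is_conv` keeps its usual `env -> evar_map -> constr -> constr -> bool` signature:

```ocaml
(* Hypothetical helper written against the pre-8.5 convention, where the
   evar_map (or an evdref) came before the environment. *)
let check_conv_old sigma env c1 c2 = Reductionops.is_conv env sigma c1 c2

(* The same helper following the 8.5 convention: env first, then sigma. *)
let check_conv env sigma c1 c2 = Reductionops.is_conv env sigma c1 c2
```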
## Changes between Coq 8.3 and Coq 8.4

- Functions in unification.ml now take the evar_map just after the env.
- Removal of Tacinterp.constr_of_id: use instead either global_reference or construct_reference in constrintern.ml.
- Optimizing calls to Evd functions: evars are split into defined evars and undefined evars; for efficiency, when an evar is known to be undefined, it is preferable to use the specific functions for undefined evars, since these are generally fewer than the defined ones.
- Type changes in TACTIC EXTEND rules: arguments bound with tactic(_) in TACTIC EXTEND rules are now of type glob_tactic_expr, instead of glob_tactic_expr * tactic. Only the first component is kept; the second one can be obtained via Tacinterp.eval_tactic.
- ARGUMENT EXTEND: it is now forbidden to use TYPED simultaneously with {RAW,GLOB}_TYPED in ARGUMENT EXTEND statements.
- Renaming of rawconstr to glob_constr: the "rawconstr" type has been renamed to "glob_constr" for consistency. The "raw" in everything related to the former rawconstr has been changed to "glob". For more details about the rationale and scripts to migrate code using Coq's internals, see commits 13743, 13744, 13755, 13756, 13757, 13758, 13761 (by glondu, end of December 2010) in the Subversion repository. Contribs have been fixed too, and the commit messages there might also be helpful for migrating.

## Changes between Coq 8.2 and Coq 8.3

### Light cleaning in evarutil.ml

- whd_castappevar is now whd_head_evar
- the obsolete whd_ise disappears

### Restructuring of the syntax of binders

```
binders_let -> binders
binders_let_fixannot -> binders_fixannot
binder_let -> closed_binder (and now covers only bracketed binders)
binder was already obsolete and has been removed
```

### Semantic change of h_induction_destruct

Warning: the order of the isrec and evar_flag arguments was inconsistent and has been permuted. Tactic induction_destruct in tactics.ml is unchanged.

### Internal tactics renamed

There is no more difference between bindings and ebindings.
The following tactics are therefore renamed ``` apply_with_ebindings_gen -> apply_with_bindings_gen left_with_ebindings -> left_with_bindings right_with_ebindings -> right_with_bindings split_with_ebindings -> split_with_bindings ``` and the following tactics are removed - apply_with_ebindings (use instead apply_with_bindings) - eapply_with_ebindings (use instead eapply_with_bindings) ### Obsolete functions in typing.ml For mtype_of, msort_of, mcheck, now use type_of, sort_of, check ### Renaming functions renamed ``` concrete_name -> compute_displayed_name_in concrete_let_name -> compute_displayed_let_name_in rename_rename_bound_var -> rename_bound_vars_as_displayed lookup_name_as_renamed -> lookup_name_as_displayed next_global_ident_away true -> next_ident_away_in_goal next_global_ident_away false -> next_global_ident_away ``` ### Cleaning in command.ml Functions about starting/ending a lemma are in lemmas.ml Functions about inductive schemes are in indschemes.ml Functions renamed: ``` declare_one_assumption -> declare_assumption declare_assumption -> declare_assumptions Command.syntax_definition -> Metasyntax.add_syntactic_definition declare_interning_data merged with add_notation_interpretation compute_interning_datas -> compute_full_internalization_env implicits_env -> internalization_env full_implicits_env -> full_internalization_env build_mutual -> do_mutual_inductive build_recursive -> do_fixpoint build_corecursive -> do_cofixpoint build_induction_scheme -> build_mutual_induction_scheme build_indrec -> build_induction_scheme instantiate_type_indrec_scheme -> weaken_sort_scheme instantiate_indrec_scheme -> modify_sort_scheme make_case_dep, make_case_nodep -> build_case_analysis_scheme make_case_gen -> build_case_analysis_scheme_default ``` Types: decl_notation -> decl_notation option ### Cleaning in libnames/nametab interfaces Functions: ``` dirpath_prefix -> pop_dirpath extract_dirpath_prefix pop_dirpath_n extend_dirpath -> add_dirpath_suffix qualid_of_sp -> qualid_of_path pr_sp -> pr_path make_short_qualid -> qualid_of_ident sp_of_syntactic_definition -> path_of_syntactic_definition sp_of_global -> path_of_global id_of_global -> basename_of_global absolute_reference -> global_of_path locate_syntactic_definition -> locate_syndef path_of_syntactic_definition -> path_of_syndef push_syntactic_definition -> push_syndef ``` Types: section_path -> full_path ### Cleaning in parsing extensions (commit 12108) Many moves and renamings, one new file (Extrawit, that contains wit_tactic). 
### Cleaning in tactical.mli ``` tclLAST_HYP -> onLastHyp tclLAST_DECL -> onLastDecl tclLAST_NHYPS -> onNLastHypsId tclNTH_DECL -> onNthDecl tclNTH_HYP -> onNthHyp onLastHyp -> onLastHypId onNLastHyps -> onNLastDecls onClauses -> onClause allClauses -> allHypsAndConcl ``` and removal of various unused combinators on type "clause" ## Changes between Coq 8.1 and Coq 8.2 ### Datatypes List of occurrences moved from "int list" to "Termops.occurrences" (an alias to "bool * int list") ETIdent renamed to ETName ### Functions ``` Eauto: e_resolve_constr, vernac_e_resolve_constr -> simplest_eapply Tactics: apply_with_bindings -> apply_with_bindings_wo_evars Eauto.simplest_apply -> Hiddentac.h_simplest_apply Evarutil.define_evar_as_arrow -> define_evar_as_product Old version of Tactics.assert_tac disappears Tactics.true_cut renamed into Tactics.assert_tac Constrintern.interp_constrpattern -> intern_constr_pattern Hipattern.match_with_conjunction is a bit more restrictive Hipattern.match_with_disjunction is a bit more restrictive ``` ### Universe names (univ.mli) ```ocaml base_univ -> type0_univ (* alias of Set is the Type hierarchy *) prop_univ -> type1_univ (* the type of Set in the Type hierarchy *) neutral_univ -> lower_univ (* semantic alias of Prop in the Type hierarchy *) is_base_univ -> is_type1_univ is_empty_univ -> is_lower_univ ``` ### Sort names (term.mli) ``` mk_Set -> set_sort mk_Prop -> prop_sort type_0 -> type1_sort ``` ## Changes between Coq 8.0 and Coq 8.1 ### Functions - Util: option_app -> option_map - Term: substl_decl -> subst_named_decl - Lib: library_part -> remove_section_part - Printer: prterm -> pr_lconstr - Printer: prterm_env -> pr_lconstr_env - Ppconstr: pr_sort -> pr_rawsort - Evd: in_dom, etc got standard ocaml names (i.e. mem, etc) - Pretyping: - understand_gen_tcc and understand_gen_ltac merged into understand_ltac - type_constraints can now say typed by a sort (use OfType to get the previous behavior) - Library: import_library -> import_module ### Constructors * Declarations: mind_consnrealargs -> mind_consnrealdecls * NoRedun -> NoDup * Cast and RCast have an extra argument: you can recover the previous behavior by setting the extra argument to "CastConv DEFAULTcast" and "DEFAULTcast" respectively * Names: "kernel_name" is now "constant" when argument of Term.Const * Tacexpr: TacTrueCut and TacForward(false,_,_) merged into new TacAssert * Tacexpr: TacForward(true,_,_) branched to TacLetTac ### Modules * module Decl_kinds: new interface * module Bigint: new interface * module Tacred spawned module Redexpr * module Symbols -> Notation * module Coqast, Ast, Esyntax, Termast, and all other modules related to old syntax are removed * module Instantiate: integrated to Evd * module Pretyping now a functor: use Pretyping.Default instead ### Internal names OBJDEF and OBJDEF1 -> CANONICAL-STRUCTURE ### Tactic extensions * printers have an extra parameter which is a constr printer at high precedence * the tactic printers have an extra arg which is the expected precedence * level is now a precedence in declare_extra_tactic_pprule * "interp" functions now of types the actual arg type, not its encapsulation as a generic_argument ## Changes between Coq 7.4 and Coq 8.0 See files in dev/syntax-v8 ## Main changes between Coq 7.4 and Coq 8.0 ### Changes due to introduction of modules #### Kernel The module level has no effect on constr except for the structure of section_path. 
The type of unique names for constructions (what section_path served) is now called a kernel name and is defined by ```ocaml type uniq_ident = int * string * dir_path (* int may be enough *) type module_path = | MPfile of dir_path (* reference to physical module, e.g. file *) | MPbound of uniq_ident (* reference to a module parameter in a functor *) | MPself of uniq_ident (* reference to one of the containing module *) | MPdot of module_path * label type label = identifier type kernel_name = module_path * dir_path * label ^^^^^^^^^^^ ^^^^^^^^ ^^^^^ | | \ | | the base name | \ / the (true) section path example: (non empty only inside open sections) L = (* i.e. some file of logical name L *) struct module A = struct Def a = ... end end M = (* i.e. some file of logical name M *) struct Def t = ... N = functor (X : sig module T = struct Def b = ... end end) -> struct module O = struct Def u = ... end Def x := ... .t ... .O.u ... X.T.b ... L.A.a ``` and are self-references, X is a bound reference and L is a reference to a physical module. Notice that functor application is not part of a path: it must be named by a "module M = F(A)" declaration to be used in a kernel name. Notice that Jacek chose a practical approach, making directories not modules. Another approach could have been to replace the constructor MPfile by a constant constructor MProot representing the root of the world. Other relevant informations are in kernel/entries.ml (type module_expr) and kernel/declarations.ml (type module_body and module_type_body). #### Library 1. tables [Summaries] - the only change is the special treatment of the global environmet. 2. objects [Libobject] declares persistent objects, given with methods: * cache_function specifying how to add the object in the current scope; * load_function, specifying what to do when the module containing the object is loaded; * open_function, specifying what to do when the module containing the object is opened (imported); * classify_function, specyfying what to do with the object, when the current module (containing the object) is ended. * subst_function * export_function, to signal end_section survival (Almost) Each of these methods is called with a parameter of type object_name = section_path * kernel_name where section_path is the full user name of the object (such as Coq.Init.Datatypes.Fst) and kernel_name is its substitutive internal version such as (MPself,[],"Fst") (see above) #### What happens at the end of an interactive module ? (or when a file is stored and reloaded from disk) All summaries (except Global environment) are reverted to the state from before the beginning of the module, and: 1. the objects (again, since last Declaremods.start_module or Library.start_library) are classified using the classify_function. To simplify consider only those who returned Substitute _ or Keep _. 2. If the module is not a functor, the subst_function for each object of the first group is called with the substitution [MPself "" |-> MPfile "Coq.Init.Datatypes"]. Then the load_function is called for substituted objects and the "keep" object. (If the module is a library the substitution is done at reloading). 3. The objects which returned substitute are stored in the modtab together with the self ident of the module, and functor argument names if the module was a functor. They will be used (substituted and loaded) when a command like Module M := F(N) or Module Z := N is evaluated #### The difference between "substitute" and "keep" objects 1. 
The "keep" objects can _only_ reference other objects by section_paths and qualids. They do not need the substitution function. They will work after end_module (or reloading a compiled library), because these operations do not change section_path's They will obviously not work after Module Z:=N. These would typically be grammar rules, pretty printing rules etc. 2. The "substitute" objects can _only_ reference objects by kernel_names. They must have a valid subst_function. They will work after end_module _and_ after Module Z:=N or Module Z:=F(M). Other kinds of objects: 3. "Dispose" - objects which do not survive end_module As a consequence, objects which reference other objects sometimes by kernel_names and sometimes by section_path must be of this kind... 4. "Anticipate" - objects which must be treated individually by end_module (typically "REQUIRE" objects) #### Writing subst_thing functions The subst_thing should not copy the thing if it hasn't actually changed. There are some cool emacs macros in dev/objects.el to help writing subst functions this way quickly and without errors. Also there are *_smartmap functions in Util. The subst_thing functions are already written for many types, including constr (Term.subst_mps), global_reference (Libnames.subst_global), rawconstr (Rawterm.subst_raw) etc They are all (apart from constr, for now) written in the non-copying way. #### Nametab Nametab has been made more uniform. For every kind of thing there is only one "push" function and one "locate" function. #### Lib library_segment is now a list of object_name * library_item, where object_name = section_path * kernel_name (see above) New items have been added for open modules and module types #### Declaremods Functions to declare interactive and noninteractive modules and module types. #### Library Uses Declaremods to actually communicate with Global and to register objects. ### Other changes Internal representation of tactics bindings has changed (see type Rawterm.substitution). New parsing model for tactics and vernacular commands - Introduction of a dedicated type for tactic expressions (Tacexpr.raw_tactic_expr) - Introduction of a dedicated type for vernac expressions (Vernacexpr.vernac_expr) - Declaration of new vernacular parsing rules by a new camlp4 macro GRAMMAR COMMAND EXTEND ... END to be used in ML files - Declaration of new tactics parsing/printing rules by a new camlp4 macro TACTIC EXTEND ... END to be used in ML files New organisation of THENS: - tclTHENS tac tacs : tacs is now an array - tclTHENSFIRSTn tac1 tacs tac2 : apply tac1 then, apply the array tacs on the first n subgoals and tac2 on the remaining subgoals (previously tclTHENST) - tclTHENSLASTn tac1 tac2 tacs : apply tac1 then, apply tac2 on the first subgoals and apply the array tacs on the last n subgoals - tclTHENFIRSTn tac1 tacs = tclTHENSFIRSTn tac1 tacs tclIDTAC (prev. tclTHENSI) - tclTHENLASTn tac1 tacs = tclTHENSLASTn tac1 tclIDTAC tacs - tclTHENFIRST tac1 tac2 = tclTHENFIRSTn tac1 [|tac2|] - tclTHENLAST tac1 tac2 = tclTHENLASTn tac1 [|tac2|] (previously tclTHENL) - tclTHENS tac1 tacs = tclTHENSFIRSTn tac1 tacs (fun _ -> error "wrong number") - tclTHENSV same as tclTHENS but with an array - tclTHENSi : no longer available Proof_type: subproof field in type proof_tree glued with the ref field Tacmach: no more echo from functions of module Refiner Files plugins/*/g_*.ml4 take the place of files plugins/*/*.v. 
Files parsing/{vernac,tac}extend.ml{4,i} implements TACTIC EXTEND andd VERNAC COMMAND EXTEND macros File syntax/PPTactic.v moved to parsing/pptactic.ml Tactics about False and not now in tactics/contradiction.ml Tactics depending on Init now tactics/*.ml4 (no longer in tactics/*.v) File tacinterp.ml moved from proofs to directory tactics ## Changes between Coq 7.1 and Coq 7.2 The core of Coq (kernel) has meen minimized with the following effects: - kernel/term.ml split into kernel/term.ml, pretyping/termops.ml - kernel/reduction.ml split into kernel/reduction.ml, pretyping/reductionops.ml - kernel/names.ml split into kernel/names.ml, library/nameops.ml - kernel/inductive.ml split into kernel/inductive.ml, pretyping/inductiveops.ml the prefixes "Is" ans "IsMut" have been dropped from kind_of_term constructors, e.g. IsRel is now Rel, IsMutCase is now Case, etc. coq-8.15.0/dev/doc/coq-src-description.txt000066400000000000000000000040631417001151100203350ustar00rootroot00000000000000 Coq main source components (in link order) ------------------------------------------ clib : Basic files in lib/, such as util.ml lib : Other files in lib/ kernel library pretyping interp proofs printing parsing tactics toplevel Special components ------------------ grammar : Camlp5 syntax extensions. The file grammar/grammar.cma is used to pre-process .mlg files containing EXTEND constructions, either TACTIC EXTEND, ARGUMENTS EXTEND or VERNAC ... EXTEND. This grammar.cma incorporates many files from other directories (mainly parsing/), plus some specific files in grammar/. The other syntax extension grammar/q_constr.cmo is a addition to grammar.cma with a constr PATTERN quotation. Hierarchy of A.S.T. ------------------- *** Terms *** ... ... | /\ parsing | | printing | | V | Constrexpr.constr_expr | /\ constrintern | | constrextern (in interp) | | (in interp) globalization | | V | Glob_term.glob_constr | /\ pretyping | | detyping | | (in pretyping) V | Term.constr | /\ safe_typing | | (validation | | by kernel) |______| *** Patterns *** | | parsing V constr_pattern_expr = constr_expr | | Constrintern.interp_constr_pattern (in interp) | reverse way in Constrextern V Pattern.constr_pattern | ---> used for instance by Matching.matches (in pretyping) *** Notations *** Notation_term.notation_constr Conversion from/to glob_constr in Notation_ops TODO... *** Tactics *** | | parsing V Tacexpr.raw_tactic_expr | | Tacinterp.intern_pure_tactic (?) V Tacexpr.glob_tactic_expr | | Tacinterp.eval_tactic (?) V Proofview.V82.tac TODO: check with Hugo *** Vernac expressions *** Vernacexpr.vernac_expr, produced by parsing, used in Vernacentries and Vernac coq-8.15.0/dev/doc/critical-bugs000066400000000000000000000514361417001151100163650ustar00rootroot00000000000000Preliminary compilation of critical bugs in stable releases of Coq ================================================================== WORK IN PROGRESS WITH SEVERAL OPEN QUESTIONS To add: #7723 (vm_compute universe polymorphism), #7695 (modules and algebraic universes), #7615 (lost functor substitutions) Typing constructions component: "match" summary: substitution missing in the body of a let introduced: ? impacted released versions: V8.3-V8.3pl2, V8.4-V8.4pl4 impacted development branches: none impacted coqchk versions: ? fixed in: master/trunk/v8.5 (e583a79b5, 22 Nov 2015, Herbelin), v8.4 (525056f1, 22 Nov 2015, Herbelin), v8.3 (4bed0289, 22 Nov 2015, Herbelin) found by: Herbelin exploit: test-suite/success/Case22.v GH issue number: ? risk: ? 
component: fixpoint, guard summary: missing lift in checking guard introduced: probably from V5.10 impacted released versions: probably V5-V7, V8.0-V8.0pl4, V8.1-V8.1pl4 impacted development branches: v8.0 ? impacted coqchk versions: ? fixed in: master/trunk/v8.2 (ff45afa8, r11646, 2 Dec 2008, Barras), v8.1 (f8e7f273, r11648, 2 Dec 2008, Barras) found by: Barras exploit: test-suite/failure/guard.v GH issue number: none risk: unprobable by chance component: cofixpoint, guard summary: de Bruijn indice bug in checking guard of nested cofixpoints introduced: after V6.3.1, before V7.0 impacted released versions: V8.0-V8.0pl4, V8.1-V8.1pl4, V8.2-V8.2pl2, V8.3-V8.3pl2, V8.4-V8.4pl4 impacted development branches: none impacted coqchk versions: ? fixed in: master (9f81e2c36, 10 Apr 2014, Dénès), v8.4 (f50ec9e7d, 11 Apr 2014, Dénès), v8.3 (40c0fe7f4, 11 Apr 2014, Dénès), v8.2 (06d66df8c, 11 Apr 2014, Dénès), v8.1 (977afae90, 11 Apr 2014, Dénès), v8.0 (f1d632992, 29 Nov 2015, Herbelin, backport) found by: Dénès exploit: ? GH issue number: none ? risk: ? component: inductive types, elimination principle summary: de Bruijn indice bug in computing allowed elimination principle introduced: 23 May 2006, 9c2d70b, r8845, Herbelin (part of universe polymorphism) impacted released versions: V8.1-V8.1pl4, V8.2-V8.2pl2, V8.3-V8.3pl2, V8.4-V8.4pl4 impacted development branches: none impacted coqchk versions: ? fixed in: master (8a01c3685, 24 Jan 2014, Dénès), v8.4 (8a01c3685, 25 Feb 2014, Dénès), v8.3 (2b3cc4f85, 25 Feb 2014, Dénès), v8.2 (459888488, 25 Feb 2014, Dénès), v8.1 (79aa20872, 25 Feb 2014, Dénès) found by: Dénès exploit: see GH#3211 GH issue number: #3211 risk: ? component: universe subtyping summary: bug in Prop<=Set conversion which made Set identifiable with Prop, preventing a proof-irrelevant interpretation of Prop introduced: V8.2 (bba897d5f, 12 May 2008, Herbelin) impacted released versions: V8.2-V8.2pl2 impacted development branches: none impacted coqchk versions: ? fixed in: master/trunk (679801, r13450, 23 Sep 2010, Glondu), v8.3 (309a53f2, r13449, 22 Sep 2010, Glondu), v8.2 (41ea5f08, r14263, 6 Jul 2011, Herbelin, backport) found by: Georgi Guninski exploit: test-suite/failure/prop_set_proof_irrelevance.v GH issue number: none? risk: ? Module system component: modules, universes summary: missing universe constraints in typing "with" clause of a module type introduced: ? impacted released versions: V8.3-V8.3pl2, V8.4-V8.4pl6; unclear for V8.2 and previous versions impacted development branches: none impacted coqchk versions: ? fixed in: master/trunk (d4869e059, 2 Oct 2015, Sozeau), v8.4 (40350ef3b, 9 Sep 2015, Sozeau) found by: Dénès exploit: test-suite/bugs/closed/bug_4294.v GH issue number: #4294 risk: ? 
Module system component: modules, universes summary: universe constraints for module subtyping not stored in vo files introduced: presumably 8.2 (b3d3b56) impacted released versions: 8.2, 8.3, 8.4 impacted development branches: v8.5 impacted coqchk versions: none fixed in: v8.2 (c1d9889), v8.3 (8056d02), v8.4 (a07deb4), trunk (0cd0a3e) Mar 5, 2014, Tassi found by: Tassi by running coqchk on the mathematical components library exploit: requires multiple files, no test provided GH issue number: #3243 risk: could be exploited by mistake Universes component: template polymorphism summary: issue with two parameters in the same universe level introduced: 23 May 2006, 9c2d70b, r8845, Herbelin impacted released versions: V8.1-V8.1pl4, V8.2-V8.2pl2, V8.3-V8.3pl2 impacted development branches: none impacted coqchk versions: ? fixed in: trunk/master/v8.4 (8082d1faf, 5 Oct 2011, Herbelin), V8.3pl3 (bb582bca2, 5 Oct 2011, Herbelin), v8.2 branch (3333e8d3, 5 Oct 2011, Herbelin), v8.1 branch (a8fc2027, 5 Oct 2011, Herbelin), found by: Barras exploit: test-suite/failure/inductive.v GH issue number: none risk: unlikely to be activated by chance component: universe polymorphism summary: universe polymorphism can capture global universes impacted released versions: V8.5 to V8.8 impacted coqchk versions: V8.5 to V8.9 fixed in: ec4aa4971f (58e1d0f200 for the checker) found by: Gaëtan Gilbert exploit: test-suite/misc/poly-capture-global-univs GH issue number: #8341 risk: unlikely to be activated by chance (requires a plugin) component: template polymorphism summary: template polymorphism not collecting side constrains on the universe level of a parameter; this is a general form of the previous issue about template polymorphism exploiting other ways to generate untracked constraints introduced: morally at the introduction of template polymorphism, 23 May 2006, 9c2d70b, r8845, Herbelin impacted released versions: at least V8.4-V8.4pl6, V8.5-V8.5pl3, V8.6-V8.6pl2, V8.7.0-V8.7.1, V8.8.0-V8.8.1, V8.9.0-V8.9.1, in theory also V8.1-V8.1pl4, V8.2-V8.2pl2, V8.3-V8.3pl2 but not exploit found there yet (an exploit using a plugin to force sharing of universe level is in principle possible though) impacted development branches: all from 8.4 to 8.9 at the time of writing and suspectingly also all from 8.1 to 8.4 if a way to create untracked constraints can be found impacted coqchk versions: a priori all (tested with V8.4 and V8.9 which accept the exploit) fixed in: soon in master and V8.10.0 (PR #9918, Aug 2019, Dénès and Sozeau) found by: Gilbert using explicit sharing of universes, exploit found for 8.5-8.9 by Pédrot, other variants generating sharing using sections, or using ltac tricks by Sozeau, exploit in 8.4 by Herbelin and Jason Gross by adding new tricks to Sozeau's variants exploit: test-suite/failure/Template.v GH issue number: #9294 risk: moderate risk to be activated by chance component: template polymorphism summary: using the same universe in the parameters and the constructor arguments of a template polymorphic inductive (using named universes in modern Coq, or unification tricks in older Coq) produces implicit equality constraints not caught by the previous template polymorphism fix. 
introduced: same as the previous template polymorphism bug, morally from V8.1, first verified impacted version V8.5 (the universe unification is sufficiently different in V8.4 to prevent our trick from working) fixed in: expected in 8.10.2, 8.11+beta, master (#11128, Nov 2019, Gilbert) found by: Gilbert exploit: test-suite/bugs/closed/bug_11039.v GH issue number: #11039 risk: moderate risk (found by investigating #10504) component: universe polymorphism, asynchronous proofs summary: universe constraints erroneously discarded when forcing an asynchronous proof containing delayed monomorphic constraints inside a universe polymorphic section introduced: between 8.4 and 8.5 by merging the asynchronous proofs feature branch and universe polymorphism one impacted released versions: V8.5-V8.10 impacted development branches: none impacted coqchk versions: immune fixed in: PR#10664 found by: Pédrot exploit: no test GH issue number: none risk: unlikely to be triggered in interactive mode, not present in batch mode (i.e. coqc) component: algebraic universes summary: Set+2 was incorrectly simplified to Set+1 introduced: V8.10 (with the SProp commit 75508769762372043387c67a9abe94e8f940e80a) impacted released versions: V8.10.0 V8.10.1 V8.10.2 impacted coqchk versions: same fixed in: PR#11422 found by: Gilbert exploit: see PR (custom application of Hurkens to get around the refreshing at elaboration) GH issue number: see PR risk: unlikely to be triggered through the vernacular (the system "refreshes" algebraic universes such that +2 increments do not appear), mild risk from plugins which manipulate algebraic universes. Primitive projections component: primitive projections, guard condition summary: check of guardedness of extra arguments of primitive projections missing introduced: 6 May 2014, a4043608f, Sozeau impacted released versions: V8.5-V8.5pl2, impacted development branches: none impacted coqchk versions: ? fixed in: trunk/master/v8.5 (ba00867d5, 25 Jul 2016, Sozeau) found by: Sozeau, by analyzing bug report #4876 exploit: to be done (?) GH issue number: #4876 risk: consequence of bug found by chance, unlikely to be exploited by chance (MS?) component: primitive projections, guard condition summary: records based on primitive projections became possibly recursive without the guard condition being updated introduced: 10 Sep 2014, 6624459e4, Sozeau (?) impacted released versions: V8.5 impacted development branches: none impacted coqchk versions: ? fixed in: trunk/master/v8.5 (120053a50, 4 Mar 2016, Dénès) found by: Dénès exploiting bug #4588 exploit: test-suite/bugs/closed/bug_4588.v GH issue number: #4588 risk: ? 
Conversion machines component: "lazy machine" (lazy krivine abstract machine) summary: the invariant justifying some optimization was wrong for some combination of sharing side effects introduced: prior to V7.0 impacted released versions: V8.0-V8.0pl4, V8.1-V8.1pl3 impacted development branches: none impacted coqchk versions: (eefe63d52, Barras, 20 May 2008), was in beta-development for 8.2 at this time fixed in: master/trunk/8.2 (f13aaec57/a8b034513, 15 May 2008, Barras), v8.1 (e7611477a, 15 May 2008, Barras), v8.0 (6ed40a8bc, 29 Nov 2016, Herbelin, backport) found by: Gonthier exploit: by Gonthier GH issue number: none risk: unrealistic to be exploited by chance component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: collision between constructors when more than 256 constructors in a type introduced: V8.1 impacted released versions: V8.1-V8.5pl3, V8.2-V8.2pl2, V8.3-V8.3pl3, V8.4-V8.4pl5 impacted development branches: none impacted coqchk versions: none (no virtual machine in coqchk) fixed in: master/trunk/v8.5 (00894adf6/596a4a525, 26-39 Mar 2015, Grégoire), v8.4 (cd2101a39, 1 Apr 2015, Grégoire), v8.3 (a0c7fc05b, 1 Apr 2015, Grégoire), v8.2 (2c6189f61, 1 Apr 2015, Grégoire), v8.1 (bb877e5b5, 29 Nov 2015, Herbelin, backport) found by: Dénès, Pédrot exploit: test-suite/bugs/closed/bug_4157.v GH issue number: #4157 risk: component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: wrong universe constraints introduced: possibly exploitable from V8.1; exploitable at least from V8.5 impacted released versions: V8.1-V8.4pl5 unknown, V8.5-V8.5pl3, V8.6-V8.6.1, V8.7.0-V8.7.1 impacted development branches: unknown for v8.1-v8.4, none from v8.5 impacted coqchk versions: none (no virtual machine in coqchk) fixed in: master (c9f3a6cbe, 12 Feb 2018, PR#6713, Dénès), v8.7 (c058a4182, 15 Feb 2018, Zimmermann, backport), v8.6 (a2cc54c64, 21 Feb 2018, Herbelin, backport), v8.5 (d4d550d0f, 21 Feb 2018, Herbelin, backport) found by: Dénès exploit: test-suite/bugs/closed/bug_6677.v GH issue number: #6677 risk: component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: missing pops in executing 31bit arithmetic introduced: V8.5 impacted released versions: V8.1-V8.4pl5 impacted development branches: v8.1 (probably) impacted coqchk versions: none (no virtual machine in coqchk) fixed in: master/trunk/v8.5 (a5e04d9dd, 6 Sep 2015, Dénès), v8.4 (d5aa3bf6, 9 Sep 2015, Dénès), v8.3 (5da5d751, 9 Sep 2015, Dénès), v8.2 (369e82d2, 9 Sep 2015, Dénès), found by: Catalin Hritcu exploit: lost? GH issue number: ? 
risk: component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: primitive integer emulation layer on 32 bits not robust to garbage collection introduced: master (before v8.10 in GH pull request #6914) impacted released versions: none impacted development branches: impacted coqchk versions: none (no virtual machine in coqchk) fixed in: found by: Roux, Melquiond exploit: GH issue number: #9925 risk: component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: broken long multiplication primitive integer emulation layer on 32 bits introduced: e43b176 impacted released versions: 8.10.0, 8.10.1, 8.10.2 impacted development branches: 8.11 impacted coqchk versions: none (no virtual machine in coqchk) fixed in: 4e176a7 found by: Soegtrop, Melquiond exploit: test-suite/bugs/closed/bug_11321.v GH issue number: #11321 risk: critical, as any BigN computation on 32-bit architectures is wrong component: "native" conversion machine (translation to OCaml which compiles to native code) summary: translation of identifier from Coq to OCaml was not bijective, leading to identify True and False introduced: V8.5 impacted released versions: V8.5-V8.5pl1 impacted development branches: none impacted coqchk versions: none (no native computation in coqchk) fixed in: master/trunk/v8.6 (244d7a9aa, 19 May 2016, letouzey), v8.5 (088b3161c, 19 May 2016, letouzey), found by: Letouzey, Dénès exploit: see commit message for 244d7a9aa GH issue number: ? risk: component: primitive projections, native_compute summary: stuck primitive projections computed incorrectly by native_compute introduced: 1 Jun 2018, e1e7888a, ppedrot impacted released versions: 8.9.0 impacted coqchk versions: none found by: maximedenes exploiting bug #9684 exploit: test-suite/bugs/closed/bug_9684.v GH issue number: #9684 component: lazy machine summary: incorrect De Bruijn handling when inferring the relevance mark for a lambda introduced: 2019-03-15, 23f84f37c674a07e925925b7e0d50d7ee8414093 and 71b9ad8526155020c8451dd326a52e391a9a8585, SkySkimmer impacted released versions: 8.10.0 impacted coqchk versions: 8.10.0 found by: ppedrot investigating unexpected conversion failures with SProp exploit: test-suite/bugs/closed/bug_10904.v GH issue number: #10904 risk: none without using -allow-sprop (off by default in 8.10.0), otherwise could be exploited by mistake component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: buffer overflow on large accumulators introduced: 8.1 impacted released versions: 8.1-8.12.1 impacted coqchk versions: none (no virtual machine in coqchk) fixed in: 8.13.0 found by: Dolan, Roux, Melquiond GH issue number: ocaml/ocaml#6385, #11170 risk: medium, as it can happen for large irreducible applications component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: buffer overflow on large records and closures introduced: 8.1 impacted released versions: 8.1-now impacted coqchk versions: none (no virtual machine in coqchk) fixed in: found by: Dolan, Roux, Melquiond GH issue number: ocaml/ocaml#6385, #11170 risk: unlikely to be activated by chance, might happen for autogenerated code component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: buffer overflow, arbitrary code execution on floating-point operations introduced: 8.13 impacted released versions: 8.13.0 impacted coqchk versions: none (no virtual machine in coqchk) fixed in: 8.13.1 found by: Melquiond GH issue number: #13867 risk: none, unless 
using floating-point operations; high otherwise; noticeable if activated by chance, since it usually breaks control-flow integrity component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: arbitrary code execution on irreducible PArray.set introduced: 8.13 impacted released versions: 8.13.0, 8.13.1 impacted coqchk versions: none (no virtual machine in coqchk) fixed in: 8.13.2 found by: Melquiond GH issue number: #13998 risk: none, unless using primitive array operations; systematic otherwise component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: arbitrary code execution on arrays of floating point numbers introduced: 8.13 impacted released versions: 8.13.0, 8.13.1, 8.14.0 impacted coqchk versions: none (no virtual machine in coqchk) fixed in: 8.14.1 found by: Melquiond GH issue number: #15070 risk: none, unless mixing open terms and primitive floats inside primitive arrays; critical otherwise Side-effects component: side-effects summary: polymorphic side-effects inside monomorphic definitions incorrectly handled as not inlined introduced: ? impacted released versions: at least from 8.6 to 8.12.0 impacted coqchk versions: none (no side-effects in the checker) found by: ppedrot exploit: test-suite/bugs/closed/bug_13330.v GH issue number: #13330 risk: unlikely to be exploited by mistake, requires the use of unsafe tactics Forgetting unsafe flags component: sections summary: unsafe typing flags used inside a section would not be reported by Print Assumptions after closing the section introduced: abab878b8d8b5ca85a4da688abed68518f0b17bd (#10291, 8.11) technically available earlier through plugins impacted coqchk versions: none (coqchk rejects affected files) found by: Anton Trunov GH issue number: #14317 risk: low as it needs the use of explicit unsafe flags Conflicts with axioms in library component: library of real numbers summary: axiom of description and decidability of equality on real numbers in library Reals was inconsistent with impredicative Set introduced: 67c75fa01, 20 Jun 2002 impacted released versions: 7.3.1, 7.4 impacted coqchk versions: fixed by deciding to drop impredicativity of Set: bac707973, 28 Oct 2004 found by: Herbelin & Werner exploit: need to find the example again GH issue number: no risk: unlikely to be exploited by chance component: library of extensional sets, guard condition summary: guard condition was unknown to be inconsistent with propositional extensionality in library Sets introduced: not a bug per se but an incompatibility discovered late impacted released versions: technically speaking from V6.1 with the introduction of the Sets library which was then inconsistent from the very beginning without we knew it impacted coqchk versions: ? fixed by constraining the guard condition: (9b272a8, ccd7546c 28 Oct 2014, Barras, Dénès) found by: Schepler, Dénès, Azevedo de Amorim exploit: ? GH issue number: none risk: unlikely to be exploited by chance (?) component: library for axiom of choice and excluded-middle summary: incompatibility axiom of choice and excluded-middle with elimination of large singletons to Set introduced: not a bug but a change of intended "model" impacted released versions: strictly before 8.1 impacted coqchk versions: ? 
fixed by constraining singleton elimination: b19397ed8, r9633, 9 Feb 2007, Herbelin found by: Benjamin Werner exploit: GH issue number: none risk: component: primitive floating-points summary: Incorrect specification of PrimFloat.leb introduced: 8.11 impacted released versions: 8.11.0, 8.11.1, 8.11.2 fixed by fixing the spec: #12484 found by: Pierre Roux exploit: test-suite/bugs/closed/bug_12483.v GH issue number: #12483 risk: proof of false when using the incorrect axiom There were otherwise several bugs in beta-releases, from memory, bugs with beta versions of primitive projections or template polymorphism or native compilation or guard (e7fc96366, 2a4d714a1). There were otherwise maybe unexploitable kernel bugs, e.g. 2df88d83 (Require overloading), 0adf0838 ("Univs: uncovered bug in strengthening of opaque polymorphic definitions."), 5122a398 (#3746 about functors), #4346 (casts in VM), a14bef4 (guard condition in 8.1), 6ed40a8 ("Georges' bug" with ill-typed lazy machine), and various other bugs in 8.0 or 8.1 w/o knowing if they are critical. Another non exploitable bug? component: "virtual machine" (compilation to bytecode ran by a C-interpreter) summary: bug in 31bit arithmetic introduced: V8.1 impacted released versions: none impacted development branches: impacted coqchk versions: none (no virtual machine in coqchk) fixed in: master/trunk/v8.5 (0f8d1b92c, 6 Sep 2015, Dénès) found by: Dénès, from a bug report by Tahina Ramananandro exploit: ? GH issue number: ? risk: coq-8.15.0/dev/doc/debugging.md000066400000000000000000000036621417001151100161650ustar00rootroot00000000000000Debugging from Coq toplevel using Caml trace mechanism ====================================================== 1. Launch bytecode version of Coq (coqtop.byte) 2. Access Ocaml toplevel using vernacular command 'Drop.' 3. Install load paths and pretty printers for terms, idents, ... using Ocaml command '#use "base_include";;' (use '#use "include";;' for installing the advanced term pretty printers) 4. Use #trace to tell which function(s) to trace 5. Go back to Coq toplevel with 'go();;' 6. Test your Coq command and observe the result of tracing your functions 7. Freely switch from Coq to Ocaml toplevels with 'Drop.' and 'go();;' You can avoid typing #use "include" (or "base_include") after Drop by adding the following lines in your $HOME/.ocamlinit : if Filename.basename Sys.argv.(0) = "coqtop.byte" then ignore (Toploop.use_silently Format.std_formatter "include") Hints: To remove high-level pretty-printing features (coercions, notations, ...), use "Set Printing All". It will affect the #trace printers too. Debugging with ocamldebug from Emacs or command line ==================================================== See [build-system.dune.md#ocamldebug](build-system.dune.md#ocamldebug) Global gprof-based profiling ============================ Coq must be configured with option -profile 1. Run native Coq which must end normally (use Quit or option -batch) 2. gprof ./coqtop gmon.out Per function profiling ====================== To profile function foo in file bar.ml, add the following lines, just after the definition of the function: let fookey = CProfile.declare_profile "foo";; let foo a b c = CProfile.profile3 fookey foo a b c;; where foo is assumed to have three arguments (adapt using Profile.profile1, Profile. profile2, etc). This has the effect to cumulate the time passed in foo under a line of name "foo" which is displayed at the time coqtop exits. 
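For readability, here is the per-function profiling idiom from the paragraph above laid out as a small self-contained sketch; the body of `foo` and its three arguments are placeholders, only the instrumentation pattern is taken from the text:

```ocaml
(* Original definition of [foo] (placeholder body and arguments). *)
let foo a b c = a + b + c

(* Instrumentation: declare a profiling key and shadow [foo] with a wrapper
   that accumulates the time spent in it under the name "foo", reported when
   coqtop exits. *)
let fookey = CProfile.declare_profile "foo"
let foo a b c = CProfile.profile3 fookey foo a b c

(* For other arities, use CProfile.profile1, CProfile.profile2, etc. *)
```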
coq-8.15.0/dev/doc/drop.txt000066400000000000000000000025431417001151100154120ustar00rootroot00000000000000When you start byte-compiled Coq toplevel: rlwrap bin/coqtop.byte then if you type: Drop. you will decend from Coq toplevel down to Ocaml toplevel. So if you want to learn: - the current values of some global variables you are interested in - or see what happens when you invoke certain functions this is the place where you can do that. When you try to print values belonging to abstract data types: # let sigma, env = Lemmas.get_current_context ();; val sigma : Evd.evar_map = val env : Environ.env = # Typeops.infer env (snd (Pretyping.understand_tcc env sigma (Constrintern.intern_constr env (Pcoq.parse_string Pcoq.Constr.lconstr "plus"))));; - : Environ.unsafe_judgment = {Environ.uj_val = ; uj_type = } the printed values are not very helpful. One way how to deal with that is to load the corresponding printers: # #use "dev/include";; Consequently, the result of: # Typeops.infer env (snd (Pretyping.understand_tcc env sigma (Constrintern.intern_constr env (Pcoq.parse_string Pcoq.Constr.lconstr "plus"))));; will be printed as: - : Environ.unsafe_judgment = Nat.add : nat -> nat -> nat which makes more sense. To be able to understand the meaning of the data types, sometimes the best option is to turn those data types from abstract to concrete and look at them without any kind of pretty printing. coq-8.15.0/dev/doc/econstr.md000066400000000000000000000130041417001151100156760ustar00rootroot00000000000000# Evar-insensitive terms (EConstr) Evar-insensitive terms were introduced in 8.7, following [CEP #10](https://github.com/coq/ceps/blob/master/text/010-econstr.md). We will not recap the motivations in this document and rather summarize the code changes to perform. ## Overview The essential datastructures are defined in [the `EConstr` module](/engine/eConstr.mli) module. It defines the tactic counterparts of kernel data structures such as terms (`EConstr.constr`), universes (`EConstr.ESorts.t`) and contexts (`EConstr.*_context`). The main difference with kernel-side types is that observing them requires an evar-map at hand in order to normalize evars on the fly. The basic primitive to observe an `EConstr.t` is the following function: ``` val kind : Evd.evar_map -> t -> (t, t, ESorts.t, EInstance.t) Constr.kind_of_term (** Same as {!Constr.kind} except that it expands evars and normalizes universes on the fly. *) ``` Essentially, each time it sees an evar which happens to be defined in the provided evar-map, it replaces it with the corresponding body and carries on. Due to universe unification occurring at the tactic level, the same goes for universe instances and sorts. See the `ESort` and `EInstance` modules in `EConstr`. This normalization is critical for the soundness of tactics. Before EConstr, a lot of bugs were lurking in the code base, a few still are (most notably in meta-based unification) and failure to respect the guidelines thereafter may result in nasal demons. ## Transition path ### Types As a rule of thumb, all functions living at the tactic level should manipulate `EConstr.t` instead of `Constr.t`, and similarly for the other data structures. To ease the transition, the `EConstr` module defines a handful of aliases to shadow the type names from the kernel. It is recommended to perform the following replacement in headers. ```ocaml (** Kernel types. You may remove the two following opens if you want. 
Beware that [kind_of_term] needs to be in scope if you use [EConstr.kind] so that you may still need to open one of the two. *) open Term open Constr (** Tactic types. Open this after to shadow kernel types. *) open EConstr ``` Note that the `EConstr` module also redefines a `Vars` submodule. ### Evar-map-passing All functions deconstructing an econstr need to take an evar-map as a parameter. Therefore, you need to pass one as an argument virtually everywhere. In the Coq source code, it is recommended to take the evar-map as a first argument called `sigma`, except if the function also takes an environment in which case it is passed second. Namely, the two typical instances are: ```ocaml let foo sigma c = mycode val foo : Evd.evar_map -> EConstr.t -> Foo.t let bar env sigma c = mycode val bar : Environ.env -> Evd.evar_map -> EConstr.t -> Bar.t ``` The EConstr API makes the code much more sensitive to evar-maps, because a lot of now useless normalizations were removed. Thus one should be cautious of **not** dropping the evar-map when it has been updated, and should rather stick to a strict state-passing discipline. Unsound primitives like `Typing.unsafe_type_of` are also a known source of problems, so you should replace them with the corresponding evar-map-returning function and thread it properly. ### Functions Many functions from `Constr` and `Term` are redefined to work on econstr in the `EConstr` module, so that it is often enough to perform the `open` as described above to replace them. Their type may differ though, because they now need access to an evar-map. A lot of econstr-manipulating functions are also defined in [`Termops`](/engine/termops.mli). Functions manipulating tactic terms and kernel terms share the same name if they are the equivalent one of the other. Do not hesitate to grep Coq mli files to find the equivalent of a function you want to port if it is neither in `EConstr` nor in `Termops` (this should be very rare). ### Conversion Sometimes you do not have any other choice than calling kernel-side functions on terms, and conversely to turn a kernel term into a tactic term. There are two functions to do so. * `EConstr.of_constr` turns kernel terms into tactic terms. It is currently the physical identity, and thus O(1), but this may change in the future. * `EConstr.to_constr` turns tactic terms into kernel terms. It performs a full-blown normalization of the given term, which is O(n) and potentially costly. For performance reasons, avoiding to jump back and forth between kernel and tactic terms is recommended. There are also a few unsafe conversion functions that take advantage of the fact that `EConstr.t` is internally the same as `Constr.t`. Namely, `EConstr.Unsafe.to_constr` is the physical identity. It should **not** be used in typical code and is instead provided for efficiency **when you know what you are doing**. Either use it to reimplement low-level functions that happen to be insensitive externally, or use it to provide backward compatibility with broken code that relies on evar-sensitivity. **Do not use it because it is easier than stuffing evar-maps everywhere.** You've been warned. ## Notes The EConstr branch fixed a lot of eisenbugs linked to lack of normalization everywhere, most notably in unification. It may also have introduced a few, so if you see a change in behaviour *that looks like a bug*, please report it. Obviously, unification is not specified, so it's hard to tell apart, but still. Efficiency has been affected as well. 
We now pay an overhead when observing a term, but at the same time a lot of costly upfront normalizations were removed. coq-8.15.0/dev/doc/parsing.md000066400000000000000000000372461417001151100157020ustar00rootroot00000000000000# Parsing Coq's parser is based on Camlp5 using an extensible grammar. Somewhat helpful Camlp5 documentation is available [here](http://camlp5.github.io/doc/htmlc/grammars.html). However, the Camlp5 code has been copied into the Coq source tree and may differ from the Camlp5 release. Notable attributes of the parser include: * The grammar is extensible at run time. This is essential for supporting notations and optionally-loaded plugins that extend the grammar. * The grammar is split into multiple source files. Nonterminals can be local to a file or global. * While 95% of the nonterminals and almost all the productions are defined in the grammar, a few are defined directly in OCaml code. Since many developers have worked on the parser over the years, this code can be idiosyncratic, reflecting various coding styles. * The parser is a recursive descent parser that, by default, only looks at the next token to make a parsing decision. It's possible to hand-code additional lookahead where necessary by writing OCaml code. * There's no code that checks whether a grammar is ambiguous or whether every production can be recognized. Developers who modify the grammar may, in some cases, need to structure their added productions in specific ways to ensure that their additions are parsable and that they don't break existing productions. ## Contents ## - [Grammars: `*.mlg` File Structure](#grammars-mlg-file-structure) - [Grammars: Nonterminals and Productions](#grammars-nonterminals-and-productions) - [Alternate production syntax](#alternate-production-syntax) - [Usage notes](#usage-notes) - [Other components](#other-components) - [Parsing productions](#parsing-productions) - [Lookahead](#lookahead) ## Grammars: `*.mlg` File Structure ## Grammars are defined in `*.mlg` files, which `coqpp` compiles into `*.ml` files at build time. `coqpp` code is in the `coqpp` directory. `coqpp` uses yacc and lex to parse the grammar files. You can examine its yacc and lex input files in `coqpp_lex.mll` and `coqpp_parse.mly` for details not fully covered here. In addition, there is a `doc_grammar` build utility that uses the `coqpp` parser to extract the grammar, then edits and inserts it into the documentation. This is described in [`doc/tools/docgram/README.md`](../../doc/tools/docgram/README.md). `doc_grammar` generates [`doc/tools/docgram/fullGrammar`](../../doc/tools/docgram/fullGrammar), which has the full grammar for Coq (not including some optionally-loaded plugins). This may be easier to read since everything is in one file and the parser action routines and other OCaml code are omitted. `*.mlg` files contain the following types of nodes (See `node` in the yacc grammar). This part is very specific to Coq (not so similar to Camlp5): * OCaml code - OCaml code enclosed in curly braces, which is copied verbatim to the generated `*.ml` file * Comments - comments in the `*.mlg` file in the form `(* … *)`, which are not copied to the generated `*.ml` file. Comments in OCaml code are preserved. 
* `DECLARE_PLUGIN "*_plugin"` - associates the file with a specific plugin, for example "ltac_plugin" * `GRAMMAR EXTEND` - adds additional nonterminals and productions to the grammar and declares global nonterminals referenced in the `GRAMMAR EXTEND`: ``` GRAMMAR EXTEND Gram GLOBAL: bignat bigint …; END ``` Global nonterminals are declared in `pcoq.ml`, e.g. `let bignat = Entry.create "bignat"`. All the `*.mlg` files include `open Pcoq` and often its modules, e.g. `open Pcoq.Prim`. `GRAMMAR EXTEND` should be used only for large syntax additions. To add new commands and tactics, use these instead: - `VERNAC COMMAND EXTEND` to add new commands - `TACTIC EXTEND` to add new tactics - `ARGUMENT EXTEND` to add new nonterminals These constructs provide essential semantic information that's provided in a more complex, less readable way with `GRAMMAR EXTEND`. * `VERNAC COMMAND EXTEND` - adds new command syntax by adding productions to the `command` nonterminal. For example: ``` VERNAC COMMAND EXTEND ExtractionLibrary CLASSIFIED AS QUERY | [ "Extraction" "Library" ident(m) ] -> { extraction_library false m } END ``` Productions here are represented with alternate syntax, described later. New commands should be added using this construct rather than `GRAMMAR EXTEND` so they are correctly registered, such as having the correct command classifier. TODO: explain "ExtractionLibrary", CLASSIFIED AS, CLASSIFIED BY, "{ tactic_mode }", STATE * `VERNAC { … } EXTEND` - TODO. A variant. The `{ … }` is a block of OCaml code. * `TACTIC EXTEND` - adds new tactic syntax by adding productions to `simple_tactic`. For example: ``` TACTIC EXTEND btauto | [ "btauto" ] -> { Refl_btauto.Btauto.tac } END ``` adds a new nonterminal `btauto`. New tactics should be added using this construct rather than `GRAMMAR EXTEND`. TODO: explain DEPRECATED, LEVEL (not shown) * `ARGUMENT EXTEND` - defines a new nonterminal ``` ARGUMENT EXTEND ast_closure_term PRINTED BY { pp_ast_closure_term } INTERPRETED BY { interp_ast_closure_term } GLOBALIZED BY { glob_ast_closure_term } SUBSTITUTED BY { subst_ast_closure_term } RAW_PRINTED BY { pp_ast_closure_term } GLOB_PRINTED BY { pp_ast_closure_term } | [ term_annotation(a) constr(c) ] -> { mk_ast_closure_term a c } END ``` See comments in `tacentries.mli` for partial information on the various arguments. * `VERNAC ARGUMENT EXTEND` - (part of `argument_extend` in the yacc grammar) defines productions for a single nonterminal. For example: ``` VERNAC ARGUMENT EXTEND language PRINTED BY { pr_language } | [ "Ocaml" ] -> { let _ = warn_deprecated_ocaml_spelling () in Ocaml } | [ "OCaml" ] -> { Ocaml } | [ "Haskell" ] -> { Haskell } | [ "Scheme" ] -> { Scheme } | [ "JSON" ] -> { JSON } END ``` TODO: explain PRINTED BY, CODE * DOC_GRAMMAR - Used in `doc_grammar`-generated files to permit simplified syntax Note that you can reverse engineer many details by comparing the `.mlg` input file with the `.ml` generated by `coqpp`. ## Grammars: Nonterminals and Productions Here's a simple nonterminal definition in the Camlp5 format: ``` universe: [ [ IDENT "max"; "("; ids = LIST1 universe_expr SEP ","; ")" -> { ids } | u = universe_expr -> { [u] } ] ] ; ``` In which: * `universe` is the nonterminal being defined * productions are separated by `|` and, as a group, are enclosed in `[ [ … ] ];` * `u = universe_expr` refers to the `universe_expr` nonterminal. `u` is bound to the value returned by that nonterminal's action routine, which can be referred to in the action routine. 
For `ids = LIST1 universe_expr SEP ","`, `ids` is bound to the list of values returned by `universe_expr`. * `-> { … }` contains the OCaml action routine, which is executed when the production is recognized and returns a value * Semicolons separate adjacent grammatical elements (nonterminals, strings or other constructs) Grammatical elements that appear in productions are: - nonterminal names - identifiers in the form `[a-zA-Z0-9_]*`. These correspond to variables in the generated `.ml` code. In some cases a qualified name, such as `Prim.name`, is used. - `"…"` - a literal string that becomes a keyword and cannot be used as an `ident`. The string doesn't have to be a valid identifier; frequently the string will contain only punctuation characters. Generally we try to avoid adding new keywords that are also valid identifiers--though there is an unresolved debate among the developers about whether having more such keywords in general is good (e.g. it makes it easier to highlight keywords in GUIs) or bad (more keywords for the user to avoid and new keywords may require changes to existing proof files). - `IDENT "…"` - a literal string that has the form of an `ident` that doesn't become a keyword - `OPT element` - optionally include `element` (e.g. a nonterminal, IDENT "…" or "…"). The value is of type `'a option`. - `LIST1 element` - a list of one or more `element`s. The value is of type `'a list`. - `LIST0 element` - an optional list of `element`s - `LIST1 element SEP sep` - a list of `element`s separated by `sep` - `LIST0 element SEP sep` - an optional list of `element`s separated by `sep` - `( elements )` - grouping to represent a series of elements as a unit, useful within `OPT` and `LIST*`. - `[ elements1 | elements2 | … ]` - alternatives (either `elements1` or `elements2` or …), actually nested productions, each of which can have its own action routines Nonterminals can also be defined with multiple levels to specify precedence and associativity of its productions. This is described in the Coq documentation under the `Print Grammar` command. The first square bracket around a nonterminal definition is for grouping level definitions, which are separated with `|`, for example: ``` ltac_expr: [ "5" RIGHTA [ te = binder_tactic -> { te } ] | "4" LEFTA : ``` Grammar extensions can specify what level they are modifying, for example: ``` ltac_expr: LEVEL "1" [ RIGHTA [ tac = ltac_expr; intros = ssrintros_ne -> { tclintros_expr ~loc tac intros } ] ]; ``` ### Alternate production syntax ### Except for `GRAMMAR EXTEND`, the `EXTEND` nodes in the `*.mlg`s use simplified syntax in productions that's similar to what's used in the `Tactic Notation` and `Ltac2 Notation` commands. For example: ``` TACTIC EXTEND cc | [ "congruence" ] -> { congruence_tac 1000 [] } | [ "congruence" integer(n) ] -> { congruence_tac n [] } | [ "congruence" "with" ne_constr_list(l) ] -> { congruence_tac 1000 l } | [ "congruence" integer(n) "with" ne_constr_list(l) ] -> { congruence_tac n l } END ``` Nonterminals appearing in the alternate production syntax are accessed through `wit_*` symbols defined in the OCaml code. Some commonly used symbols are defined in `stdarg.ml`. Others are defined in the code generated by `ARGUMENT EXTEND` and `VERNAC ARGUMENT EXTEND` constructs. References to nonterminals that don't have `wit_*` symbols cause compilation errors. The differences are: * The outer `: [ … ];` is omitted. Each production is enclosed in `| [ … ]`. 
* The action routine is outside the square brackets * Literal strings that are valid identifiers don't become reserved keywords * No semicolons separating elements of the production * `integer(n)` is used to bind a nonterminal value to a variable instead of `n = integer` * Alternate forms of constructs are used: * `ne_entry_list` for `LIST1 entry` * `entry_list` for `LIST0 entry` * `ne_entry_list_sep(var, sep)` for `LIST1 entry SEP sep` where the list is bound to `var` * `entry_list_sep(var, sep)` for `LIST0 entry SEP sep` where the list is bound to `var` * `entry_opt` for OPT entry * There's no way to define `LEVEL`s * There's no equivalent to `( elements )` or `[ elements1 | elements2 | … ]`, which may require repeating similar syntax several times. For example, this single production is equivalent to 8 productions in `TACTIC EXTEND` representing all possible expansions of three `OPT`s: ``` | IDENT "Add"; IDENT "Parametric"; IDENT "Relation"; LIST0 binder; ":"; constr; constr; OPT [ IDENT "reflexivity"; IDENT "proved"; IDENT "by"; constr -> { … } ]; OPT [ IDENT "symmetry"; IDENT "proved"; IDENT "by"; constr -> { … } ]; OPT [ IDENT "transitivity"; IDENT "proved"; IDENT "by"; constr -> { … } ]; IDENT "as"; ident -> { … } ``` ## Usage notes ### Other components Coq's lexer is in `clexer.ml`. Its 10 token types are defined in `tok.ml`. The parser is in `grammar.ml`. The extensive use of GADT (generalized algebraic datatypes) makes it harder for the uninitiated to understand it. When the parser is invoked, the call tells the parser which nonterminal to parse. `vernac_control` is the start symbol for commands. `tactic_mode` is the start symbol for tactics. Tactics give syntax errors if Coq is not in proof mode. There are additional details not mentioned here. ### Parsing productions Some thoughts, not to be taken as identifying all the issues: Since the parser examines only the next token to make a parsing decision (and perhaps because of other potentially fixable limitations), some productions have to be ordered or structured in a particular way to parse correctly in all cases. For example, consider these productions: ``` command: [ [ | IDENT "Print"; p = printable -> { VernacPrint p } | IDENT "Print"; qid = smart_global; l = OPT univ_name_list -> { VernacPrint (PrintName (qid,l)) } | IDENT "Print"; IDENT "Module"; "Type"; qid = global -> { VernacPrint (PrintModuleType qid) } | IDENT "Print"; IDENT "Module"; qid = global -> { VernacPrint (PrintModule qid) } | IDENT "Print"; IDENT "Namespace" ; ns = dirpath -> { VernacPrint (PrintNamespace ns) } : printable: [ [ IDENT "Term"; qid = smart_global; l = OPT univ_name_list -> { PrintName (qid,l) } | IDENT "All" -> { PrintFullContext } | IDENT "Section"; s = global -> { PrintSectionContext s } : ``` Reversing the order of the first two productions in `command` causes the `All` in `Print All` to be parsed incorrectly as a `smart_global`, making that command unavailable. `Print Namespace nat.` still works correctly, though. Similarly, the production for `Print Module Type` has to appear before `Print Module ` in order to be reachable. Internally, the parser generates a tree that represents the possible prefixes for the productions of a nonterminal as described in [the Camlp5 documentation](http://camlp5.github.io/doc/htmlc/grammars.html#b:Rules-insertion). Here's another example in which the way the productions are written matters. 
`OPT` at the beginning of a production doesn't always work well: ``` command: [ [ | IDENT "Foo"; n = natural -> { VernacBack 1 } | OPT (IDENT "ZZ"); IDENT "Foo" -> { VernacBack 1 } : ``` `Foo.` looks like it should be accepted, but it gives a parse error: ``` Unnamed_thm < Foo. Toplevel input, characters 3-4: > Foo. > ^ Error: Syntax error: [prim:natural] expected after 'Foo' (in [vernac:command]). ``` Reversing the order of the productions doesn't help, but splitting the 'OPT' production into 2 productions works: ``` | IDENT "Foo" -> { VernacBack 1 } | IDENT "ZZ"; IDENT "Foo" -> { VernacBack 1 } | IDENT "Foo"; n = natural -> { VernacBack 1 } ``` On the other hand, `OPT` works just fine when the parser has already found the right production. For example `Back` and `Back ` can be combined using an `OPT`: ``` | IDENT "Back"; n = OPT natural -> { VernacBack (Option.default 1 n) } ``` ### Lookahead It's possible to look ahead more than one symbol using OCaml code. Generally we avoid doing this unless there's a strong reason to do so. For example, this code defines a new nonterminal `local_test_lpar_id_colon` that checks that the next 3 tokens are `"("` `ident` and `":"` without consuming any input: ``` let local_test_lpar_id_colon = let open Pcoq.Lookahead in to_entry "lpar_id_colon" begin lk_kw "(" >> lk_ident >> lk_kw ":" end ``` This one checks that the next 2 tokens are `"["` and `"|"` with no space between. This is a special case: intropatterns can have sequences like `"[|]"` that are 3 different tokens with empty nonterminals between them. Making `"[|"` a keyword would break existing code with "[|]": ``` let test_array_opening = let open Pcoq.Lookahead in to_entry "test_array_opening" begin lk_kw "[" >> lk_kw "|" >> check_no_space end ``` TODO: how to add a tactic or command coq-8.15.0/dev/doc/primproj.md000066400000000000000000000044601417001151100160710ustar00rootroot00000000000000Primitive Projections --------------------- | Proj of Projection.t * constr Projections are always applied to a term, which must be of a record type (i.e. reducible to an inductive type `I params`). Type-checking, reduction and conversion are fast (not as fast as they could be yet) because we don't keep parameters around. As you can see, it's currently a `constant` that is used here to refer to the projection, that will change to an abstract `projection` type in the future. Basically a projection constant records which inductive it is a projection for, the number of params and the actual position in the constructor that must be projected. For compatibility reason, we also define an eta-expanded form (accessible from user syntax `@f`). The constant_entry of a projection has both informations. Declaring a record (under `Set Primitive Projections`) will generate such definitions. The API to declare them is not stable at the moment, but the inductive type declaration also knows about the projections, i.e. a record inductive type decl contains an array of terms representing the projections. This is used to implement eta-conversion for record types (with at least one field and having all projections definable). The canonical value being `Build_R (pn x) ... (pn x)`. Unification and conversion work up to this eta rule. The records can also be universe polymorphic of course, and we don't need to keep track of the universe instance for the projections either. 
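For ML code that inspects terms, the most visible consequence of primitive projections (spelled out for the ML programmer just below) is the extra `Proj` case of `kind_of_term`, which is usually given the same treatment as the application of the projection constant to the record argument. A minimal hedged sketch (the helper name is ours, and the universe instance is dropped for brevity):

```ocaml
(* Hypothetical helper: view a term as a head applied to its arguments,
   treating a primitive projection like [App (Const p, [| r |])]. *)
let head_and_args (c : Constr.t) : Constr.t * Constr.t array =
  match Constr.kind c with
  | Constr.App (f, args) -> (f, args)
  | Constr.Proj (p, r) ->
    (* same treatment as an applied constant; a real implementation would
       also keep the universe instance of the eta-expanded form around *)
    (Constr.mkConst (Names.Projection.constant p), [| r |])
  | _ -> (c, [||])
```

The same pattern applies to evar-sensitive code through `EConstr.kind sigma c`.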
Projections are reduced _eagerly_ everywhere, and introduce a new `Zproj` constructor in the abstract machines that obeys both the delta (for the constant opacity) and iota laws (for the actual reduction). Refolding works as well (afaict), but there is a slight hack there related to universes (not projections). For the ML programmer, the biggest change is that pattern-matchings on kind_of_term require an additional case, that is handled usually exactly like an `App (Const p) arg`. There are slight hacks related to hints is well, to use the primitive projection form of f when one does `Hint Resolve f`. Usually hint resolve will typecheck the term, resulting in a partially applied projection (disallowed), so we allow it to take `constr_or_global_reference` arguments instead and special-case on projections. Other tactic extensions might need similar treatment. coq-8.15.0/dev/doc/profiling.txt000066400000000000000000000074521417001151100164430ustar00rootroot00000000000000# How to profile Coq? I (Pierre-Marie Pédrot) mainly use two OCaml branches to profile Coq, whether I want to profile time or memory consumption. AFAIK, this only works for Linux. ## Time In Coq source folder: opam switch 4.05.0+trunk+fp ./configure -profile devel -debug make perf record -g bin/coqc file.v perf report -g fractal,callee --no-children To profile only part of a file, first load it using bin/coqtop -l file.v and plug into the process perf record -g -p PID ### Per-component [flame graphs](https://github.com/brendangregg/FlameGraph) I (Andres Erbsen) have found it useful to look at library-wide flame graphs of coq time consumption. As the Ltac interpreter stack is reflected in the OCaml stack, calls to the same primitive can appear on top of multiple essentially equivalent stacks. To make the profiles more readable, one could either try to edit the stack trace to merge "equivalent" frames, or simply look at the aggregate profile on a component-by-component basis. Here is how to do the second for the standard library ([example output](https://cdn.rawgit.com/andres-erbsen/b29b29cb6480dfc6a662062e4fcd0ae3/raw/304fc3fea9630c8e453929aa7920ca8a2a570d0b/stdlib_categorized_outermost.svg)). ~~~~~ #!/bin/bash make -f Makefile.dune clean make -f Makefile.dune states perf record -F99 `# ~1GB of data` --call-graph=dwarf -- make -f Makefile.dune world perf script --time '0%-100%' | stackcollapse-perf.pl | grep Coqtop__compile | sed -rf <(cat <<'EOF' s/;caml/;/g s/_[0-9]*;/;/g s/Logic_monad__fun;//g s/_apply[0-9];//g s/;System/@&@/ s/;Hashcons/@&@/ s/;Grammar/@&@/ s/;Declaremods/@&@/ s/;Tactics/@&@/ s/;Pretyping/@&@/ s/;Typeops/@&@/ s/;Reduction/@&@/ s/;Unification/@&@/ s/;Evarutil/@&@/ s/;Evd/@&@/ s/;EConstr/@&@/ s/;Constr/@&@/ s/;Univ/@&@/ s/;Ugraph/@&@/ s/;UState/@&@/ s/;Micromega/@&@/ s/;Omega/@&@/ s/;Auto/@&@/ s/;Ltac_plugin__Tacinterp/@&@/ s/;Ltac_plugin__Rewrite/@&@/ s/[^@]*@;([^@]*)@/\1;\1/ s/@//g :a; s/;([^;]+);\1;/;\1;/g;ta EOF ) | flamegraph.pl ~~~~~ ## Memory You first need a few commits atop trunk for this to work. git remote add ppedrot https://github.com/ppedrot/coq.git git fetch ppedrot git checkout ppedrot/allocation-profiling git rebase master Then: opam switch 4.00.1+alloc-profiling ./configure -profile devel -debug make Note that linking the coqtop binary takes quite an amount of time with this branch, so do not worry too much. 
There are more recent branches of alloc-profiling on mshinwell's repo which can be found at: https://github.com/mshinwell/opam-repo-dev ### For memory dump: CAMLRUNPARAM=T,mj bin/coqc file.v In another terminal: pkill -SIGUSR1 $COQTOPPID ... pkill -SIGUSR1 $COQTOPPID dev/decode-major-heap.sh heap.$COQTOPPID.$N bin/coqtop where $COQTOPPID is coqtop pid and $N the index of the call to pkill. First column is the memory taken by the objects (in words), second one is the number of objects and third is the place where the objects where allocated. ### For complete memory graph: CAMLRUNPARAM=T,gr bin/coqc file.v In another terminal: pkill -SIGUSR1 $COQTOPPID ... pkill -SIGUSR1 $COQTOPPID ocaml dev/decodegraph.ml edge.$COQTOPPID.$N bin/coqtop > memory.dot dot -Tpdf -o memory.pdf memory.dot where $COQTOPPID is coqtop pid and $N the index of the call to pkill. The pdf produced by the last command gives a compact graphical representation of the various objects allocated. coq-8.15.0/dev/doc/proof-engine.md000066400000000000000000000155261417001151100166240ustar00rootroot00000000000000Tutorial on the new proof engine for ML tactic writers ====================================================== Starting from Coq 8.5, a new proof engine has been introduced, replacing the old meta-based engine which had a lot of drawbacks, ranging from expressivity to soundness, the major one being that the type of tactics was transparent. This was pervasively abused and made virtually impossible to tweak the implementation of the engine. The old engine is deprecated and is slowly getting removed from the source code. The new engine relies on a monadic API defined in the `Proofview` module. Helper functions and higher-level operations are defined in the `Tacmach` and `Tacticals` modules, and end-user tactics are defined amongst other in the `Tactics` module. At the root of the engine is a representation of proofs as partial terms that can contain typed holes, called evars, short for *existential variable*. An evar is essentially defined by its context and return type, that we will write `?e : [Γ ⊢ _ : A]`. An evar `?e` must be applied to a substitution `σ` of type `Γ` (i.e. a list of terms) to produce a term of type `A`, which is done by applying `EConstr.mkEvar`, and which we will write `?e{σ}`. The engine monad features a notion of global state called `evar_map`, defined in the `Evd` module, which is the structure containing the incremental refinement of evars. `Evd` is a low-level API and its use is discouraged in favour of the `Evarutil` module which provides more abstract primitives. In addition to this state, the monad also features a goal state, that is an ordered list of current holes to be filled. While these holes are referred to as goals at a high-enough level, they are actually no more than evars. The API provided to deal with these holes can be found in the `Proofview.Goal` module. Tactics are naturally operating on several goals at once, so that it is usual to use the `Proofview.Goal.enter` function and its variants to dispatch a tactic to each of the goals under focus. Primitive tactics by term refining ------------------------------------- A typical low-level tactic will be defined by plugging partial terms in the goal holes thanks to the `Refine` module, and in particular to the `Refine.refine` primitive. ```ocaml val refine : typecheck:bool -> (Evd.evar_map -> Evd.evar_map * EConstr.t) -> unit tactic (** In [refine ~typecheck t], [t] is a term with holes under some [evar_map] context. 
The term [t] is used as a partial solution for the current goal (refine is a goal-dependent tactic); the new holes created by [t] become the new subgoals. Exceptions raised during the interpretation of [t] are caught and result in tactic failures. If [typecheck] is [true], [t] is type-checked beforehand. *)
```

What the function does is first evaluate the `t` argument in the current proof state, and then use the resulting term as a filler for the proof under focus. All evars that have been created by the invocation of this thunk are then turned into new goals, added in the order of their creation.

To see how we can use it, let us have a look at an idealized example, the `cut` tactic. Assuming `X` is a type, `cut X` fills the current goal `[Γ ⊢ _ : A]` with a term `let x : X := ?e2{Γ} in ?e1{Γ} x` where `x` is a fresh variable and `?e1 : [Γ ⊢ _ : X -> A]` and `?e2 : [Γ ⊢ _ : X]`. The current goal is solved and two new holes `[e1, e2]` are added to the goal state in this order.

```ocaml
let cut c =
  Proofview.Goal.enter begin fun gl ->
    (** In this block, we focus on one goal at a time indicated by gl *)
    let env = Proofview.Goal.env gl in
    (** Get the context of the goal, essentially [Γ] *)
    let concl = Proofview.Goal.concl gl in
    (** Get the conclusion [A] of the goal *)
    let hyps = Tacmach.New.pf_ids_of_hyps gl in
    (** List of hypotheses from the context of the goal *)
    let id = Namegen.next_name_away Anonymous hyps in
    (** Generate a fresh identifier *)
    let t = mkArrow c (Vars.lift 1 concl) in
    (** Build [X -> A]. Note the lifting of [A] due to being on the right
        hand side of the arrow. *)
    Refine.refine ~typecheck:true begin fun sigma ->
      (** All evars generated by this block will be added as goals *)
      let sigma, f = Evarutil.new_evar env sigma t in
      (** Generate ?e1 : [Γ ⊢ _ : X -> A], add it to sigma, and return the
          term [f := Γ ⊢ ?e1{Γ} : X -> A] together with the updated sigma.
          The identity substitution for [Γ] is extracted from the [env]
          argument, so one must be careful to pass the correct context here
          in order for the resulting term to be well-typed. *)
      let sigma, x = Evarutil.new_evar env sigma c in
      (** Generate ?e2 : [Γ ⊢ _ : X] in sigma and return
          [x := Γ ⊢ ?e2{Γ} : X]. *)
      let r = mkLetIn (Name id, x, c, mkApp (Vars.lift 1 f, [|mkRel 1|])) in
      (** Build [r := Γ ⊢ let id : X := ?e2{Γ} in ?e1{Γ} id : A] *)
      (sigma, r)
      (** Return the updated evar-map together with the partial proof term *)
    end
  end
```

The `Evarutil.new_evar` function is the preferred way to generate evars in tactics. It returns a ready-to-use term, so that one does not have to call the `mkEvar` primitive. There are lower-level variants whose use is dedicated to special use cases, *e.g.* whenever one wants a non-identity substitution. One should take care to call it with the proper `env` argument so that the evar and the term it generates make sense in the context they will be plugged in.

For the sake of completeness, the old engine was relying on the `Tacmach.refine` function to provide a similar feature. Nonetheless, it was using untyped metas instead of evars, so that it had to mangle the argument term to actually produce the term that would be put into the hole. For instance, to work around the untypedness, some metas had to be coerced with a cast to enforce their type, otherwise leading to runtime errors. This was working for very simple instances, but was unreliable for everything else.

High-level composition of tactics
------------------------------------

It is possible to combine low-level refinement tactics to create more powerful abstractions.
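For instance, here is a hedged sketch (reusing the `cut` tactic defined above together with combinators from `Proofview`; the compound tactic itself is ours) of such a composition:

```ocaml
(* Hedged sketch: [cut c] leaves two goals, [X -> A] and then [X]; introduce
   the new hypothesis in the first one and leave the second untouched. *)
open Proofview.Notations

let cut_and_intro (c : EConstr.t) : unit Proofview.tactic =
  cut c <*> Proofview.tclDISPATCH [ Tactics.intro; Proofview.tclUNIT () ]
```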
While it was the standard way of doing things in the old engine to overcome its technical limitations (namely that one was forced to go through a limited set of derivation rules), it is recommended to generate proofs as much as possible by refining in ML tactics when it is possible and easy enough. Indeed, this prevents dependence on fragile constructions such as unification. Obviously, it does not forbid the use of tacticals to mimic what one would do in Ltac. Each Ltac primitive has a corresponding ML counterpart with simple semantics. A list of such tacticals can be found in the `Tacticals` module. Most of them are a porting of the tacticals from the old engine to the new one, so that if they share the same name they are expected to have the same semantics. coq-8.15.0/dev/doc/release-process.md000066400000000000000000000262211417001151100173220ustar00rootroot00000000000000# Release checklist # ## When the release managers for version `X.X` get nominated ## - [ ] Create a new issue to track the release process where you can copy-paste the present checklist from `dev/doc/release-process.md`. - [ ] Decide the release calendar with the team (date of branching, preview and final release). - [ ] Create a wiki page that you link to from https://github.com/coq/coq/wiki/Release-Plan with this information and the link to the issue. ## About one month before the branching date ## - [ ] Create both the upcoming final release (`X.X.0`) and the following major release (`Y.Y+rc1`) milestones if they do not already exist. - [ ] Send an announcement of the upcoming branching date on Coqdev + the Coq development category on Discourse (coqdev@inria.fr + coq+coq-development@discoursemail.com) and ask people to remove from the `X.X+rc1` milestone any feature and clean up PRs that they already know won't be ready on time. - [ ] In a PR on `master`, call [`dev/tools/update-compat.py`](../tools/update-compat.py) with the `--release` flag; this sets up Coq to support three `-compat` flag arguments including the upcoming one (instead of four). To ensure that CI passes, you will have to decide what to do about all test-suite files that mention `-compat U.U` or `Coq.Compat.CoqUU` (which is no longer valid, since we only keep compatibility against the two previous versions), and you may have to ping maintainers of projects that are still relying on the old compatibility flag so that they fix this. - [ ] Make sure that this change is merged in time for the branching date. - [ ] Prepare a PR on `master` (not yet to be merged) changing the version name to the next major version and both magic numbers in [`tools/configure/configure.ml`](../../tools/configure/configure.ml). For example, for `8.5`, the version name will be `8.5+alpha` while the magic numbers will end with `80490`. Additionally, in the same commit, update the compatibility infrastructure, which consists of invoking [`dev/tools/update-compat.py`](../tools/update-compat.py) with the `--master` flag. Note that the `update-compat.py` script must be run twice: once in preparation of the release with the `--release` flag (see earlier in this section) and once on the branching date with the `--master` flag to mark the start of the next version. This PR should be opened before the branching date to have time to deal with CI issues, but should not be merged until branching. 
## On the branching date ## - [ ] Merge the above PR and create the `vX.X` branch from the last merge commit before this one (using this name will ensure that the branch will be automatically protected). - [ ] Set the next major version alpha tag using `git tag -s`. The `VY.Y+alpha` tag marks the first commit to be in `master` and not in the `vX.X` release branch. Note that this commit is the first commit in the first PR merged in master, not the merge commit for that PR. Therefore, if you proceeded as described above, this should be the commit updating the version, magic numbers and compatibility infrastructure. After tagging, double-check that `git describe` picks up the tag you just made (if not, you tagged the wrong commit). - [ ] Push the new tag with `git push upstream VY.Y+alpha --dry-run` (remove the `--dry-run` and redo if everything looks OK). - [ ] Start a new project to track PR backporting. The project should have a `Request X.X+rc1 inclusion` column for the PRs that were merged in `master` that are to be considered for backporting, and a `Shipped in X.X+rc1` columns to put what was backported. A message to `@coqbot` in the milestone description tells it to automatically add merged PRs to the `Request ... inclusion` column and backported PRs to the `Shipped in ...` column. See previous milestones for examples. When moving to the next milestone (e.g. `X.X.0`), you can clear and remove the `Request X.X+rc1 inclusion` column and create new `Request X.X.0 inclusion` and `Shipped in X.X.0` columns. The release manager is the person responsible for merging PRs that target the release branch and backporting appropriate PRs (mostly safe bug fixes, user message improvements and documentation updates) that are merged into `master`. - [ ] Pin the versions of libraries and plugins in [`dev/ci/ci-basic-overlay.sh`](../ci/ci-basic-overlay.sh) to use commit hashes. You can use the [`dev/tools/pin-ci.sh`](../tools/pin-ci.sh) script to do this semi-automatically. - [ ] In a PR on `master` to be backported, add a new link to the `'versions'` list of the refman (in `html_context` in [`doc/sphinx/conf.py`](../../doc/sphinx/conf.py)). ## In the days following the branching ## - [ ] Make sure that all the last feature PRs that you want to include in the release are finished and backported quickly and clean up the milestone. We recommend backporting as few feature PRs as possible after branching. In particular, any PR with overlays will require manually bumping the pinned commits when backporting. - [ ] Delay non-blocking issues to the appropriate milestone and ensure blocking issues are solved. If required to solve some blocking issues, it is possible to revert some feature PRs in the release branch only (but in this case, the blocking issue should be postponed to the next major release instead of being closed). - [ ] Once the final list of features is known, in a PR on `master` to be backported, generate the release changelog by calling [`dev/tools/generate-release-changelog.sh`](../tools/generate-release-changelog.sh) and include it in a new section in [`doc/sphinx/changes.rst`](../../doc/sphinx/changes.rst). At the moment, the script doesn't do it automatically, but we recommend reordering the entries to show first the **Changed**, then the **Removed**, **Deprecated**, **Added** and last the **Fixed**. - [ ] Ping the development coordinator (`@mattam82`) to get him started on writing the release summary. 
The [`dev/tools/list-contributors.sh`](../tools/list-contributors.sh) script computes the number and list of contributors between Coq revisions. Typically used with `VX.X+alpha..vX.X` to check the contributors of version `VX.X`. Note that this script relies on [`.mailmap`](../../.mailmap) to merge multiple identities. If you notice anything incorrect while using it, use the opportunity to fix the `.mailmap` file. Same thing if you want to have the full name of a contributor shown instead of a pseudonym. ## For each release (preview, final, patch-level) ## - [ ] Ensure that there exists a milestone for the following version. - [ ] Ensure the release changelog has been merged (the release summary is required for the final release). - [ ] In a PR against `vX.X` (for testing): - Update the version number in [`tools/configure/configure.ml`](../../tools/configure/configure.ml). - Only update the magic numbers for the final release. - Set `is_a_released_version` to `true`. - [ ] Set the tag `VX.X...` using `git tag -s`. - [ ] Push the new tag with `git push upstream VX.X... --dry-run` (remove the `--dry-run` and redo if everything looks OK). - [ ] Set `is_a_released_version` to `false` (if you forget about it, you'll be reminded by the test-suite failing whenever you try to backport a PR with a changelog entry). - [ ] Close the milestone. - [ ] Send an e-mail on Coqdev + the Coq development category on Discourse (coqdev@inria.fr + coq+coq-development@discoursemail.com) announcing that the tag has been set so that package managers can start preparing package updates (including a `coq-bignums` compatible version). - [ ] In particular, ensure that someone is working on providing an opam package (either in the main [ocaml/opam-repository](https://github.com/ocaml/opam-repository) for standard releases or in the `core-dev` category of the [coq/opam-coq-archive](https://github.com/coq/opam-coq-archive) for preview releases. - [ ] Make sure to cc `@erikmd` to request that he prepares the necessary configuration for the Docker release in [`coqorg/coq`](https://hub.docker.com/r/coqorg/coq) (namely, he'll need to make sure a `coq-bignums` opam package is available in [`extra-dev`](https://github.com/coq/opam-coq-archive/tree/master/extra-dev), respectively [`released`](https://github.com/coq/opam-coq-archive/tree/master/released), so the Docker image gathering `coq` and `coq-bignums` can be built). - [ ] Publish a release on GitHub with the PDF version of the reference manual attached. The PDF can be recovered from the artifacts of the `doc:refman-pdf:dune` job from continuous integration. ## For the first release candidate release ## - [ ] In coordination with platform maintainers, announce the release candidate to Coq-Club and Discourse (coq-club@inria.fr + coq+announcements@discoursemail.com) with the message that: - the release candidate is stable: library authors can safely start preparing compatible releases; - in particular, the authors of packages that are included in the platform should do so as soon as possible to avoid delaying the platform release (and remind the expected calendar); - a further announcement will follow when the platform is ready to install this version of Coq. ## For each non-preview release ## - [ ] Modify the version number in the file [`incl/macros.html`](https://github.com/coq/www/blob/master/incl/macros.html) on the website. - [ ] Ping `@Zimmi48` to switch the default version of the reference manual on the website. 
This is done by logging into the server (`vps697916.ovh.net`), editing two `ProxyPass` lines (one for the refman and one for the stdlib doc) with `sudo vim /etc/apache2/sites-available/000-coq.inria.fr.conf`, then running `sudo systemctl reload apache2`. *TODO:* automate this or make it doable through the `www` git repository. See [coq/www#111](https://github.com/coq/www/issues/111) and [coq/www#131](https://github.com/coq/www/issues/131).

## Only for the final release of each major version ##

- [ ] Ping `@Zimmi48` to publish a new version on Zenodo. *TODO:* automate this with coqbot.

## This is now delegated to the platform maintainers ##

- [ ] Sign the Windows and MacOS packages and upload them on GitHub.
  + The Windows packages must be signed by the Inria IT security service. They should be sent as a link to the binary (via [filesender](https://filesender.renater.fr) for example) together with its SHA256 hash in a signed e-mail to `dsi.securite` @ `inria.fr`, putting `@maximedenes` in carbon copy.
  + The MacOS packages should be signed with our own certificate. A detailed step-by-step guide can be found [on the wiki](https://github.com/coq/coq/wiki/SigningReleases).
  + The Snap package has to be built and uploaded to the snap store by running a [platform CI job manually](https://github.com/coq/platform/tree/2021.02/linux/snap/github_actions). Then ask `@gares` to publish the upload or give you the password for the `coq-team` account on the store so that you can do it yourself.
- [ ] Announce the release to Coq-Club and Discourse (coq-club@inria.fr + coq+announcements@discoursemail.com).

coq-8.15.0/dev/doc/shield-icon.png (binary PNG image data omitted)
coq-8.15.0/dev/doc/style.txt

<< Style uniformity is more important than style itself >> (Kernighan & Pike, The Practice of Programming)

OCaml Style:

- Spacing and indentation
  - indent your code (using the tuareg default)
  - no strong constraints on formatting "let in"; possible styles are:
    "let x = ... in"  "let x = ... in"  "let x = ... in"
  - but: no extra indentation before an "in" coming on the next line; otherwise, first, it shifts further and further to the right, reducing the amount of space available, and second, it is not robust to insertion of a new "let"
  - it is established usage to have space around "|" as in "match c with | [] | [a] -> ... | a::b::l -> ..."
  - in a one-line "match", it is preferred to have no "|" in front of the first case (this saves space so that the match holds on the line)
  - from about 8.2, the tendency is to use the following format, which limits excessive indentation while providing an interesting "block" aspect:
      type t =
      | A
      | B of machin

      let f expr = match expr with
      | A -> ...
      | B x -> ...

      let f expr = function
      | A -> ...
      | B x -> ...
  - add spaces around = and == (make the code "breathe")
  - the common usage is to write "let x,y = ... in ..." rather than "let (x,y) = ... in ..."
  - parenthesizing with either "(" and ")" or with "begin" and "end" is common practice
  - preferred layout for conditionals: if condition then first-case else second-case
  - in case of effects in branches, use "begin ... end" rather than parentheses:
    if condition then begin instr1; instr2 end else begin instr3; instr4 end
  - if the first branch raises an exception, avoid the "else", i.e. prefer
      if condition then error "foo";
      bar
    over
      if condition then error "foo"
      else bar
  - it is the usage not to use ";;" to end OCaml sentences (however, inserting ";;" can be useful for debugging syntax errors crossing the boundary of functions)
  - relevant options in tuareg:
      (setq tuareg-in-indent 2)
      (setq tuareg-with-indent 0)
      (setq tuareg-function-indent 0)
      (setq tuareg-let-always-indent nil)

- Coding methodology
  - no "try ... with _ -> ..." which catches even Sys.Break (Ctrl-C), Out_of_memory, Stack_overflow, etc.; at least, use "try ... with e when Errors.noncritical e -> ..." (to be detailed, Pierre L. ?)
- do not abuse fancy combinators: sometimes what a "let rec" loop does is more readable and simpler to grasp than what a "fold" does - do not break abstractions: if an internal property is hidden behind an interface, do no rely on it in code which uses this interface (e.g. do not use List.map thinking it is left-to-right, use map_left) - in particular, do not use "=" on abstract types: there is no reason a priori that it is the intended equality on this type; use the "equal" function normally provided with the abstract type - avoid polymorphically typed "=" whose implementation is not optimized in OCaml and which has moreover no reason to be the intended implementation of the equality when it comes to be instantiated on a particular type (e.g. use List.mem_f, List.assoc_f, rather than List.mem, List.assoc, etc, unless it is absolutely clear that "=" will implement the intended equality, and with the right complexity) - any new general-purpose enough combinator on list should be put in cList.ml, on type option in cOpt.ml, etc. - unless for a good reason not to do so, follow the style of the surrounding code in the same file as much as possible, the general guidelines are otherwise "let spacing breathe" (we have large screen nowadays), "make your code easy to read and to understand" - document what is tricky, but do not overdocument, sometimes the choice of names and the structure of the code are better documentation than a long discourse; use of unicode in comments is welcome if it can make comments more readable (then "toggle-enable-multibyte-characters" can help when using the debugger in emacs) - all of initial "open File", or of small-scope File.(...), or per-ident File.foo are common practices - Choice of variable names - be consistent when naming from one function to another - be consistent with the naming adopted in the functions from the same file, or with the naming used elsewhere by similar functions - use variable names which express meaning - keep "cst" for constants and avoid it for constructors which is otherwise a source of confusion - for constructors, use "cstr" in type constructor (resp. "cstru" in constructor puniverse); avoid "constr" for "constructor" which could be think as the name of an arbitrary Constr.t - for inductive types, use "ind" in the type inductive (resp "indu" in inductive puniverse) - for env, use "env" - for evar_map, use "sigma", with tolerance into "evm" and "evd" - for named_context or rel_context, use "ctxt" or "ctx" (or "sign") - for formal/actual indices of inductive types: "realdecls", "realargs" - for formal/actual parameters of inductive types: "paramdecls", "paramargs" - for terms, use e.g. c, b, a, ... - if a term is known to be a function: f, ... - if a term is known to be a type: t, u, typ, ... - for a declaration, use d or "decl" - for errors, exceptions, use e - Common OCaml pitfalls - in "match ... with Case1 -> try ... with ... -> ... | Case2 -> ...", or in "match ... with Case1 -> match ... with SubCase -> ... | Case2 -> ...", or in parentheses are needed around the "try" and the inner "match" - even if stream are lazy, the Pp.(++) combinator is strict and forces the evaluation of its arguments (use a "lazy" or a "fun () ->") to make it lazy explicitly - in "if ... then ... else ... ++ ...", the default parenthesizing is somehow counter-intuitive; use "(if ... then ... else ...) ++ ..." 
- in "let myspecialfun = mygenericfun args", be sure that it does no do side-effect; prefer otherwise "let mygenericfun arg = mygenericfun args arg" to ensure that the function is evaluated at runtime coq-8.15.0/dev/doc/unification.txt000066400000000000000000000122251417001151100167540ustar00rootroot00000000000000Some notes about the use of unification in Coq ---------------------------------------------- There are several applications of unification and pattern-matching ** Unification of types ** - For type inference, inference of implicit arguments * this basically amounts to solve problems of the form T <= U or T = U where T and U are types coming from a given typing problem * this kind of problem has to succeed and all the power of unification is a priori expected (full beta/delta/iota/zeta/nu/mu, pattern-unification, pruning, imitation/projection heuristics, ...) - For lemma application (apply, auto, ...) * these are also problems of the form T <= U on types but with T coming from a lemma and U from the goal * it is not obvious that we always want unification and not matching * it is not clear which amounts of delta one wants to use ** Looking for subterms ** - For tactics applying on subterms: induction, destruct, rewrite - As part of unification of types in the presence of higher-order evars (e.g. when applying a lemma of conclusion "?P t") ---------------------------------------------------------------------- Here are examples of features one may want or not when looking for subterms A- REWRITING 1- Full conversion on closed terms 1a- Full conversion on closed terms in the presence of at least one evars (meta) Section A1. Variable y: nat. Hypothesis H: forall x, x+2 = 0. Goal y+(1+1) = 0. rewrite H. (* 0 = 0 *) Abort. Goal 2+(1+1) = 0. rewrite H. (* 0 = 0 *) Abort. (* This exists since the very beginning of Chet's unification for tactics *) (* But this fails for setoid rewrite *) 1b- Full conversion on closed terms without any evars in the lemma 1b.1- Fails on rewrite (because Unification.w_unify_to_subterm_list replaces unification by check for a syntactic subterm if terms has no evar/meta) Goal 0+1 = 0 -> 0+(1+0) = 0. intros H; rewrite H. (* fails *) Abort. 1b.2- Works with setoid rewrite Require Import Setoid. Goal 0+1 = 0 -> 0+(1+0) = 0. intros H; rewrite H at 1. (* 0 = 0 *) Abort. 2- Using known instances in full conversion on closed terms Section A2. Hypothesis H: forall x, x+(2+x) = 0. Goal 1+(1+2) = 0. rewrite H. Abort. End A2. (* This exists since 8.2 (HH) *) 3- Pattern-unification on Rels Section A3a. Variable F: (nat->nat->nat)->nat. Goal exists f, F (fun x y => f x y) = 0 -> F (fun x y => plus y x) = 0. eexists. intro H; rewrite H. (* 0 = 0 *) Abort. End A3a. (* Works since pattern unification on Meta applied to Rel was introduced *) (* in unification.ml (8.1, Sep 2006, HH) *) Section A3b. Variables x y: nat. Variable H: forall f, f x y = 0. Goal plus y x = 0. rewrite H. (* 0 = 0 *) Abort. End A3b. (* Works since pattern unification on all Meta was supported *) (* in unification.ml (8.4, Jun 2011, HH) *) 4- Unification with open terms Section A4. Hypothesis H: forall x, S x = 0. Goal S 0 = 0. rewrite (H _). (* 0 = 0 *) Abort. End A4. (* Works since unification on Evar was introduced so as to support rewriting *) (* with open terms (8.2, MS, r11543, Unification.w_unify_to_subterm_list ) *) 5- Unification of pre-existing evars 5a- Basic unification of pre-existing evars Section A4. Variables x y: nat. Goal exists z, S z = 0 -> S (plus y x) = 0. eexists. 
intro H; rewrite H. (* 0 = 0 *) Abort. End A4. (* This worked in 8.2 and 8.3 as a side-effect of support for rewriting *) (* with open terms (8.2, MS, r11543) *) 5b- Pattern-unification of pre-existing evars in rewriting lemma Goal exists f, forall x y, f x y = 0 -> plus y x = 0. eexists. intros x y H; rewrite H. (* 0 = 0 *) Abort. (* Works since pattern-unification on Evar was introduced *) (* in unification.ml (8.3, HH, r12229) *) (* currently governed by a flag: use_evars_pattern_unification *) 5c- Pattern-unification of pre-existing evars in goal Goal exists f, forall x y, plus x y = 0 -> f y x = 0. eexists. intros x y H; rewrite H. (* 0 = 0 *) Abort. (* This worked in 8.2 and 8.3 but was removed for autorewrite in 8.4 *) 5d- Mixing pattern-unification of pre-existing evars in goal and evars in lemma Goal exists f, forall x, (forall y, plus x y = 0) -> forall y:nat, f y x = 0. eexists. intros x H y. rewrite H. (* 0 = 0 *) Abort. (* This worked in 8.2 and 8.3 but was removed for autorewrite in 8.4 *) 6- Multiple non-identical but convertible occurrences Tactic rewrite only considers the first one, from left-to-right, e.g.: Section A6. Variable y: nat. Hypothesis H: forall x, x+2 = 0. Goal (y+(2+0))+(y+(1+1)) = (y+(1+1))+(y+(2+0)). rewrite H. (* 0+(y+(1+1)) = y+(1+1)+0 *) Abort. End A6. Tactic setoid rewrite first looks for syntactically equal terms and if not uses the leftmost occurrence modulo delta. Require Import Setoid. Section A6. Variable y: nat. Hypothesis H: forall x, x+2 = 0. Goal (y+(2+0))+(y+2) = (y+2)+(y+(2+0)). rewrite H at 1 2 3 4. (* (y+(2+0))+0 = 0+(y+(2+0)) *) Abort. Goal (y+(2+0))+(y+(1+1)) = (y+(1+1))+(y+(2+0)). rewrite H at 1 2 3 4. (* 0+(y+(1+1)) = y+(1+1)+0 *) Abort. End A6. 7- Conversion Section A6. Variable y: nat. Hypothesis H: forall x, S x = 0. Goal id 1 = 0. rewrite H. B- ELIMINATION (INDUCTION / CASE ANALYSIS) This is simpler because open terms are not allowed and no unification is involved (8.3). coq-8.15.0/dev/doc/universes.md000066400000000000000000000245531417001151100162570ustar00rootroot00000000000000Notes on universe polymorphism ------------------------------ The implementation of universe polymorphism introduces a few changes to the API of Coq. First and foremost, the term language changes, as global references now carry a universe level substitution: ~~~ocaml type 'a puniverses = 'a * Univ.Instance.t type pconstant = constant puniverses type pinductive = inductive puniverses type pconstructor = constructor puniverses type constr = ... | Const of puniverses | Ind of pinductive | Constr of pconstructor ~~~ Universes --------- Universe instances (an array of levels) gets substituted when unfolding definitions, are used to typecheck and are unified according to the rules in the ITP'14 paper on universe polymorphism in Coq. ~~~ocaml type Level.t = Set | Prop | Level of int * dirpath (* hashconsed *) type Instance.t = Level.t array type Universe.t = Level.t list (* hashconsed *) ~~~ The universe module defines modules and abstract types for levels, universes etc.. Structures are hashconsed (with a hack to take care of the fact that deserialization breaks sharing). Definitions (constants, inductives) now carry around not only constraints but also the universes they introduced (a Univ.UContext.t). There is another kind of contexts `Univ.ContextSet.t`, the latter has a set of universes, while the former has serialized the levels in an array, and is used for polymorphic objects. Both have "reified" constraints depending on global and local universes. 
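As a concrete recap of the term-language change shown at the top of this file, here is a small hedged sketch (the helper name is ours) of observing the universe instance now carried by global references:

```ocaml
(* Hedged sketch: constants, inductives and constructors are paired with a
   Univ.Instance.t in the term language; an empty instance corresponds to a
   monomorphic reference. *)
let instance_of_head (c : Constr.t) : Univ.Instance.t option =
  match Constr.kind c with
  | Constr.Const (_, u) | Constr.Ind (_, u) | Constr.Construct (_, u) -> Some u
  | _ -> None
```

Recall that, for a polymorphic definition, the levels occurring in such an instance are bound by the `Univ.UContext.t` attached to it, which is what the next paragraphs are about.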
A polymorphic definition is abstract w.r.t. the variables in this context, while a monomorphic one (or template polymorphic) just adds the universes and constraints to the global universe context when it is put in the environment. No other universes than the global ones and the declared local ones are needed to check a declaration, hence the kernel does not produce any constraints anymore, apart from module subtyping.... There are hence two conversion functions now: `check_conv` and `infer_conv`: the former just checks the definition in the current env (in which we usually push_universe_context of the associated context), and `infer_conv` which produces constraints that were not implied by the ambient constraints. Ideally, that one could be put out of the kernel, but currently module subtyping needs it. Inference of universes is now done during refinement, and the evar_map carries the incrementally built universe context, starting from the global universe constraints (see `Evd.from_env`). `Evd.conversion` is a wrapper around `infer_conv` that will do the bookkeeping for you, it uses `evar_conv_x`. There is a universe substitution being built incrementally according to the constraints, so one should normalize at the end of a proof (or during a proof) with that substitution just like we normalize evars. There are some nf_* functions in library/universes.ml to do that. Additionally, there is a minimization algorithm in there that can be applied at the end of a proof to simplify the universe constraints used in the term. It is heuristic but validity-preserving. No user-introduced universe (i.e. coming from a user-written anonymous Type) gets touched by this, only the fresh universes generated for each global application. Using ~~~ocaml val pf_constr_of_global : Globnames.global_reference -> (constr -> tactic) -> tactic ~~~ Is the way to make a constr out of a global reference in the new API. If they constr is polymorphic, it will add the necessary constraints to the evar_map. Even if a constr is not polymorphic, we have to take care of keeping track of its universes. Typically, using: ~~~ocaml mkApp (coq_id_function, [| A; a |]) ~~~ and putting it in a proof term is not enough now. One has to somehow show that A's type is in cumululativity relation with id's type argument, incurring a universe constraint. To do this, one can simply call Typing.resolve_evars env evdref c which will do some infer_conv to produce the right constraints and put them in the evar_map. Of course in some cases you might know from an invariant that no new constraint would be produced and get rid of it. Anyway the kernel will tell you if you forgot some. As a temporary way out, `Universes.constr_of_global` allows you to make a constr from any non-polymorphic constant, but it will fail on polymorphic ones. Other than that, unification (w_unify and evarconv) now take account of universes and produce only well-typed evar_maps. Some syntactic comparisons like the one used in `change` have to be adapted to allow identification up-to-universes (when dealing with polymorphic references), `make_eq_univs_test` is there to help. In constr, there are actually many new comparison functions to deal with that: ~~~ocaml (** [equal a b] is true if [a] equals [b] modulo alpha, casts, and application grouping *) val equal : constr -> constr -> bool (** [eq_constr_univs u a b] is [true] if [a] equals [b] modulo alpha, casts, application grouping and the universe equalities in [u]. 
*) val eq_constr_univs : constr Univ.check_function (** [leq_constr_univs u a b] is [true] if [a] is convertible to [b] modulo alpha, casts, application grouping and the universe inequalities in [u]. *) val leq_constr_univs : constr Univ.check_function (** [eq_constr_universes a b] [true, c] if [a] equals [b] modulo alpha, casts, application grouping and the universe equalities in [c]. *) val eq_constr_universes : constr -> constr -> bool Univ.universe_constrained (** [leq_constr_universes a b] [true, c] if [a] is convertible to [b] modulo alpha, casts, application grouping and the universe inequalities in [c]. *) val leq_constr_universes : constr -> constr -> bool Univ.universe_constrained (** [eq_constr_univs a b] [true, c] if [a] equals [b] modulo alpha, casts, application grouping and ignoring universe instances. *) val eq_constr_nounivs : constr -> constr -> bool ~~~ The `_univs` versions are doing checking of universe constraints according to a graph, while the `_universes` are producing (non-atomic) universe constraints. The non-atomic universe constraints include the `ULub` constructor: when comparing `f (* u1 u2 *) c` and `f (* u1' u2' *) c` we add ULub constraints on `u1, u1'` and `u2, u2'`. These are treated specially: as unfolding `f` might not result in these unifications, we need to keep track of the fact that failure to satisfy them does not mean that the term are actually equal. This is used in unification but probably not necessary to the average programmer. Another issue for ML programmers is that tables of constrs now usually need to take a `constr Univ.in_universe_context_set` instead, and properly refresh the universes context when using the constr, e.g. using Clenv.refresh_undefined_univs clenv or: ~~~ocaml (** Get fresh variables for the universe context. Useful to make tactics that manipulate constrs in universe contexts polymorphic. *) val fresh_universe_context_set_instance : universe_context_set -> universe_level_subst * universe_context_set ~~~ The substitution should be applied to the constr(s) under consideration, and the context_set merged with the current evar_map with: ~~~ocaml val merge_context_set : rigid -> evar_map -> Univ.universe_context_set -> evar_map ~~~ The `rigid` flag here should be `Evd.univ_flexible` most of the time. This means the universe levels of polymorphic objects in the constr might get instantiated instead of generating equality constraints (Evd.univ_rigid does that). On this issue, I recommend forcing commands to take `global_reference`s only, the user can declare his specialized terms used as hints as constants and this is cleaner. Alas, backward-compatibility-wise, this is the only solution I found. In the case of global_references only, it's just a matter of using `Evd.fresh_global` / `pf_constr_of_global` to let the system take care of universes. The universe graph ------------------ To accommodate universe polymorphic definitions, the graph structure in kernel/univ.ml was modified. The new API forces every universe to be declared before it is mentioned in any constraint. This forces to declare every universe to be >= Set or > Set. Every universe variable introduced during elaboration is >= Set. Every _global_ universe is now declared explicitly > Set, _after_ typechecking the definition. In polymorphic definitions Type@{i} ranges over Set and any other universe j. However, at instantiation time for polymorphic references, one can try to instantiate a universe parameter with Prop as well, if the instantiated constraints allow it. 
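The refresh-then-merge pattern described above, `fresh_universe_context_set_instance` followed by `merge_context_set`, can be sketched as follows. Only those two signatures come from this document; `Vars.subst_univs_level_constr`, the use of `Evd.merge_context_set` as the concrete home of `merge_context_set`, and the assumption that the module providing the fresh-instance function is opened are all assumptions made for the example.

~~~ocaml
(* Hedged sketch of the refresh-then-merge pattern for a stored
   [constr Univ.in_universe_context_set].  The two signatures used are the
   ones quoted above; the substitution helper and module paths are assumed. *)
let use_stored_constr sigma (c, ctx) =
  (* Rename the stored universe variables to fresh ones. *)
  let subst, ctx = fresh_universe_context_set_instance ctx in
  (* Apply the renaming to the term itself (assumed helper). *)
  let c = Vars.subst_univs_level_constr subst c in
  (* Record the fresh universes and constraints in the evar_map;
     univ_flexible is the rigidity recommended above. *)
  let sigma = Evd.merge_context_set Evd.univ_flexible sigma ctx in
  (sigma, c)
~~~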
The graph invariants ensure that no universe i can be set lower than Set, so the chain of universes always bottoms down at Prop < Set. Modules ------- One has to think of universes in modules as being globally declared, so when including a module (type) which declares a type i (e.g. through a parameter), we get back a copy of i and not some fresh universe. Incompatibilities ----------------- Old-style universe polymorphic definitions were implemented by taking advantage of the fact that elaboration (i.e., pretyping and unification) were _not_ universe aware, so some of the constraints generated during pretypechecking would be forgotten. In the current setting, this is not possible, as unification ensures that the substitution is built is entirely well-typed, even w.r.t universes. This means that some terms that type-checked before no longer do, especially projections of the pair: ~~~coq @fst ?x ?y : prod ?x ?y : Type (max(Datatypes.i, Datatypes.j)). ~~~ The "template universe polymorphic" variables i and j appear during typing without being refreshed, meaning that they can be lowered (have upper constraints) with user-introduced universes. In most cases this won't work, so ?x and ?y have to be instantiated earlier, either from the type of the actual projected pair term (some t : prod A B) or the typing constraint. Adding the correct type annotations will always fix this. Unification semantics --------------------- In Ltac, matching with: - a universe polymorphic constant `c` matches any instance of the constant. - a variable ?x already bound to a term `t` (non-linear pattern) uses strict equality of universes (e.g., Type@{i} and Type@{j} are not equal). In tactics: - `change foo with bar`, `pattern foo` will unify all instances of `foo` (and convert them with `bar`). This might incur unifications of universes. `change` uses conversion while `pattern` only does syntactic matching up-to unification of universes. - `apply`, `refine` use unification up to universes. coq-8.15.0/dev/doc/xml-protocol.md000066400000000000000000000714561417001151100166770ustar00rootroot00000000000000# Coq XML Protocol This document is based on documentation originally written by CJ Bell for his [vscoq](https://github.com/coq-community/vscoq/) project. Here, the aim is to provide a "hands on" description of the XML protocol that coqtop and IDEs use to communicate. The protocol first appeared with Coq 8.5, and is used by CoqIDE, [vscoq](https://github.com/coq-community/vscoq/), and other user interfaces. A somewhat out-of-date description of the async state machine is [documented here](https://github.com/ejgallego/jscoq/blob/v8.10/etc/notes/coq-notes.md). OCaml types for the protocol can be found in the [`ide/protocol/interface.ml` file](/ide/protocol/interface.ml). Changes to the XML protocol are documented as part of [`dev/doc/changes.md`](/dev/doc/changes.md). 
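Before the command reference, a small client-side sketch: the `About` call described below reports the protocol version as a `YYYYMMDD` string, and an IDE can gate features by comparing it numerically. The function name and cutoff value here are invented for the example; only the sample version string comes from the description below.

```ocaml
(* Illustrative only: gating client features on the YYYYMMDD protocol
   version reported by the About call described below. *)
let supports ~(protocol_version : string) ~(since : int) : bool =
  match int_of_string_opt protocol_version with
  | Some v -> v >= since
  | None -> false

(* true for the 8.6-era protocol version quoted below *)
let _ = supports ~protocol_version:"20150913" ~since:20150913
```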
* [Commands](#commands) - [About](#command-about) - [Add](#command-add) - [EditAt](#command-editAt) - [Init](#command-init) - [Goal](#command-goal) - [Status](#command-status) - [Query](#command-query) - [Evars](#command-evars) - [Hints](#command-hints) - [Search](#command-search) - [GetOptions](#command-getoptions) - [SetOptions](#command-setoptions) - [MkCases](#command-mkcases) - [StopWorker](#command-stopworker) - [PrintAst](#command-printast) - [Annotate](#command-annotate) - [Db_cmd](#command-db_cmd) - [Db_upd_bpts](#command-db_upd_bpts) - [Db_continue](#command-db_continue) - [Db_stack](#command-db_stack) - [Db_vars](#command-db_vars) * [Feedback messages](#feedback) - [Added Axiom](#feedback-addedaxiom) - [Processing](#feedback-processing) - [Processed](#feedback-processed) - [Incomplete](#feedback-incomplete) - [Complete](#feedback-complete) - [GlobRef](#feedback-globref) - [Error](#feedback-error) - [InProgress](#feedback-inprogress) - [WorkerStatus](#feedback-workerstatus) - [File Dependencies](#feedback-filedependencies) - [File Loaded](#feedback-fileloaded) - [Message](#feedback-message) - [Custom](#feedback-custom) * [Ltac-debug messages](ltac_debug) * [Highlighting Text](#highlighting) Sentences: each command sent to Coqtop is a "sentence"; they are typically terminated by ".\s" (followed by whitespace or EOF). Examples: "Lemma a: True.", "(* asdf *) Qed.", "auto; reflexivity." In practice, the command sentences sent to Coqtop are terminated at the "." and start with any previous whitespace. Each sentence is assigned a unique stateId after being sent to Coq (via Add). States: * Processing: has been received by Coq and has no obvious syntax error (that would prevent future parsing) * Processed: * InProgress: * Incomplete: the validity of the sentence cannot be checked due to a prior error * Complete: * Error: the sentence has an error State ID 0 is reserved as a 'dummy' state. -------------------------- ## Commands ### **About(unit)** Returns information about the protocol and build dates for Coqtop. ``` ``` #### *Returns* ```html 8.6 20150913 December 2016 Dec 23 2016 16:16:30 ``` The string fields are the Coq version, the protocol version, the release date, and the compile time of Coqtop. The protocol version is a date in YYYYMMDD format, where "20150913" corresponds to Coq 8.6. An IDE that wishes to support multiple Coq versions can use the protocol version information to know how to handle output from Coqtop. ### **Add(command: string, editId: integer, stateId: integer, verbose: boolean, bp: integer, line_nb: integer, bol_pos: integer)** Adds a toplevel command (e.g. vernacular, definition, tactic) to the given state. `verbose` controls whether out-of-band messages will be generated for the added command (e.g. "foo is assumed" in response to adding "Axiom foo: nat."). `bp`, `line_nb` and `bol_pos` are the `Loc.t` values relative to the IDE buffer. ```html ${command} ${editId} ${bp} ${line_nb} ${bol_pos} ``` #### *Returns* * The added command is given a fresh `stateId` and becomes the next "tip". ```html ``` * When closing a focused proof (in the middle of a bunch of interpreted commands), the `Qed` will be assigned a prior `stateId` and `nextStateId` will be the id of an already-interpreted state that should become the next tip. ```html ${message} ``` * Failure: - Syntax error. Error offsets are byte offsets (not character offsets) with respect to the start of the sentence, starting at 0. 
```html ${errorMessage} ``` - Another kind of error, for example, Qed with a pending goal. ```html ${errorMessage} ``` ------------------------------- ### **EditAt(stateId: integer)** Moves current tip to `${stateId}`, such that commands may be added to the new state ID. ```html ``` #### *Returns* * Simple backtrack; focused stateId becomes the parent state ```html ``` * New focus; focusedQedStateId is the closing Qed of the new focus; sentences between the two should be cleared ```html ``` * Failure: If `stateId` is in an error-state and cannot be jumped to, `errorFreeStateId` is the parent state of `stateId` that should be edited instead. ```html ${errorMessage} ``` ------------------------------- ### **Init()** * No options. ```html ``` * With options. Looking at [ide_slave.ml](https://github.com/coq/coq/blob/c5d0aa889fa80404f6c291000938e443d6200e5b/ide/ide_slave.ml#L355), it seems that `options` is just the name of a script file, whose path is added via `Add LoadPath` to the initial state. ```html ``` Providing the script file enables Coq to use .aux files created during compilation. Those file contain timing information that allow Coq to choose smartly between asynchronous and synchronous processing of proofs. #### *Returns* * The initial stateId (not associated with a sentence) ```html ``` ------------------------------- ### **Goal()** ```html ``` #### *Returns* * If there is a goal. `shelvedGoals` and `abandonedGoals` have the same structure as the first set of (current/foreground) goals. `backgroundGoals` contains a list of pairs of lists of goals (list ((list Goal)*(list Goal))); it represents a "focus stack" ([see code for reference](https://github.com/coq/coq/blob/trunk/engine/proofview.ml#L113)). Each time a proof is focused, it will add a new pair of lists-of-goals. The first pair is the most nested set of background goals, the last pair is the top level set of background goals. The first list in the pair is in reverse order. Each time you focus the goal (e.g. using `Focus` or a bullet), a new pair will be prefixed to the list. ```html ``` For example, this script: ```coq Goal P -> (1=1/\2=2) /\ (3=3 /\ (4=4 /\ 5=5) /\ 6=6) /\ 7=7. intros. split; split. (* current visible goals are [1=1, 2=2, 3=3/\(4=4/\5=5)/\6=6, 7=7] *) Focus 3. (* focus on 3=3/\(4=4/\5=5)/\6=6; bg-before: [1=1, 2=2], bg-after: [7=7] *) split; [ | split ]. (* current visible goals are [3=3, 4=4/\5=5, 6=6] *) Focus 2. (* focus on 4=4/\5=5; bg-before: [3=3], bg-after: [6=6] *) * (* focus again on 4=4/\5=5; bg-before: [], bg-after: [] *) split. (* current visible goals are [4=4,5=5] *) ``` should generate the following goals structure: ``` goals: [ P|-4=4, P|-5=5 ] background: [ ( [], [] ), (* bullet with one goal has no before or after background goals *) ( [ P|-3=3 ], [ P|-6=6 ] ), (* Focus 2 *) ( [ P|-2=2, P|-1=1 ], [ P|-7=7 ] ) (* Focus 3; notice that 1=1 and 2=2 are reversed *) ] ``` Pseudocode for listing all of the goals in order: `rev (flat_map fst background) ++ goals ++ flat_map snd background`. * No goal: ```html ``` ------------------------------- ### **Status(force: bool)** Returns information about the current proofs. CoqIDE typically sends this message with `force = false` after each sentence, and with `force = true` if the user wants to force the checking of all proofs (wheels button). In terms of the STM API, `force` triggers a `Join`. 
```html ``` #### *Returns* * ```html ${path} ${proofName} ${allProofs} ${proofNumber} ``` ------------------------------- ### **Query(route_id: integer, query: string, stateId: integer)** `routeId` can be used to distinguish the result of a particular query, `stateId` should be set to the state the query should be run. ```html ${query} ``` #### *Returns* * ```html ${message} ``` Before 8.8, `Query` only executed the first command present in the `query` string; starting with 8.8, the caller may include several statements. This is useful for instance for temporarily setting an option and then executing a command. ------------------------------- ### **Evars()** ```html ``` #### *Returns* * ```html ``` ------------------------------- ### **Hints()** ```html ``` #### *Returns* * ```html ``` ------------------------------- ### **Search([(constraintTypeN: string, constraintValueN: string, positiveConstraintN: boolean)])** Searches for objects that satisfy a list of constraints. If `${positiveConstraint}` is `false`, then the constraint is inverted. ```html ${constraintValue1} ... bool_rect ``` #### *Returns* * ```html ${metaInfo} ... ${name} ${definition} ... ``` ##### Types of constraints: * Name pattern: `${constraintType} = "name_pattern"`; `${constraintValue}` is a regular expression string. * Type pattern: `${constraintType} = "type_pattern"`; `${constraintValue}` is a pattern (???: an open gallina term) string. * SubType pattern: `${constraintType} = "subtype_pattern"`; `${constraintValue}` is a pattern (???: an open gallina term) string. * In module: `${constraintType} = "in_module"`; `${constraintValue}` is a list of strings specifying the module/directory structure. * Include blacklist: `${constraintType} = "include_blacklist"`; `${constraintValue}` *is omitted*. ------------------------------- ### **GetOptions()** ```html ``` #### *Returns* * ```html ${string1}... ${sync} ${deprecated} ${name} ${option_value} ... ``` ------------------------------- ### **SetOptions(options)** Sends a list of option settings, where each setting roughly looks like: `([optionNamePart1, ..., optionNamePartN], value)`. ```html optionNamePart1 ... optionNamePartN ... Printing Width ``` CoqIDE sends the following settings (defaults in parentheses): ``` Printing Width : (60), Printing Coercions : (), Printing Matching : (...true...) Printing Notations : (...true...) Printing Existential Instances : (...false...) Printing Implicit : (...false...) Printing All : (...false...) Printing Universes : (...false...) ``` #### *Returns* * ```html ``` ------------------------------- ### **MkCases(...)** ```html ... ``` #### *Returns* * ```html ${string1}... ... ``` ------------------------------- ### **StopWorker(worker: string)** ```html ${worker} ``` #### *Returns* * ```html ``` ------------------------------- ### **PrintAst(stateId: integer)** ```html ``` #### *Returns* * ```html ... ${token} ... ... ... ``` ------------------------------- ### **Annotate(annotation: string)** ```html ${annotation} ``` #### *Returns* * take `Theorem plus_0_r : forall n : nat, n + 0 = n.` as an example. ```html Theorem  plus_0_r :  forall  n :  nat n  +   0  =   n . ``` ------------------------------- ### **Db_cmd(user_input: string)** ```html ${user_input} ``` #### *Returns* * `h` directs Coq to process the debugger command "h". It returns unit. This call is processed only when the debugger is stopped and has just sent a `prompt` message. 
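The remaining debugger calls follow below; as a hedged client-side sketch, the stepping modes selected by the integer argument of `Db_continue` (next section) might be encoded as a small variant type. The constructor names follow the labels used in the protocol description; the type and function names are invented for the example.

```ocaml
(* Client-side sketch of the Db_continue stepping modes documented in the
   next section.  Only the integer encoding (0..4) comes from the protocol
   description; the OCaml names are illustrative. *)
type db_continue_mode = StepIn | StepOver | StepOut | Continue | Interrupt

let int_of_mode = function
  | StepIn -> 0
  | StepOver -> 1
  | StepOut -> 2
  | Continue -> 3
  | Interrupt -> 4
```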
------------------------------- ### **Db_upd_bpts(...)** The call passes a list of breakpoints to set or clear. The string is the absolute pathname of the .v file (or "ToplevelInput"), the int is the byte offset within the file and the boolean is true to set a breakpoint and false to clear it. Breakpoints can be updated when Coq is not busy or when Coq is stopped in the debugger. If this message is sent in other states, it will be received and processed when Coq is no longer busy or execution stops in the debugger. ```html /home/proj/coq/ide/coqide/debug.v 22 ``` #### *Returns* * Unit. ------------------------------- ### **Db_continue(option: integer)** Tells Coq to continue processing the proof when it is stopped in the debugger. The integer indicates when the debugger should stop again: ``` 0: StepIn - step one tactic. If it is an Ltac tactic, stop at the first tactic within it 1: StepOver - step over one tactic. if it is an Ltac tactic, don't stop within it 2: StepOut - stop on the first tactic after exiting the current Ltac tactic 3: Continue - continue running until the next breakpoint or the debugger exits 4: Interrupt - generate a User interrupt (for use when stopped in the debugger; otherwise interrupt is sent as a signal) ``` If the debugger encounters a breakpoint during a StepOver or a StepOut, it will stop at the breakpoint. ```html 1 ``` #### *Returns* * Unit. ### **Db_stack()** Returns the Ltac call stack. Each entry has a description of what was called (e.g. the tactic name) plus the absolute pathname of the file and the offset of the call therein. The top of stack is the first entry in the list. Offsets are in bytes, not counts of unicode characters. ```html ``` #### *Returns* ```html vars2.z : ``` ### **Db_vars(frame: integer)** Returns a list of the names and values of the local variables defined in the specified frame of the Ltac call stack. (0 = top of stack, 1, 2, ...). ```html 0 ``` #### *Returns* ```html w 0 : ``` ------------------------------- ## Feedback messages Feedback messages are issued out-of-band, giving updates on the current state of sentences/stateIds, worker-thread status, etc. In the descriptions of feedback syntax below, wherever a `state_id` tag may occur, there may instead be an `edit_id` tag. * Added Axiom: in response to `Axiom`, `admit`, `Admitted`, etc. ```html ``` * Processing ```html ${workerName} ``` * Processed ```html ``` * Incomplete ```html ``` * Complete * GlobRef * Error. Issued, for example, when a processed tactic has failed or is unknown. The error offsets may both be 0 if there is no particular syntax involved. * InProgress ```html 1 ``` * WorkerStatus Ex: `workername = "proofworker:0"` Ex: `status = "Idle"` or `status = "proof: myLemmaName"` or `status = "Dead"` ```html ${workerName} ${status} ``` * File Dependencies. Typically in response to a `Require`. Dependencies are *.vo files. - State `stateId` directly depends on `dependency`: ```html ``` - State `stateId` depends on `dependency` via dependency `sourceDependency` ```xml ${dependency} ``` * File Loaded. For state `stateId`, module `module` is being loaded from `voFileName` ```xml ${module} ${voFileName`} ``` * Message. `level` is one of `{info,warning,notice,error,debug}`. For example, in response to an add `"Axiom foo: nat."` with `verbose=true`, message `foo is assumed` will be emitted in response. ```xml ${message} ``` * Custom. A feedback message that Coq plugins can use to return structured results, including results from Ltac profiling. 
Optionally, `startPos` and `stopPos` define a range of offsets in the document that the message refers to; otherwise, they will be 0. `customTag` is intended as a unique string that identifies what kind of payload is contained in `customXML`. ```xml ${customTag} ${customXML} ``` ------------------------------- ## Ltac-debug messages Ltac-debug messages are issued out-of-band, similar to Feedback messages. The response contains an identifying tag and a ``. Currently these tags are used: * **output** - ordinary output for display in the Messages panel * **goal** - the current goal for the debugger, for display in the Messages panel or elsewhere * **prompt** - output for display in the Messages panel prompting the user to enter a debug command, allowing CoqIDE to display it without appending a newline. It also signals that coqidetop is waiting to receive a debugger-specific message such as [Db_cmd](#command-db_cmd). ```xml prompt : ``` ------------------------------- ## Highlighting Text [Proof diffs](https://coq.inria.fr/distrib/current/refman/proof-engine/proof-handling.html#showing-differences-between-proof-steps) highlight differences between the current and previous proof states in the displayed output. These are represented by tags embedded in output fields of the XML document. There are 4 tags that indicate how the enclosed text should be highlighted: - diff.added - added text - diff.removed - removed text - diff.added.bg - unchanged text in a line that has additions ("bg" for "background") - diff.removed.bg - unchanged text in a line that has removals CoqIDE, Proof General and coqtop currently use 2 shades of green and 2 shades of red as the background color for highlights. Coqtop and CoqIDE also apply underlining and/or strikeout highlighting for the sake of the color blind. For example, `ABC` indicates that "ABC" should be highlighted as added text. Tags can be nested, such as: `A + 1 + B`. IDE code displaying highlighted strings should maintain a stack for nested tags and the associated highlight. Currently the diff code only nests at most 2 tags deep. If an IDE uses other highlights such as text foreground color or italic text, it may need to merge the background color with those other highlights to give the desired (IDE dependent) behavior. The current implementations avoid highlighting white space at the beginning or the end of a line. This gives a better appearance. There may be additional text that is marked with other tags in the output text. IDEs probably should ignore and not display tags they don't recognize. Some internal details about generating tags within Coq (e.g. if you want to add additional tags): Tagged output strings are generated from Pp.t's. Use Pp.tag to highlight a Pp.t using one of the tags listed above. A span of tokens can be marked by using "start." on the first token and "end." on the last token. (Span markers are needed because a span of tokens in the output may not match nesting of layout boxes in the Pp.t.) The conversion from the Pp.t to the XML-tagged string replaces the "start.\*" and "end.\*" tags with the basic tags. 
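As a concrete companion to the paragraph above on generating highlight tags from within Coq, a plugin-side snippet marking a sub-term as added text might look like the sketch below. It only relies on the `Pp.tag` primitive mentioned above together with the standard `Pp.str` and `Pp.(++)` combinators; the value name is arbitrary.

```ocaml
(* Hedged sketch: building output where "1" is tagged as added text for
   proof diffs, using the Pp.tag primitive mentioned above. *)
let highlighted : Pp.t =
  let open Pp in
  str "A + " ++ tag "diff.added" (str "1") ++ str " + B"
```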
coq-8.15.0/dev/dune000066400000000000000000000027461417001151100140230ustar00rootroot00000000000000(library (name top_printers) (public_name coq-core.top_printers) (synopsis "Coq's Debug Printers") (wrapped false) (modules top_printers) (optional) (libraries coq-core.toplevel coq-core.plugins.ltac)) (rule (targets dune-dbg) (deps dune-dbg.in ../checker/coqchk.bc ../topbin/coqc_bin.bc ../ide/coqide/coqide_main.bc ; We require all the OCaml libs to be in place and searchable ; by OCamlfind, this is a bit of a hack but until Dune gets ; proper ocamldebug support we have to live with that. %{lib:coq-core.config:config.cma} %{lib:coq-core.clib:clib.cma} %{lib:coq-core.lib:lib.cma} %{lib:coq-core.kernel:kernel.cma} %{lib:coq-core.vm:coqrun.cma} %{lib:coq-core.vm:../../stublibs/dllcoqrun_stubs.so} %{lib:coq-core.library:library.cma} %{lib:coq-core.engine:engine.cma} %{lib:coq-core.pretyping:pretyping.cma} %{lib:coq-core.gramlib:gramlib.cma} %{lib:coq-core.interp:interp.cma} %{lib:coq-core.proofs:proofs.cma} %{lib:coq-core.parsing:parsing.cma} %{lib:coq-core.printing:printing.cma} %{lib:coq-core.tactics:tactics.cma} %{lib:coq-core.vernac:vernac.cma} %{lib:coq-core.stm:stm.cma} %{lib:coq-core.sysinit:sysinit.cma} %{lib:coq-core.toplevel:toplevel.cma} %{lib:coq-core.plugins.ltac:ltac_plugin.cma} %{lib:coq-core.top_printers:top_printers.cmi} %{lib:coq-core.top_printers:top_printers.cma} %{lib:coq-core.top_printers:../META}) (action (copy dune-dbg.in dune-dbg))) coq-8.15.0/dev/dune-dbg.in000077500000000000000000000012331417001151100151530ustar00rootroot00000000000000#!/usr/bin/env bash # Run in a proper install dune env. case $1 in checker) shift exe=_build/default/checker/coqchk.bc ;; coqide) shift exe=_build/default/ide/coqide/coqide_main.bc ;; coqc) shift exe=_build/default/topbin/coqc_bin.bc ;; coqtop) shift exe=_build/default/topbin/coqtop_byte_bin.bc ;; *) echo "First argument must be one of {coqc,coqtop,checker,coqide}" exit 1 ;; esac emacs="${INSIDE_EMACS:+-emacs}" ocamldebug $emacs $(ocamlfind query -recursive -i-format coq-core.top_printers) -I +threads -I dev $exe "$@" coq-8.15.0/dev/dune-workspace.all000066400000000000000000000003371417001151100165600ustar00rootroot00000000000000(lang dune 2.0) ; Add custom flags here. 
Default developer profile is `dev` (context (opam (switch 4.05.0))) (context (opam (switch 4.05.0+32bit))) (context (opam (switch 4.12.0))) (context (opam (switch 4.12.0+flambda))) coq-8.15.0/dev/dune_db000066400000000000000000000001521417001151100144550ustar00rootroot00000000000000source core_dune.dbg load_printer ltac_plugin.cma load_printer top_printers.cma source top_printers.dbg coq-8.15.0/dev/dune_db_408000066400000000000000000000011761417001151100150570ustar00rootroot00000000000000load_printer threads.cma load_printer str.cma load_printer zarith.cma load_printer config.cma load_printer boot.cma load_printer clib.cma load_printer dynlink.cma load_printer lib.cma load_printer gramlib.cma load_printer coqrun.cma load_printer kernel.cma load_printer library.cma load_printer engine.cma load_printer pretyping.cma load_printer interp.cma load_printer proofs.cma load_printer parsing.cma load_printer printing.cma load_printer tactics.cma load_printer vernac.cma load_printer sysinit.cma load_printer stm.cma load_printer toplevel.cma load_printer ltac_plugin.cma load_printer top_printers.cma source top_printers.dbg coq-8.15.0/dev/dune_db_409000066400000000000000000000011451417001151100150540ustar00rootroot00000000000000load_printer threads.cma load_printer str.cma load_printer zarith.cma load_printer config.cma load_printer boot.cma load_printer clib.cma load_printer lib.cma load_printer gramlib.cma load_printer coqrun.cma load_printer kernel.cma load_printer library.cma load_printer engine.cma load_printer pretyping.cma load_printer interp.cma load_printer proofs.cma load_printer parsing.cma load_printer printing.cma load_printer tactics.cma load_printer vernac.cma load_printer sysinit.cma load_printer stm.cma load_printer toplevel.cma load_printer ltac_plugin.cma load_printer top_printers.cma source top_printers.dbg coq-8.15.0/dev/dynlink.ml000066400000000000000000000033141417001151100151370ustar00rootroot00000000000000 (** Some architectures may have a native ocaml compiler but no native dynlink.cmxa (e.g. ARM). If you still want to build a native coqtop there, you'll need this dummy implementation of Dynlink. 
Compile it and install with: ocamlopt -a -o dynlink.cmxa dynlink.ml sudo cp -i dynlink.cmxa `ocamlopt -where` Then build coq this way: ./configure -natdynlink no && make world *) let is_native = true (* This file will only be given to the native compiler *) type linking_error = | Undefined_global of string | Unavailable_primitive of string | Uninitialized_global of string type error = | Not_a_bytecode_file of string | Inconsistent_import of string | Unavailable_unit of string | Unsafe_file | Linking_error of string * linking_error | Corrupted_interface of string | File_not_found of string | Cannot_open_dll of string | Inconsistent_implementation of string exception Error of error let error_message = function | Not_a_bytecode_file s -> "Native dynamic link not supported (module "^s^")" | _ -> "Native dynamic link not supported" let loadfile : string -> unit = fun s -> raise (Error (Not_a_bytecode_file s)) let loadfile_private = loadfile let adapt_filename s = s let init () = () let allow_only : string list -> unit = fun _ -> () let prohibit : string list -> unit = fun _ -> () let default_available_units : unit -> unit = fun _ -> () let allow_unsafe_modules : bool -> unit = fun _ -> () let add_interfaces : string list -> string list -> unit = fun _ _ -> () let add_available_units : (string * Digest.t) list -> unit = fun _ -> () let clear_available_units : unit -> unit = fun _ -> () let digest_interface : string -> string list -> Digest.t = fun _ _ -> failwith "digest_interface" coq-8.15.0/dev/header.c000066400000000000000000000012431417001151100145300ustar00rootroot00000000000000/************************************************************************/ /* * The Coq Proof Assistant / The Coq Development Team */ /* v * Copyright INRIA, CNRS and contributors */ /* &2 echo "usage: $CALLNAME " >&2 echo "The order of commits is as given to 'git diff'" } if [ "$#" != 2 ]; then usage exit 1 fi BASE_COMMIT="$1" HEAD_COMMIT="$2" tmp=$(mktemp -d) git worktree add "$tmp" "$HEAD_COMMIT" pushd "$tmp" bad_ws=() bad_compile=() while IFS= read -r commit; do echo Checking "$commit" git checkout "$commit" # git diff --check # uses .gitattributes to know what to check if ! git diff --check "${commit}^" "$commit"; then bad_ws+=("$commit") fi if ! make -f Makefile.dune check then bad_compile+=("$commit") fi done < <(git rev-list "$HEAD_COMMIT" --not "$BASE_COMMIT" --) popd git worktree remove "$tmp" # report errors CODE=0 if [ "${#bad_ws[@]}" != 0 ] then >&2 echo "Whitespace errors!" >&2 echo "In commits ${bad_ws[*]}" >&2 echo "If you use emacs, you can prevent this kind of error from reoccurring by installing ws-butler and enabling ws-butler-convert-leading-tabs-or-spaces." >&2 echo CODE=1 fi if [ "${#bad_compile[@]}" != 0 ] then >&2 echo "Compilation errors!" >&2 echo "In commits ${bad_compile[*]}" >&2 echo CODE=1 fi exit $CODE coq-8.15.0/dev/lint-repository.sh000077500000000000000000000026231417001151100166610ustar00rootroot00000000000000#!/usr/bin/env bash # A script to check prettyness over the repository. # lint-commits.sh seeks to prevent the worsening of already present # problems, such as tab indentation in ml files. lint-repository.sh # also seeks to prevent the (re-)introduction of solved problems, such # as newlines at the end of .v files. 
CODE=0 if [[ $(git log -n 1 --pretty='format:%s') == "[CI merge]"* ]]; then # The second parent of bot merges is from the PR, the first is # current master head=$(git rev-parse HEAD^2) else head=$(git rev-parse HEAD) fi # We assume that all non-bot merge commits are from the main branch # For Coq it is extremely rare for this assumption to be broken read -r base < <(git log -n 1 --merges --pretty='format:%H' "$head") dev/lint-commits.sh "$base" "$head" || CODE=1 # Check that the files with 'whitespace' gitattribute end in a newline. # xargs exit status is 123 if any file failed the test echo Checking end of file newlines find . "(" -path ./.git -prune ")" -o -type f -print0 | xargs -0 dev/tools/check-eof-newline.sh || CODE=1 echo Checking overlays dev/tools/check-overlays.sh || CODE=1 echo Checking CACHEKEY dev/tools/check-cachekey.sh || CODE=1 # Check that doc/tools/docgram/fullGrammar is up-to-date echo Checking grammar files { ./configure -profile devel >/dev/null && \ make -j "$NJOBS" -f Makefile.make SHOW='@true ""' doc_gram_verify; } || CODE=1 exit $CODE coq-8.15.0/dev/macosify_accel.sh000077500000000000000000000001641417001151100164350ustar00rootroot00000000000000#!/usr/bin/sed -f s/^;\{0,1\} *\(.*\)\(.*\)$/\1\2/ s/^;\{0,1\} *\(.*\)\(.*\)$/\1\2/ coq-8.15.0/dev/nixpkgs.nix000066400000000000000000000003011417001151100153310ustar00rootroot00000000000000import (fetchTarball { url = "https://github.com/NixOS/nixpkgs/archive/a6a0964eacef611f364ca22256d6882d8670721c.tar.gz"; sha256 = "0h2cv4zdlmf1di55i64vcavc0rfh7cvbnax4wll2vbjj7bahawyc"; }) coq-8.15.0/dev/ocamldebug-coq.run000066400000000000000000000031641417001151100165500ustar00rootroot00000000000000#!/bin/sh # Wrapper around ocamldebug for Coq # This file is to be launched via the generated script ocamldebug-coq, # which will set the env variables $OCAMLDEBUG, $CAMLP5LIB, $COQTOP # Anyway, just in case someone tries to use this script directly, # here are some reasonable default values [ -z "$OCAMLDEBUG" ] && OCAMLDEBUG=ocamldebug [ -z "$COQTOP" -a -d "$PWD/kernel" ] && COQTOP=$PWD [ -z "$COQTOP" -a -d "$PWD/../kernel" ] && COQTOP=`dirname $PWD` export CAML_LD_LIBRARY_PATH=$COQTOP/kernel/byterun:$CAML_LD_LIBRARY_PATH exec $OCAMLDEBUG \ -I +threads \ -I $COQTOP \ -I $COQTOP/config -I $COQTOP/printing -I $COQTOP/grammar -I $COQTOP/clib \ -I $COQTOP/gramlib/.pack \ -I $COQTOP/lib -I $COQTOP/kernel -I $COQTOP/kernel/byterun \ -I $COQTOP/library -I $COQTOP/engine -I $COQTOP/sysinit \ -I $COQTOP/pretyping -I $COQTOP/parsing -I $COQTOP/vernac \ -I $COQTOP/interp -I $COQTOP/proofs -I $COQTOP/tactics -I $COQTOP/stm \ -I $COQTOP/toplevel -I $COQTOP/dev -I $COQTOP/config -I $COQTOP/ltac \ -I $COQTOP/plugins/cc -I $COQTOP/plugins/dp \ -I $COQTOP/plugins/extraction -I $COQTOP/plugins/field \ -I $COQTOP/plugins/firstorder \ -I $COQTOP/plugins/funind -I $COQTOP/plugins/groebner \ -I $COQTOP/plugins/interface -I $COQTOP/plugins/micromega \ -I $COQTOP/plugins/omega -I $COQTOP/plugins/quote \ -I $COQTOP/plugins/ring \ -I $COQTOP/plugins/rtauto \ -I $COQTOP/plugins/subtac -I $COQTOP/plugins/syntax \ -I $COQTOP/plugins/xml -I $COQTOP/plugins/ltac \ -I $COQTOP/ide \ $(ocamlfind query -recursive -i-format zarith) \ "$@" coq-8.15.0/dev/shim/000077500000000000000000000000001417001151100140745ustar00rootroot00000000000000coq-8.15.0/dev/shim/dune000066400000000000000000000052311417001151100147530ustar00rootroot00000000000000(rule (targets coqtop-prelude) (deps %{bin:coqtop} %{project_root}/theories/Init/Prelude.vo) (action (with-stdout-to coqtop-prelude (progn 
(echo "#!/usr/bin/env bash\n") (bash "echo '\"$(dirname \"$0\")\"/%{bin:coqtop} -coqlib \"$(dirname \"$0\")/%{project_root}\" \"$@\"'") (run chmod +x %{targets}))))) (rule (targets coqc-prelude) (deps %{bin:coqc} %{project_root}/theories/Init/Prelude.vo) (action (with-stdout-to coqc-prelude (progn (echo "#!/usr/bin/env bash\n") (bash "echo '\"$(dirname \"$0\")\"/%{bin:coqc} -coqlib \"$(dirname \"$0\")\"/%{project_root} \"$@\"'") (run chmod +x %{targets}))))) (rule (targets coqbyte-prelude) (deps %{project_root}/theories/Init/Prelude.vo %{bin:coqtop.byte} %{lib:coq-core.config:config.cma} %{lib:coq-core.clib:clib.cma} %{lib:coq-core.lib:lib.cma} %{lib:coq-core.kernel:kernel.cma} %{lib:coq-core.vm:coqrun.cma} %{lib:coq-core.vm:../../stublibs/dllcoqrun_stubs.so} %{lib:coq-core.library:library.cma} %{lib:coq-core.engine:engine.cma} %{lib:coq-core.pretyping:pretyping.cma} %{lib:coq-core.gramlib:gramlib.cma} %{lib:coq-core.interp:interp.cma} %{lib:coq-core.proofs:proofs.cma} %{lib:coq-core.parsing:parsing.cma} %{lib:coq-core.printing:printing.cma} %{lib:coq-core.tactics:tactics.cma} %{lib:coq-core.vernac:vernac.cma} %{lib:coq-core.stm:stm.cma} %{lib:coq-core.sysinit:sysinit.cma} %{lib:coq-core.toplevel:toplevel.cma} %{lib:coq-core.plugins.number_string_notation:number_string_notation_plugin.cma} %{lib:coq-core.plugins.tauto:tauto_plugin.cma} %{lib:coq-core.plugins.cc:cc_plugin.cma} %{lib:coq-core.plugins.firstorder:firstorder_plugin.cma} %{lib:coq-core.plugins.ltac:ltac_plugin.cma}) (action (with-stdout-to %{targets} (progn (echo "#!/usr/bin/env bash\n") (bash "echo '\"$(dirname \"$0\")\"/%{bin:coqtop.byte} -coqlib \"$(dirname \"$0\")\"/%{project_root} \"$@\"'") (run chmod +x %{targets}))))) (rule (targets coqide-prelude) (deps ; without this if the gtk libs are not available dune can try to use ; coqide from PATH instead of giving a nice error ; there is no problem with the other shims since they don't depend on optional build products %{project_root}/ide/coqide/coqide_main.exe %{bin:coqqueryworker.opt} %{bin:coqtacticworker.opt} %{bin:coqproofworker.opt} %{project_root}/theories/Init/Prelude.vo %{project_root}/coqide-server.install %{project_root}/coqide.install) (action (with-stdout-to coqide-prelude (progn (echo "#!/usr/bin/env bash\n") (bash "echo '\"$(dirname \"$0\")\"/%{bin:coqide} -coqlib \"$(dirname \"$0\")\"/%{project_root} \"$@\"'") (run chmod +x %{targets}))))) coq-8.15.0/dev/tools/000077500000000000000000000000001417001151100142745ustar00rootroot00000000000000coq-8.15.0/dev/tools/backport-pr.sh000077500000000000000000000066011417001151100170620ustar00rootroot00000000000000#!/usr/bin/env bash set -e if [[ $# == 0 ]]; then echo "Usage: $0 [--no-conflict] [--no-signature-check] [--stop-before-merging] prnum" exit 1 fi while [[ $# -gt 0 ]]; do case "$1" in --no-conflict) NO_CONFLICTS="true" shift ;; --no-signature-check) NO_SIGNATURE_CHECK="true" shift ;; --stop-before-merging) STOP_BEFORE_MERGING="true" shift ;; *) if [[ "$PRNUM" != "" ]]; then echo "PRNUM was already set to $PRNUM and is now being overridden with $1." fi PRNUM="$1" shift esac done REMOTE=$(git config --get "branch.master.remote" || true) if [ -z "$REMOTE" ]; then echo "Branch master has no remote. Using the local state of the master branch instead." MASTER=master else MASTER="$REMOTE/master" fi if ! git log $MASTER --grep "Merge PR #$PRNUM" | grep "." > /dev/null; then echo "PR #${PRNUM} does not exist." 
exit 1 fi SIGNATURE_STATUS=$(git log $MASTER --grep "Merge PR #$PRNUM" --format="%G?") git log $MASTER --grep "Merge PR #$PRNUM" --format="%GG" if [[ "$NO_SIGNATURE_CHECK" != "true" && "$SIGNATURE_STATUS" != "G" ]]; then echo read -p "Merge commit does not have a good (valid) signature. Bypass? [y/N] " -n 1 -r echo if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1 fi fi BRANCH=backport-pr-${PRNUM} RANGE=$(git log $MASTER --grep "Merge PR #$PRNUM" --format="%P" | sed 's/ /../') MESSAGE=$(git log $MASTER --grep "Merge PR #$PRNUM" --format="%s" | sed 's/Merge/Backport/') if [[ "$(git rev-parse --abbrev-ref HEAD)" == "$BRANCH" ]]; then if ! git cherry-pick --continue; then echo "Please fix the conflicts, then relaunch the script." exit 1 fi git checkout - elif git checkout -b "$BRANCH"; then if ! git cherry-pick -x "${RANGE}"; then if [[ "$NO_CONFLICTS" == "true" ]]; then git status echo "Conflicts! Aborting..." git cherry-pick --abort git checkout - git branch -d "$BRANCH" exit 1 fi echo "Please fix the conflicts, then relaunch the script." exit 1 fi git checkout - else echo read -p "Skip directly to merging phase? [y/N] " -n 1 -r echo if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1 fi fi if ! git diff --exit-code HEAD "${BRANCH}" -- "*.mli"; then echo read -p "Some mli files are modified. Bypass? [y/N] " -n 1 -r echo if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1 fi fi if [[ "$STOP_BEFORE_MERGING" == "true" ]]; then exit 0 fi git merge -S --no-ff "${BRANCH}" -m "${MESSAGE}" git branch -d "${BRANCH}" # To-Do: # - Support for backporting a PR before it is merged # - Automatically backport all PRs in the "Waiting to be backported" column using a command like: # $ curl -s -H "Authorization: token ${GITHUB_TOKEN}" -H "Accept: application/vnd.github.inertia-preview+json" https://api.github.com/projects/columns/1358120/cards | jq -r '.[].content_url' | grep issue | sed 's/^.*issues\/\([0-9]*\)$/\1/' | tac # (The ID of the column must first be obtained through https://api.github.com/repos/coq/coq/projects then https://api.github.com/projects/819866/columns.) # - Then move each of the backported PR to the subsequent columns automatically as well... coq-8.15.0/dev/tools/change-header000077500000000000000000000024531417001151100167010ustar00rootroot00000000000000#!/bin/sh #This script changes the header of .ml* files if [ ! $# = 2 ]; then echo Usage: change-header old-header-file new-header-file exit 1 fi oldheader=$1 newheader=$2 if [ ! -f $oldheader ]; then echo Cannot read file $oldheader; exit 1; fi if [ ! -f $newheader ]; then echo Cannot read file $newheader; exit 1; fi n=$(wc -l $oldheader | sed -e "s/ *\([0-9]*\).*/\1/g") nsucc=$(expr $n + 1) modified=0 kept=0 for i in $(git grep --name-only --fixed-strings "$(head -1 $oldheader)"); do headline=$(head -n 1 $i) if $(echo $headline | grep "(\* -\*- .* \*)" > /dev/null) || $(echo $headline | grep "^#\!" 
> /dev/null); then # Has header head -n +$nsucc $i | tail -n $n > $i.head.tmp$$ hasheadline=1 nnext=$(expr $nsucc + 1) else head -n +$n $i > $i.head.tmp$$ hasheadline=0 nnext=$nsucc fi if diff -a -q $oldheader $i.head.tmp$$ > /dev/null; then echo "$i: header changed" if [ $hasheadline = 1 ]; then echo $headline > $i.tmp$$ else touch $i.tmp$$ fi cat $newheader >> $i.tmp$$ tail -n +$nnext $i >> $i.tmp$$ mv $i.tmp$$ $i modified=$(expr $modified + 1) else echo "$i: header unchanged" kept=$(expr $kept + 1) fi rm $i.head.tmp$$ done echo $modified files updated echo $kept files unchanged coq-8.15.0/dev/tools/check-cachekey.sh000077500000000000000000000004141417001151100174610ustar00rootroot00000000000000#!/bin/sh hash=$(md5sum dev/ci/docker/bionic_coq/Dockerfile | head -c 10) key=$(grep CACHEKEY: .gitlab-ci.yml) keyhash=${key%\"} keyhash=${keyhash##*-} if ! [ "$hash" = "$keyhash" ]; then >&2 echo "Bad CACHEKEY: expected '$hash' but got '$keyhash'" exit 1 fi coq-8.15.0/dev/tools/check-eof-newline.sh000077500000000000000000000020731417001151100201200ustar00rootroot00000000000000#!/usr/bin/env bash # Usage: check-eof-newline.sh [--fix] FILES... # Detect missing end of file newlines for FILES. # Files are skipped if untracked by git and depending on gitattributes. # With --fix, automatically append a newline. # Exit status: # Without --fix: 1 if any file had a missing newline, 0 otherwise. # With --fix: 1 if any non writable file had a missing newline, 0 otherwise. FIX= if [ "$1" = --fix ]; then FIX=1 shift fi CODE=0 for f in "$@"; do if git ls-files --error-unmatch "$f" >/dev/null 2>&1 && \ git check-attr whitespace -- "$f" | grep -q -v -e 'unset$' -e 'unspecified$' && \ [ -n "$(tail -c 1 "$f")" ] then if [ -n "$FIX" ]; then if [ -w "$f" ]; then echo >> "$f" echo "Newline appended to file $f!" else echo "File $f is missing a newline and not writable!" CODE=1 fi else echo "No newline at end of file $f!" CODE=1 fi fi done exit "$CODE" coq-8.15.0/dev/tools/check-overlays.sh000077500000000000000000000004571417001151100175600ustar00rootroot00000000000000#!/usr/bin/env bash for f in $(git ls-files "dev/ci/user-overlays/") do if ! { [[ "$f" = dev/ci/user-overlays/README.md ]] || [[ "$f" == *.sh ]]; } then >&2 echo "Bad overlay '$f'." >&2 echo "User overlays need to have extension .sh to be picked up!" exit 1 fi done coq-8.15.0/dev/tools/check-owners-pr.sh000077500000000000000000000014511417001151100176430ustar00rootroot00000000000000#!/usr/bin/env sh usage() { { echo "usage: $0 PR [ARGS]..." echo "A wrapper around check-owners.sh to check owners for a PR." echo "Assumes upstream is the canonical Coq repository." echo "Assumes the PR is against master." echo echo " PR: PR number" echo " ARGS: passed through to check-owners.sh" } >&2 } case "$1" in "--help"|"-h") usage if [ $# = 1 ]; then exit 0; else exit 1; fi;; "") usage exit 1;; esac PR="$1" shift # this puts both refs in the FETCH_HEAD file but git rev-parse will use the first git fetch upstream "+refs/pull/$PR/head" master head=$(git rev-parse FETCH_HEAD) base=$(git merge-base upstream/master "$head") git diff --name-only -z "$base" "$head" | xargs -0 dev/tools/check-owners.sh "$@" coq-8.15.0/dev/tools/check-owners.sh000077500000000000000000000074631417001151100172350ustar00rootroot00000000000000#!/usr/bin/env bash # Determine CODEOWNERS of the files given in argument # For a given commit range: # git diff --name-only -z COMMIT1 COMMIT2 | xargs -0 dev/tools/check-owners.sh [opts] # NB: gitignore files will be messed up if you interrupt the script. 
# You should be able to just move the .gitignore.bak files back manually. usage() { { echo "usage: $0 [--show-patterns] [--owner OWNER] [FILE]..." echo " --show-patterns: instead of printing file names print the matching patterns (more compact)" echo " --owner: show only files/patterns owned by OWNER (use Nobody to see only non-owned files)" } >&2 } case "$1" in "--help"|"-h") usage if [ $# = 1 ]; then exit 0; else exit 1; fi esac if ! [ -e .github/CODEOWNERS ]; then >&2 echo "No CODEOWNERS set up or calling from wrong directory." exit 1 fi files=() show_patterns=false target_owner="" while [[ "$#" -gt 0 ]]; do case "$1" in "--show-patterns") show_patterns=true shift;; "--owner") if [[ "$#" = 1 ]]; then >&2 echo "Missing argument to --owner" usage exit 1 elif [[ "$target_owner" != "" ]]; then >&2 echo "Only one --owner allowed" usage exit 1 fi target_owner="$2" shift 2;; *) files+=("$@") break;; esac done # CODEOWNERS uses .gitignore patterns so we want to use git to parse it # The only available tool for that is git check-ignore # However it provides no way to use alternate .gitignore files # so we rename them temporarily find . -name .gitignore -print0 | while IFS= read -r -d '' f; do if [ -e "$f.bak" ]; then >&2 echo "$f.bak exists!" exit 1 else mv "$f" "$f.bak" fi done # CODEOWNERS is not quite .gitignore patterns: # after the pattern is the owner (space separated) # git would interpret that as a big pattern containing spaces # so we create a valid .gitignore by removing all but the first field while read -r pat _; do printf '%s\n' "$pat" >> .gitignore done < .github/CODEOWNERS # associative array [file => owner] declare -A owners for f in "${files[@]}"; do data=$(git check-ignore --verbose --no-index "./$f") code=$? if [[ "$code" = 1 ]] || ! [[ "$data" =~ .gitignore:.* ]] ; then # no match, or match from non tracked gitignore (eg global gitignore) if [ "$target_owner" != "" ] && [ "$target_owner" != Nobody ] ; then owner="" else owner="Nobody" pat="$f" # no patterns for unowned files fi else # data looks like [.gitignore:$line:$pattern $file] # extract the line to look it up in CODEOWNERS data=${data#'.gitignore:'} line=${data%%:*} # NB: supports multiple owners # Does not support secondary owners declared in comment read -r pat fowners < <(sed "${line}q;d" .github/CODEOWNERS) owner="" if [ "$target_owner" != "" ]; then for o in $fowners; do # do not quote: multiple owners possible if [ "$o" = "$target_owner" ]; then owner="$o" fi done else owner="$fowners" fi fi if [ "$owner" != "" ]; then if $show_patterns; then owners[$pat]="$owner" else owners[$f]="$owner" fi fi done for f in "${!owners[@]}"; do printf '%s: %s\n' "$f" "${owners[$f]}" done | sort -k 2 -k 1 # group by owner # restore gitignore files rm .gitignore find . -name .gitignore.bak -print0 | while IFS= read -r -d '' f; do base=${f%.bak} if [ -e "$base" ]; then >&2 echo "$base exists!" 
else mv "$f" "$base" fi done coq-8.15.0/dev/tools/coqdev.el000066400000000000000000000155501417001151100161050ustar00rootroot00000000000000;;; coqdev.el --- Emacs helpers for Coq development -*- lexical-binding:t -*- ;; Copyright (C) 2018 The Coq Development Team ;; Maintainer: coqdev@inria.fr ;;; Commentary: ;; Helpers to set compilation commands, proof general variables, etc ;; for Coq development ;; You can disable individual features without editing this file by ;; using `remove-hook', for instance ;; (remove-hook 'hack-local-variables-hook #'coqdev-setup-compile-command) ;;; Installation: ;; To use this, with coqdev.el located at /path/to/coqdev.el, add the ;; following to your init: ;; (add-to-list 'load-path "/path/to/coqdev/") ;; (require 'coqdev) ;; If you load this file from a git repository, checking out an old ;; commit will make it disappear and cause errors for your Emacs ;; startup. To ignore those errors use (require 'coqdev nil t). If you ;; check out a malicious commit Emacs startup would allow it to run ;; arbitrary code, to avoid this you can copy coqdev.el to any ;; location and adjust the load path accordingly (of course if you run ;; ./configure to compile Coq it is already too late). ;;; Code: (defun coqdev-default-directory () "Return the Coq repository containing `default-directory'." (let ((dir (locate-dominating-file default-directory "coq-core.opam"))) (when dir (expand-file-name dir)))) (defun coqdev-setup-compile-command () "Setup `compile-command' for Coq development." (let ((dir (coqdev-default-directory))) ;; we add a space at the end to make it easy to add arguments (eg -j or target) (when dir (setq-local compile-command (concat "make -C " (shell-quote-argument dir) " "))))) (add-hook 'hack-local-variables-hook #'coqdev-setup-compile-command) (defvar camldebug-command-name) ; from camldebug.el (caml package) (defvar ocamldebug-command-name) ; from ocamldebug.el (tuareg package) (defun coqdev-setup-camldebug () "Setup ocamldebug for Coq development. Specifically `camldebug-command-name' and `ocamldebug-command-name'." (let ((dir (coqdev-default-directory))) (when dir (setq-local camldebug-command-name (concat dir "dev/ocamldebug-coq")) (setq-local ocamldebug-command-name (concat dir "dev/ocamldebug-coq"))))) (add-hook 'hack-local-variables-hook #'coqdev-setup-camldebug) (defun coqdev-setup-tags () "Setup `tags-file-name' for Coq development." (let ((dir (coqdev-default-directory))) (when dir (setq-local tags-file-name (concat dir "TAGS"))))) (add-hook 'hack-local-variables-hook #'coqdev-setup-tags) (defvar coq-prog-args) (defvar coq-prog-name) ;; Lets us detect whether there are file local variables ;; even though PG sets it with `setq' when there's a _Coqproject. ;; Also makes sense generally, so might make it into PG someday. (make-variable-buffer-local 'coq-prog-args) (setq-default coq-prog-args nil) (defun coqdev-setup-proofgeneral () "Setup Proofgeneral variables for Coq development. Note that this function is executed before _Coqproject is read if it exists." (let ((dir (coqdev-default-directory))) (when dir (setq-local coq-prog-name (concat dir "_build/default/dev/shim/coqtop-prelude"))))) (add-hook 'hack-local-variables-hook #'coqdev-setup-proofgeneral) (defvar coqdev-ocamldebug-command "dune exec dev/dune-dbg" "Command run by `coqdev-ocamldebug'") (defun coqdev-ocamldebug () "Runs a command in an ocamldebug buffer." 
(interactive) (let* ((dir (read-directory-name "Run from directory: " (coqdev-default-directory))) (name "ocamldebug-coq") (buffer-name (concat "*" name "*"))) (pop-to-buffer buffer-name) (unless (comint-check-proc buffer-name) (setq default-directory dir) (setq coqdev-ocamldebug-command (read-from-minibuffer "Command to run: " coqdev-ocamldebug-command)) (let* ((cmdlist (tuareg--split-args coqdev-ocamldebug-command)) (cmdlist (mapcar #'substitute-in-file-name cmdlist))) (apply #'make-comint name (car cmdlist) nil (cdr cmdlist)) (set-process-filter (get-buffer-process (current-buffer)) #'ocamldebug-filter) (set-process-sentinel (get-buffer-process (current-buffer)) #'ocamldebug-sentinel) (ocamldebug-mode))) (ocamldebug-set-buffer))) ;; This Elisp snippet adds a regexp parser for the format of Anomaly ;; backtraces (coqc -bt ...), to the error parser of the Compilation ;; mode (C-c C-c: "Compile command: ..."). File locations in traces ;; are recognized and can be jumped from easily in the *compilation* ;; buffer. (defvar compilation-error-regexp-alist-alist) (defvar compilation-error-regexp-alist) (with-eval-after-load 'compile (add-to-list 'compilation-error-regexp-alist-alist '(coq-backtrace "^ *\\(?:raise\\|frame\\) @ file \\(\"?\\)\\([^,\" \n\t<>]+\\)\\1,\ lines? \\([0-9]+\\)-?\\([0-9]+\\)?\\(?:$\\|,\ \\(?: characters? \\([0-9]+\\)-?\\([0-9]+\\)?:?\\)?\\)" 2 (3 . 4) (5 . 6))) (add-to-list 'compilation-error-regexp-alist 'coq-backtrace)) (defvar bug-reference-bug-regexp) (defvar bug-reference-url-format) (defun coqdev-setup-bug-reference-mode () "Setup `bug-reference-bug-regexp' and `bug-reference-url-format' for Coq. This does not enable `bug-reference-mode'." (let ((dir (coqdev-default-directory))) (when dir (setq-local bug-reference-bug-regexp "#\\(?2:[0-9]+\\)") (setq-local bug-reference-url-format "https://github.com/coq/coq/issues/%s")))) (add-hook 'hack-local-variables-hook #'coqdev-setup-bug-reference-mode) (defun coqdev-sphinx-quote-coq-refman-region (left right &optional offset beg end) "Add LEFT and RIGHT around the BEG..END. Leave the point after RIGHT. BEG and END default to the bounds of the current region. Leave point OFFSET characters after the left quote (if OFFSET is nil, leave the point after the right quote)." (unless beg (if (region-active-p) (setq beg (region-beginning) end (region-end)) (setq beg (point) end nil))) (save-excursion (goto-char (or end beg)) (insert right)) (save-excursion (goto-char beg) (insert left)) (if (and end (not offset)) ;; Second test handles the ::`` case (goto-char (+ end (length left) (length right))) (goto-char (+ beg (or offset (length left)))))) (defun coqdev-sphinx-rst-coq-action () "Insert a Sphinx role template or quote the current region." 
(interactive) (pcase (read-char "Command [gntm:`]?") (?g (coqdev-sphinx-quote-coq-refman-region ":g:`" "`")) (?n (coqdev-sphinx-quote-coq-refman-region ":n:`" "`")) (?t (coqdev-sphinx-quote-coq-refman-region ":token:`" "`")) (?m (coqdev-sphinx-quote-coq-refman-region ":math:`" "`")) (?: (coqdev-sphinx-quote-coq-refman-region "::`" "`" 1)) (?` (coqdev-sphinx-quote-coq-refman-region "``" "``")))) (provide 'coqdev) ;;; coqdev ends here coq-8.15.0/dev/tools/create_overlays.sh000077500000000000000000000035411417001151100200250ustar00rootroot00000000000000#!/usr/bin/env bash # TODO: # # - Check if the branch already exists in the remote => checkout # - Better error handling # - Just checkout, don't build # - Rebase functionality # set -x set -e set -o pipefail # setup_contrib_git("_build_ci/fiat", "https://github.com/ejgallego/fiat-core.git") setup_contrib_git() { local _DIR=$1 local _GITURL=$2 ( cd $_DIR git checkout -b $OVERLAY_BRANCH || true # allow the branch to exist already git remote add $DEVELOPER_NAME $_GITURL || true # allow the remote to exist already ) } if [ $# -lt 3 ]; then echo "usage: $0 github_username pr_number contrib1 ... contribN" exit 1 fi set +x . dev/ci/ci-basic-overlay.sh set -x DEVELOPER_NAME=$1 shift PR_NUMBER=$1 shift OVERLAY_BRANCH=$(git rev-parse --abbrev-ref HEAD) OVERLAY_FILE=$(mktemp overlay-XXXX) # Create the overlay file > "$OVERLAY_FILE" # We first try to build the contribs while test $# -gt 0 do _CONTRIB_NAME=$1 _CONTRIB_GITURL=${_CONTRIB_NAME}_CI_GITURL _CONTRIB_GITURL=${!_CONTRIB_GITURL} echo "Processing Contrib $_CONTRIB_NAME" # check _CONTRIB_GIT exists and it is of the from github... _CONTRIB_DIR=_build_ci/$_CONTRIB_NAME # extract the relevant part of the repository _CONTRIB_GITSUFFIX=${_CONTRIB_GITURL#https://github.com/*/} _CONTRIB_GITURL="https://github.com/$DEVELOPER_NAME/$_CONTRIB_GITSUFFIX" _CONTRIB_GITPUSHURL="git@github.com:$DEVELOPER_NAME/${_CONTRIB_GITSUFFIX}.git" DOWNLOAD_ONLY=1 make ci-$_CONTRIB_NAME || true setup_contrib_git $_CONTRIB_DIR $_CONTRIB_GITPUSHURL echo "overlay ${_CONTRIB_NAME} $_CONTRIB_GITURL $OVERLAY_BRANCH $PR_NUMBER" >> $OVERLAY_FILE echo "" >> $OVERLAY_FILE shift done # Copy to overlays folder. PR_NUMBER=$(printf '%05d' "$PR_NUMBER") mv $OVERLAY_FILE dev/ci/user-overlays/$PR_NUMBER-$DEVELOPER_NAME-${OVERLAY_BRANCH///}.sh coq-8.15.0/dev/tools/generate-release-changelog.sh000077500000000000000000000053721417001151100217770ustar00rootroot00000000000000#!/usr/bin/env bash set -e set -o pipefail if [ $# != 1 ]; then echo "Usage: $0 BRANCH" exit fi branch=$1 # Set SLOW_CONF to have the confirmation output wait for a newline # Emacs doesn't send characters until the RET so we can't quick_conf if [ -z ${SLOW_CONF+x} ] || [ -n "$INSIDE_EMACS" ]; then quick_conf="-n 1" else quick_conf="" fi ask_confirmation() { read -p "Continue anyway? [y/N] " $quick_conf -r echo if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1 fi } if ! git diff --quiet; then echo "Warning: current tree is dirty." ask_confirmation fi remote=$(git config --get "branch.${branch}.remote" || true) if [ -z "$remote" ]; then echo "Warning: branch $branch has no associated remote." ask_confirmation else if [ "$remote" != $(git config --get "branch.master.remote" || true) ]; then echo "Warning: branch master and branch $branch do not have the same remote." 
ask_confirmation fi official_remote_git_url="git@github.com:coq/coq" official_remote_https_url="github.com/coq/coq" remote_url=$(git remote get-url "$remote" --all) if [ "$remote_url" != "${official_remote_git_url}" ] && \ [ "$remote_url" != "${official_remote_git_url}.git" ] && \ [ "$remote_url" != "https://${official_remote_https_url}" ] && \ [ "$remote_url" != "https://${official_remote_https_url}.git" ] && \ [[ "$remote_url" != "https://"*"@${official_remote_https_url}" ]] && \ [[ "$remote_url" != "https://"*"@${official_remote_https_url}.git" ]] ; then echo "Warning: remote $remote does not point to the official Coq repo," echo "that is $official_remote_git_url" echo "It points to $remote_url instead." ask_confirmation fi git fetch "$remote" if [ $(git rev-parse master) != $(git rev-parse "${remote}/master") ]; then echo "Warning: branch master is not up-to-date with ${remote}/master." ask_confirmation fi if [ $(git rev-parse "$branch") != $(git rev-parse "${remote}/${branch}") ]; then echo "Warning: branch ${branch} is not up-to-date with ${remote}/${branch}." ask_confirmation fi fi git checkout $branch --detach changelog_entries_with_title=$(ls doc/changelog/*/*.rst) changelog_entries_no_title=$(echo "$changelog_entries_with_title" | grep -v "00000-title.rst") git checkout master for f in $changelog_entries_with_title; do if [ -f "$f" ]; then cat "$f" >> released.rst else echo "Warning: $f is missing in master branch." fi done for f in $changelog_entries_no_title; do if [ -f "$f" ]; then git rm "$f" fi done echo "Changelog written in released.rst. Move its content to a new section in doc/sphinx/changes.rst." coq-8.15.0/dev/tools/github-check-prs.py000077500000000000000000000027441417001151100200170ustar00rootroot00000000000000#!/usr/bin/env python3 # Requires PyGithub https://pypi.python.org/pypi/PyGithub, for instance # debian package: python3-github # nix: nix-shell -p python3 python3Packages.PyGithub --run ./github-check-rebase.py from github import Github import argparse REPO = "coq/coq" REBASE_LABEL="needs: rebase" parser = argparse.ArgumentParser() parser.add_argument("--token-file", type=argparse.FileType('r')) args = parser.parse_args() if args.token_file is None: token = input("Github access token: ").strip() else: token = args.token_file.read().rstrip("\n") args.token_file.close() if token == "": print ("Warning: using the GitHub API without a token") print ("We may run into rate limit issues") g = Github() else: g = Github(token) repo = g.get_repo(REPO) for pull in repo.get_pulls(): # if conflicts then dirty # otherwise blocked (because I have no rights) dirty = pull.mergeable_state == "dirty" labelled = False for label in repo.get_issue(pull.number).get_labels(): if label.name == REBASE_LABEL: labelled = True if labelled and not dirty: print ("PR #" + str(pull.number) + " is not dirty but is labelled") print ("("+ pull.html_url +")") elif dirty and not labelled: print ("PR #" + str(pull.number) + " is dirty and not labelled") print ("("+ pull.html_url +")") else: # give some feedback so the user can see we didn't crash print ("PR #" + str(pull.number) + " OK") coq-8.15.0/dev/tools/list-contributors.sh000077500000000000000000000007231417001151100203430ustar00rootroot00000000000000#!/usr/bin/env bash # For compat with OSX which has a non-gnu sed which doesn't support -z SED=`(which gsed || which sed) 2> /dev/null` if [ $# != 1 ]; then echo "usage: $0 rev0..rev1" exit 1 fi git shortlog -s -n --group=author --group=trailer:Co-authored-by $1 | cut -f2 | sort -k 2 | grep -v -e 
"coqbot" -e "^$" > contributors.tmp cat contributors.tmp | wc -l | xargs echo "Contributors:" cat contributors.tmp | $SED -z "s/\n/, /g" echo rm contributors.tmp coq-8.15.0/dev/tools/make-changelog.sh000077500000000000000000000031121417001151100174720ustar00rootroot00000000000000#!/bin/sh printf "PR number? " read -r PR printf "Category? (type a prefix)\n" (cd doc/changelog && ls -d */) read -r where where="doc/changelog/$where" if ! [ -d "$where" ]; then where=$(echo "$where"*); fi where="$where/$PR-$(git rev-parse --abbrev-ref HEAD | tr / -).rst" printf "Type? (type first letter)\n" printf "[A]dded \t[C]hanged \t[D]eprecated \t[F]ixed \t[R]emoved\n" read -r type_first_letter case "$type_first_letter" in [Aa]) type_full="Added";; [Cc]) type_full="Changed";; [Dd]) type_full="Deprecated";; [Ff]) type_full="Fixed";; [Rr]) type_full="Removed";; *) printf "Invalid input!\n" exit 1;; esac printf "Fixes? (space separated list of bug numbers)\n" read -r fixes_list fixes_string="$(echo $fixes_list | sed 's/ /~ and /g; s,\([0-9][0-9]*\),`#\1 `_,g' | tr '~' '\n')" if [ ! -z "$fixes_string" ]; then fixes_string="$(printf '\n fixes %s,' "$fixes_string")"; fi # shellcheck disable=SC2016 # the ` are regular strings, this is intended # use %s for the leading - to avoid looking like an option (not sure # if necessary but doesn't hurt) printf '%s **%s:**\n Describe your change here but do not end with a period\n (`#%s `_,%s\n by %s).\n' - "$type_full" "$PR" "$PR" "$fixes_string" "$(git config user.name)" > "$where" printf 'Name of created changelog file:\n' printf '%s\n' "$where" giteditor=$(git config core.editor) if [ "$giteditor" ]; then $giteditor "$where" elif [ "$EDITOR" ]; then $EDITOR "$where" else printf "Describe the changes in the above file\n" fi coq-8.15.0/dev/tools/make_git_revision.sh000077500000000000000000000005161417001151100203330ustar00rootroot00000000000000#!/usr/bin/env bash if [ -x `which git` ] && [ -d .git ] || git rev-parse --git-dir > /dev/null 2>&1 then export LANG=C GIT_BRANCH=$(git branch -a | sed -ne '/^\* /s/^\* \(.*\)/\1/p') GIT_HOST=$(hostname) GIT_PATH=$(pwd) echo "${GIT_HOST}:${GIT_PATH},${GIT_BRANCH}" echo $(git log -1 --pretty='format:%H') fi coq-8.15.0/dev/tools/merge-pr.sh000077500000000000000000000170521417001151100163560ustar00rootroot00000000000000#!/usr/bin/env bash set -e set -o pipefail API=https://api.github.com/repos/coq/coq OFFICIAL_REMOTE_GIT_URL="git@github.com:coq/coq" OFFICIAL_REMOTE_HTTPS_URL="github.com/coq/coq" # This script depends (at least) on git (>= 2.7) and jq. # It should be used like this: dev/tools/merge-pr.sh /PR number/ # Set SLOW_CONF to have the confirmation output wait for a newline # E.g. call $ SLOW_CONF= dev/tools/merge-pr.sh /PR number/ # emacs doesn't send characters until the RET so we can't quick_conf if [ -z ${SLOW_CONF+x} ] || [ -n "$INSIDE_EMACS" ]; then QUICK_CONF="-n 1" else QUICK_CONF="" fi RED="\033[31m" RESET="\033[0m" GREEN="\033[32m" YELLOW="\033[33m" info() { echo -e "${GREEN}info:${RESET} $1 ${RESET}" } error() { echo -e "${RED}error:${RESET} $1 ${RESET}" } warning() { echo -e "${YELLOW}warning:${RESET} $1 ${RESET}" } check_util() { if ! command -v "$1" > /dev/null 2>&1; then error "this script requires the $1 command line utility" exit 1 fi } ask_confirmation() { read -p "Continue anyway? [y/N] " $QUICK_CONF -r echo if [[ ! 
$REPLY =~ ^[Yy]$ ]] then exit 1 fi } curl_paginate_array() { # as per https://developer.github.com/v3/guides/traversing-with-pagination/#changing-the-number-of-items-received, GitHub will never give us more than 100 url="$1?per_page=100" # we keep fetching pages until the response is below the per-page limit (possibly 0 elements) page=1 while true; do response="$(curl -s "${url}&page=${page}")" echo "${response}" if [ "$(jq 'length' <<< "$response")" -lt 100 ]; then # done break fi page=$(($page + 1)) done | jq '[.[]]' # we concatenate the arrays } check_util jq check_util curl check_util git check_util gpg check_util grep # command line parsing if [ $# != 1 ]; then error "usage: $0 PR-number" exit 1 fi if [[ "$1" =~ ^[1-9][0-9]*$ ]]; then PR=$1 else error "$1 is not a number" exit 1 fi # Fetching PR metadata # The main API call returns a dict/object, not an array, so we don't # bother paginating PRDATA=$(curl -s "$API/pulls/$PR") TITLE=$(echo "$PRDATA" | jq -r '.title') info "title for PR $PR is $TITLE" BASE_BRANCH=$(echo "$PRDATA" | jq -r '.base.label') info "PR $PR targets branch $BASE_BRANCH" CURRENT_LOCAL_BRANCH=$(git rev-parse --abbrev-ref HEAD) info "you are merging in $CURRENT_LOCAL_BRANCH" REMOTE=$(git config --get "branch.$CURRENT_LOCAL_BRANCH.remote" || true) if [ -z "$REMOTE" ]; then error "branch $CURRENT_LOCAL_BRANCH has not associated remote" error "don't know where to fetch the PR from" error "please run: git branch --set-upstream-to=THE_REMOTE/$CURRENT_LOCAL_BRANCH" exit 1 fi REMOTE_URL=$(git remote get-url "$REMOTE" --all) if [ "$REMOTE_URL" != "${OFFICIAL_REMOTE_GIT_URL}" ] && \ [ "$REMOTE_URL" != "${OFFICIAL_REMOTE_GIT_URL}.git" ] && \ [ "$REMOTE_URL" != "https://${OFFICIAL_REMOTE_HTTPS_URL}" ] && \ [ "$REMOTE_URL" != "https://${OFFICIAL_REMOTE_HTTPS_URL}.git" ] && \ [[ "$REMOTE_URL" != "https://"*"@${OFFICIAL_REMOTE_HTTPS_URL}" ]] && \ [[ "$REMOTE_URL" != "https://"*"@${OFFICIAL_REMOTE_HTTPS_URL}.git" ]] ; then error "remote $REMOTE does not point to the official Coq repo" error "that is $OFFICIAL_REMOTE_GIT_URL" error "it points to $REMOTE_URL instead" ask_confirmation fi info "remote for $CURRENT_LOCAL_BRANCH is $REMOTE" info "fetching from $REMOTE the PR" git remote update "$REMOTE" if ! git ls-remote "$REMOTE" | grep pull >/dev/null; then error "remote $REMOTE is not configured to fetch pull requests" error "run: git config remote.$REMOTE.fetch +refs/pull/*/head:refs/remotes/$REMOTE/pr/*" exit 1 fi git fetch "$REMOTE" "refs/pull/$PR/head" COMMIT=$(git rev-parse FETCH_HEAD) info "commit for PR $PR is $COMMIT" # Sanity check: merge to a different branch if [ "$BASE_BRANCH" != "coq:$CURRENT_LOCAL_BRANCH" ]; then error "PR requests merge in $BASE_BRANCH but you are merging in $CURRENT_LOCAL_BRANCH" ask_confirmation fi; # Sanity check: the local branch is up-to-date with upstream LOCAL_BRANCH_COMMIT=$(git rev-parse HEAD) UPSTREAM_COMMIT=$(git rev-parse @{u}) if [ "$LOCAL_BRANCH_COMMIT" != "$UPSTREAM_COMMIT" ]; then # Is it just that the upstream branch is behind? # It could just be that we merged other PRs and we didn't push yet if git merge-base --is-ancestor -- "$UPSTREAM_COMMIT" "$LOCAL_BRANCH_COMMIT"; then warning "Your branch is ahead of ${REMOTE}." warning "On master, GitHub's branch protection rule prevents merging several PRs at once." warning "You should run [git push ${REMOTE}] between each call to the merge script." ask_confirmation else error "Local branch is not up-to-date with ${REMOTE}." error "Pull before merging." # This check should never be bypassed. 
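# For illustration (comment only, not part of the original check): when this
# error fires, the expected remedy is to fast-forward the local branch before
# re-running the merge script, for example with:
#   git pull --ff-only "$REMOTE" "$CURRENT_LOCAL_BRANCH"
# (example command only; it reuses the variables already computed above).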
exit 1 fi fi # Sanity check: PR has an outdated version of CI BASE_COMMIT=$(echo "$PRDATA" | jq -r '.base.sha') CI_FILES=(".gitlab-ci.yml" ".github/workflows/ci.yml") if ! git diff --quiet "$BASE_COMMIT" "$LOCAL_BRANCH_COMMIT" -- "${CI_FILES[@]}" then warning "This PR didn't run with the latest version of CI." warning "It is probably a good idea to ask for a rebase." read -p "Do you want to see the diff? [Y/n] " $QUICK_CONF -r echo if [[ ! $REPLY =~ ^[Nn]$ ]] then git diff "$BASE_COMMIT" "$LOCAL_BRANCH_COMMIT" -- "${CI_FILES[@]}" fi ask_confirmation fi # Sanity check: CI failed STATUS=$(curl -s "$API/commits/$COMMIT/status") if [ "$(echo "$STATUS" | jq -r '.state')" != "success" ]; then error "CI unsuccessful on $(echo "$STATUS" | jq -r -c '.statuses|map(select(.state != "success"))|map(.context)')" ask_confirmation fi; # Sanity check: has labels named "needs:" NEEDS_LABELS=$(echo "$PRDATA" | jq -rc '.labels | map(select(.name | match("needs:"))) | map(.name)') if [ "$NEEDS_LABELS" != "[]" ]; then error "needs:something labels still present: $NEEDS_LABELS" ask_confirmation fi # Sanity check: has milestone MILESTONE=$(echo "$PRDATA" | jq -rc '.milestone.title') if [ "$MILESTONE" = "null" ]; then error "no milestone set, please set one" ask_confirmation fi # Sanity check: has kind KIND=$(echo "$PRDATA" | jq -rc '.labels | map(select(.name | match("kind:"))) | map(.name)') if [ "$KIND" = "[]" ]; then error "no kind:something label set, please set one" ask_confirmation fi # Sanity check: user.signingkey if [ -z "$(git config user.signingkey)" ]; then warning "git config user.signingkey is empty" warning "gpg will guess a key out of your git config user.* data" fi # Generate commit message info "Fetching review data" reviews=$(curl_paginate_array "$API/pulls/$PR/reviews") msg="Merge PR #$PR: $TITLE" has_state() { [ "$(jq -rc 'map(select(.user.login == "'"$1"'") | .state) | any(. == "'"$2"'")' <<< "$reviews")" = true ] } author=$(echo "$PRDATA" | jq -rc '.user.login') for reviewer in $(jq -rc 'map(.user.login | select(. != "'"$author"'")) | unique | join(" ")' <<< "$reviews" ); do if has_state "$reviewer" APPROVED; then msg=$(printf '%s\n' "$msg" | git interpret-trailers --trailer Reviewed-by="$reviewer") elif has_state "$reviewer" COMMENTED; then msg=$(printf '%s\n' "$msg" | git interpret-trailers --trailer Ack-by="$reviewer") fi done info "merging" git merge -v -S --no-ff FETCH_HEAD -m "$msg" -e # TODO: improve this check if ! git diff --quiet --diff-filter=A "$REMOTE/$CURRENT_LOCAL_BRANCH" -- dev/ci/user-overlays; then warning "this PR has overlays, please check the following:" warning "- each overlay has a corresponding open PR on the upstream repo" warning "- after merging please notify the upstream they can merge the PR" fi coq-8.15.0/dev/tools/notify-upstream-pins.sh000077500000000000000000000052101417001151100207460ustar00rootroot00000000000000 #!/usr/bin/env bash # Script to notify upstreams that we need a tag to put in a platform/installer VERSION="8.13" DATEBETA="December 7, 2020" DATEFINAL="January 7, 2020" CC="CC: https://github.com/coq/coq/issues/12334" #CC="\n@coqbot column:...." REASON="bundled in the Windows installer" #REASON="bundled in the Coq platform" git show master:dev/ci/ci-basic-overlay.sh > /tmp/master-ci-basic-overlay.sh git show v${VERSION}:dev/ci/ci-basic-overlay.sh > /tmp/branch-ci-basic-overlay.sh # reads a variable value from a ci-basic-overlay.sh file function read_from() { ( . 
$1; varname="$2"; echo ${!varname} ) } # https://gist.github.com/cdown/1163649 function urlencode() { # urlencode old_lc_collate=$LC_COLLATE LC_COLLATE=C local length="${#1}" for (( i = 0; i < length; i++ )); do local c="${1:$i:1}" case $c in [a-zA-Z0-9.~_-]) printf '%s' "$c" ;; *) printf '%%%02X' "'$c" ;; esac done LC_COLLATE=$old_lc_collate } function template { TITLE="Please create a tag for the upcoming release of Coq $VERSION" BODY="The Coq team is planning to release Coq $VERSION-beta1 on $DATEBETA, and Coq $VERSION.0 on $DATEFINAL. Your project is currently scheduled for being $REASON. We are currently testing commit $3 on branch $1/tree/$2 but we would like to ship a released version instead (a tag in git's slang). Could you please tag that commit, or communicate us any other tag that works with the Coq branch v$VERSION at the *latest* 15 days before the date of the final release? Thanks! $CC " UUTITLE=$(urlencode "$TITLE") UUBODY=$(urlencode "$BODY") case $1 in ( http*github.com* ) echo "$1/issues/new?title=$UUTITLE&body=$UUBODY" ;; ( http*gitlab* ) echo "$1/-/issues/new" echo echo -e "$TITLE" echo echo -e "$BODY" ;; ( * ) echo "$1" echo echo -e "$TITLE" echo echo -e "$BODY" ;; esac } # TODO: filter w.r.t. what is in the platform PROJECTS=`read_from /tmp/branch-ci-basic-overlay.sh "projects[@]"` for addon in $PROJECTS; do URL=`read_from /tmp/master-ci-basic-overlay.sh "${addon}_CI_GITURL"` REF=`read_from /tmp/master-ci-basic-overlay.sh "${addon}_CI_REF"` PIN=`read_from /tmp/branch-ci-basic-overlay.sh "${addon}_CI_REF"` if [ "${#PIN}" = "40" ]; then echo -e "Addon $addon is pinned to a hash, to open an issue open the following url:\n" template $URL $REF $PIN elif [ "${#PIN}" = "0" ]; then echo "Addon $addon has no pin!" exit 1 else echo "Addon $addon is already pinned to version $PIN" fi echo -e "\n----------------------------------------------" done coq-8.15.0/dev/tools/objects.el000066400000000000000000000075701417001151100162600ustar00rootroot00000000000000(defun add-survive-module nil (interactive) (query-replace-regexp " \\([ ]*\\)\\(Summary\.\\)?survive_section" " \\1\\2survive_module = false; \\1\\2survive_section") ) (global-set-key [f2] 'add-survive-module) ; functions to change old style object declaration to new style (defun repl-open nil (interactive) (query-replace-regexp "open_function\\([ ]*\\)=\\([ ]*\\)cache_\\([a-zA-Z0-9'_]*\\)\\( *\\);" "open_function\\1=\\2(fun i o -> if i=1 then cache_\\3 o)\\4;") ) (global-set-key [f6] 'repl-open) (defun repl-load nil (interactive) (query-replace-regexp "load_function\\([ ]*\\)=\\([ ]*\\)cache_\\([a-zA-Z0-9'_]*\\)\\( *\\);" "load_function\\1=\\2(fun _ -> cache_\\3)\\4;") ) (global-set-key [f7] 'repl-load) (defun repl-decl nil (interactive) (query-replace-regexp "\\(Libobject\.\\)?declare_object[ ]*([ ]*\\(.*\\)[ ]*,[ ]* \\([ ]*\\){\\([ ]*\\)\\([^ ][^}]*\\)}[ ]*)" "\\1declare_object {(\\1default_object \\2) with \\3 \\4\\5}") ; "|$1=\\1|$2=\\2|$3=\\3|$4=\\4|") ) (global-set-key [f9] 'repl-decl) ; eval the above and try f9 f6 f7 on the following: let (inThing,outThing) = declare_object ("THING", { load_function = cache_thing; cache_function = cache_thing; open_function = cache_thing; export_function = (function x -> Some x) }) ;%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% ;%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% ; functions helping writing non-copying substitutions (defun make-subst (name) (interactive "s") (defun f (l) (save-excursion (query-replace-regexp (concat 
"\\([a-zA-z_0-9]*\\)[ ]*:[ ]*" (car l) "\\([ ]*;\\|[ ]*\}\\)") (concat "let \\1\' = " (cdr l) " " name "\\1 in")) ) ) (mapcar 'f '(("constr"."subst_mps subst") ("Coqast.t"."subst_ast subst") ("Coqast.t list"."list_smartmap (subst_ast subst)") ("'pat"."subst_pat subst") ("'pat unparsing_hunk"."subst_hunk subst_pat subst") ("'pat unparsing_hunk list"."list_smartmap (subst_hunk subst_pat subst)") ("'pat syntax_entry"."subst_syntax_entry subst_pat subst") ("'pat syntax_entry list"."list_smartmap (subst_syntax_entry subst_pat subst)") ("constr option"."option_smartmap (subst_mps subst)") ("constr list"."list_smartmap (subst_mps subst)") ("constr array"."array_smartmap (subst_mps subst)") ("constr_pattern"."subst_pattern subst") ("constr_pattern option"."option_smartmap (subst_pattern subst)") ("constr_pattern array"."array_smartmap (subst_pattern subst)") ("constr_pattern list"."list_smartmap (subst_pattern subst)") ("global_reference"."subst_global subst") ("extended_global_reference"."subst_ext subst") ("obj_typ"."subst_obj subst") ) ) ) (global-set-key [f2] 'make-subst) (defun make-if (name) (interactive "s") (save-excursion (query-replace-regexp "\\([a-zA-z_0-9]*\\)[ ]*:[ ]*['a-zA-z_. ]*\\(;\\|[ ]*\}\\)" (concat "&& \\1\' == " name "\\1") ) ) ) (global-set-key [f4] 'make-if) (defun make-record nil (interactive) (save-excursion (query-replace-regexp "\\([a-zA-z_0-9]*\\)[ ]*:[ ]*['a-zA-z_. ]*\\(;\\|[ ]*\}\\)" (concat "\\1 = \\1\' ;") ) ) ) (global-set-key [f5] 'make-record) (defun make-prim nil (interactive) (save-excursion (query-replace-regexp "\\<[a-zA-Z'_0-9]*\\>" "\\&'")) ) (global-set-key [f6] 'make-prim) ; eval the above, yank the text below and do ; paste f2 morph. ; paste f4 morph. ; paste f5 lem : constr; profil : bool list; arg_types : constr list; lem2 : constr option } ; and you almost get Setoid_replace.subst_morph :) ; and now f5 on this: (ref,(c1,c2)) coq-8.15.0/dev/tools/pin-ci.sh000077500000000000000000000022441417001151100160140ustar00rootroot00000000000000#!/usr/bin/env bash # Use this script to pin the commit used by the developments tracked by the CI OVERLAYS="./dev/ci/ci-basic-overlay.sh" process_development() { local DEV=$1 local REPO_VAR="${DEV}_CI_GITURL" local REPO=${!REPO_VAR} local BRANCH_VAR="${DEV}_CI_REF" local BRANCH=${!BRANCH_VAR} if [[ -z "$BRANCH" ]] then echo "$DEV has no branch set, skipping" return 0 fi if [[ $BRANCH =~ ^[a-f0-9]{40}$ ]] then echo "$DEV is already set to hash $BRANCH, skipping" return 0 fi echo "Resolving $DEV as $BRANCH from $REPO" local HASH=$(git ls-remote --heads $REPO $BRANCH | cut -f 1) if [[ -z "$HASH" ]] then echo "Could not resolve reference $BRANCH for $DEV (something went wrong), skipping" return 0 fi read -p "Expand $DEV from $BRANCH to $HASH? [y/N] " -n 1 -r echo if [[ $REPLY =~ ^[Yy]$ ]]; then # use -i.bak to be compatible with MacOS; see, e.g., https://stackoverflow.com/a/7573438/377022 sed -i.bak -E "s|project +$DEV +.*|project $DEV '$REPO' '$HASH'|" $OVERLAYS fi } # Execute the script to set the overlay variables . $OVERLAYS for project in ${projects[@]} do process_development $project done coq-8.15.0/dev/tools/pre-commit000077500000000000000000000045661417001151100163110ustar00rootroot00000000000000#!/bin/sh # configure automatically sets up a wrapper at .git/hooks/pre-commit # which calls this script (if it exists). set -e dev/tools/check-overlays.sh log=$(mktemp "git-fix-ws-log.XXXXXX") exec > "$log" 1>&2 echo "Auto fixing whitespace issues ($log)..." 
# We fix whitespace in the index and in the working tree # separately to preserve non-added changes. index=$(mktemp "git-fix-ws-index.XXXXXX") fixed_index=$(mktemp "git-fix-ws-index-fixed.XXXXXX") tree=$(mktemp "git-fix-ws-tree.XXXXXX") echo "Patches are saved in '$index', '$fixed_index' and '$tree'." echo "If an error destroys your changes you can recover using them." echo "(The files are cleaned up on success.)" echo #newline git diff-index -p --binary --cached HEAD > "$index" git diff-index -p --binary HEAD > "$tree" # reset work tree and index # NB: untracked files which were not added are untouched if [ -s "$index" ]; then git apply --whitespace=nowarn --cached -R "$index"; fi if [ -s "$tree" ]; then git apply --whitespace=nowarn -R "$tree"; fi # Fix index # For end of file newlines we must go through the worktree if [ -s "$index" ]; then echo "Fixing staged changes..." git apply --cached --whitespace=fix "$index" git apply --whitespace=fix "$index" 2>/dev/null # no need to repeat yourself git diff --cached --name-only -z | xargs -0 dev/tools/check-eof-newline.sh --fix git add -u echo #newline fi # reset work tree git diff-index -p --binary --cached HEAD > "$fixed_index" # If all changes were bad whitespace changes the patch is empty # making git fail. Don't fail now: we fix the worktree first. if [ -s "$fixed_index" ]; then git apply --whitespace=nowarn -R "$fixed_index"; fi # Fix worktree if [ -s "$tree" ]; then echo "Fixing unstaged changes..." git apply --whitespace=fix "$tree" git diff --name-only -z | xargs -0 dev/tools/check-eof-newline.sh --fix echo #newline fi if [ -s "$index" ] && ! [ -s "$fixed_index" ]; then echo "Fixing whitespace issues cancelled all changes." exit 1 fi # Check that we did fix whitespace if ! git diff-index --check --cached HEAD; then echo "Auto-fixing whitespace failed: errors remain." echo "This may fix itself if you try again." echo "(Consider whether the number of errors decreases after each run.)" exit 1 fi echo "Whitespace pass complete." # clean up temporary files rm "$index" "$tree" "$fixed_index" "$log" coq-8.15.0/dev/tools/update-compat.py000077500000000000000000000511651417001151100174240ustar00rootroot00000000000000#!/usr/bin/env python3 import os, re, sys, subprocess from io import open # When passed `--release`, this script sets up Coq to support three # `-compat` flag arguments. If executed manually, this would consist # of doing the following steps: # # - Delete the file `theories/Compat/CoqUU.v`, where U.U is four # versions prior to the new version X.X. After this, there # should be exactly three `theories/Compat/CoqNN.v` files. # - Update # [`doc/stdlib/index-list.html.template`](/doc/stdlib/index-list.html.template) # with the deleted file. # - Remove any notations in the standard library which have `compat "U.U"`. # - Update the function `get_compat_file` in [`toplevel/coqargs.ml`](/toplevel/coqargs.ml) # by bumping all the version numbers by one. # # - Remove the file # [`test-suite/success/CompatOldOldFlag.v`](/test-suite/success/CompatOldOldFlag.v). # - Update # [`test-suite/tools/update-compat/run.sh`](/test-suite/tools/update-compat/run.sh) # to ensure that it passes `--release` to the `update-compat.py` # script. # When passed the `--master` flag, this script sets up Coq to support # four `-compat` flag arguments. 
If executed manually, this would # consist of doing the following steps: # # - Add a file `theories/Compat/CoqXX.v` which contains just the header # from [`dev/header.ml`](/dev/header.ml) # - Add the line `Require Export Coq.Compat.CoqXX.` at the top of # `theories/Compat/CoqYY.v`, where Y.Y is the version prior to X.X. # - Update # [`doc/stdlib/index-list.html.template`](/doc/stdlib/index-list.html.template) # with the added file. # - Update the function `get_compat_file` in [`toplevel/coqargs.ml`](/toplevel/coqargs.ml) # by bumping all the version numbers by one. # - Update the files # [`test-suite/success/CompatCurrentFlag.v`](/test-suite/success/CompatCurrentFlag.v), # [`test-suite/success/CompatPreviousFlag.v`](/test-suite/success/CompatPreviousFlag.v), # and # [`test-suite/success/CompatOldFlag.v`](/test-suite/success/CompatOldFlag.v) # by bumping all version numbers by 1. Re-create the file # [`test-suite/success/CompatOldOldFlag.v`](/test-suite/success/CompatOldOldFlag.v) # with its version numbers also bumped by 1 (file should have # been removed before branching; see above). # - Update # [`test-suite/tools/update-compat/run.sh`](/test-suite/tools/update-compat/run.sh) # to ensure that it passes `--master` to the `update-compat.py` # script. # Obtain the absolute path of the script being run. By assuming that # the script lives in dev/tools/, and basing all calls on the path of # the script, rather than the current working directory, we can be # robust to users who choose to run the script from any location. SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__)) ROOT_PATH = os.path.realpath(os.path.join(SCRIPT_PATH, '..', '..')) CONFIGURE_PATH = os.path.join(ROOT_PATH, 'tools/configure/configure.ml') HEADER_PATH = os.path.join(ROOT_PATH, 'dev', 'header.ml') DEFAULT_NUMBER_OF_OLD_VERSIONS = 2 RELEASE_NUMBER_OF_OLD_VERSIONS = 2 MASTER_NUMBER_OF_OLD_VERSIONS = 3 EXTRA_HEADER = '\n(** Compatibility file for making Coq act similar to Coq v%s *)\n' COQARGS_ML_PATH = os.path.join(ROOT_PATH, 'sysinit', 'coqargs.ml') DOC_INDEX_PATH = os.path.join(ROOT_PATH, 'doc', 'stdlib', 'index-list.html.template') TEST_SUITE_RUN_PATH = os.path.join(ROOT_PATH, 'test-suite', 'tools', 'update-compat', 'run.sh') TEST_SUITE_PATHS = tuple(os.path.join(ROOT_PATH, 'test-suite', 'success', i) for i in ('CompatOldOldFlag.v', 'CompatOldFlag.v', 'CompatPreviousFlag.v', 'CompatCurrentFlag.v')) TEST_SUITE_DESCRIPTIONS = ('current-minus-three', 'current-minus-two', 'current-minus-one', 'current') # sanity check that we are where we think we are assert(os.path.normpath(os.path.realpath(SCRIPT_PATH)) == os.path.normpath(os.path.realpath(os.path.join(ROOT_PATH, 'dev', 'tools')))) assert(os.path.exists(CONFIGURE_PATH)) BUG_HEADER = r"""(* DO NOT MODIFY THIS FILE DIRECTLY *) (* It is autogenerated by %s. *) """ % os.path.relpath(os.path.realpath(__file__), ROOT_PATH) def get_file_lines(file_name): with open(file_name, 'rb') as f: lines = f.readlines() return [line.decode('utf-8') for line in lines] def get_file(file_name): return ''.join(get_file_lines(file_name)) def get_header(): return get_file(HEADER_PATH) HEADER = get_header() def fatal_error(msg): if hasattr(sys.stderr, 'buffer'): sys.stderr.buffer.write(msg.encode("utf-8")) else: sys.stderr.write(msg.encode("utf-8")) sys.exit(1) def maybe_git_add(local_path, suggest_add=True, **args): if args['git_add']: print("Running 'git add %s'..." % local_path) retc = subprocess.call(['git', 'add', local_path], cwd=ROOT_PATH) if retc is not None and retc != 0: print('!!! 
Process returned code %d' % retc) elif suggest_add: print(r"!!! Don't forget to 'git add %s'!" % local_path) def maybe_git_rm(local_path, **args): if args['git_add']: print("Running 'git rm %s'..." % local_path) retc = subprocess.call(['git', 'rm', local_path], cwd=ROOT_PATH) if retc is not None and retc != 0: print('!!! Process returned code %d' % retc) def get_version(cur_version=None): if cur_version is not None: return cur_version for line in get_file_lines(CONFIGURE_PATH): found = re.findall(r'let coq_version = "([0-9]+\.[0-9]+)', line) if len(found) > 0: return found[0] raise Exception("No line 'let coq_version = \"X.X' found in %s" % os.path.relpath(CONFIGURE_PATH, ROOT_PATH)) def compat_name_to_version_name(compat_file_name): assert(compat_file_name.startswith('Coq') and compat_file_name.endswith('.v')) v = compat_file_name[len('Coq'):][:-len('.v')] assert(len(v) == 2 or (len(v) >= 2 and v[0] in ('8', '9'))) # we'll have to change this scheme when we hit Coq 10.* return '%s.%s' % (v[0], v[1:]) def version_name_to_compat_name(v, ext='.v'): return 'Coq%s%s%s' % tuple(v.split('.') + [ext]) # returns (lines of compat files, lines of not compat files def get_doc_index_lines(): lines = get_file_lines(DOC_INDEX_PATH) return (tuple(line for line in lines if 'theories/Compat/Coq' in line), tuple(line for line in lines if 'theories/Compat/Coq' not in line)) COMPAT_INDEX_LINES, DOC_INDEX_LINES = get_doc_index_lines() def version_to_int_pair(v): return tuple(map(int, v.split('.'))) def get_known_versions(): # We could either get the files from the doc index, or from the # directory list. We assume that the doc index is more # representative. If we wanted to use the directory list, we # would do: # compat_files = os.listdir(os.path.join(ROOT_PATH, 'theories', 'Compat')) compat_files = re.findall(r'Coq[^\.]+\.v', '\n'.join(COMPAT_INDEX_LINES)) return tuple(sorted((compat_name_to_version_name(i) for i in compat_files if i.startswith('Coq') and i.endswith('.v')), key=version_to_int_pair)) def get_new_versions(known_versions, **args): if args['cur_version'] in known_versions: assert(known_versions[-1] == args['cur_version']) known_versions = known_versions[:-1] assert(len(known_versions) >= args['number_of_old_versions']) return tuple(list(known_versions[-args['number_of_old_versions']:]) + [args['cur_version']]) def print_diff(olds, news, numch=30): for ch in range(min(len(olds), len(news))): if olds[ch] != news[ch]: print('Character %d differs:\nOld: %s\nNew: %s' % (ch, repr(olds[ch:][:numch]), repr(news[ch:][numch]))) return ch = min(len(olds), len(news)) assert(len(olds) != len(news)) print('Strings are different lengths:\nOld tail: %s\nNew tail: %s' % (repr(olds[ch:]), repr(news[ch:]))) def update_shebang_to_match(contents, new_contents, path): contents_lines = contents.split('\n') new_contents_lines = new_contents.split('\n') if not (contents_lines[0].startswith('#!/') and contents_lines[0].endswith('bash')): raise Exception('Unrecognized #! line in existing %s: %s' % (os.path.relpath(path, ROOT_PATH), repr(contents_lines[0]))) if not (new_contents_lines[0].startswith('#!/') and new_contents_lines[0].endswith('bash')): raise Exception('Unrecognized #! 
line in new %s: %s' % (os.path.relpath(path, ROOT_PATH), repr(new_contents_lines[0]))) new_contents_lines[0] = contents_lines[0] return '\n'.join(new_contents_lines) def update_if_changed(contents, new_contents, path, exn_string='%s changed!', suggest_add=False, pass_through_shebang=False, assert_unchanged=False, **args): if contents is not None and pass_through_shebang: new_contents = update_shebang_to_match(contents, new_contents, path) if contents is None or contents != new_contents: if not assert_unchanged: print('Updating %s...' % os.path.relpath(path, ROOT_PATH)) with open(path, 'w', encoding='utf-8') as f: f.write(new_contents) maybe_git_add(os.path.relpath(path, ROOT_PATH), suggest_add=suggest_add, **args) else: if contents is not None: print('Unexpected change:\nOld contents:\n%s\n\nNew contents:\n%s\n' % (contents, new_contents)) print_diff(contents, new_contents) raise Exception(exn_string % os.path.relpath(path, ROOT_PATH)) def remove_if_exists(path, exn_string='%s exists when it should not!', assert_unchanged=False, **args): if os.path.exists(path): if not assert_unchanged: print('Removing %s...' % os.path.relpath(path, ROOT_PATH)) os.remove(path) maybe_git_rm(os.path.relpath(path, ROOT_PATH), **args) else: raise Exception(exn_string % os.path.relpath(path, ROOT_PATH)) def update_file(new_contents, path, **args): update_if_changed(None, new_contents, path, **args) def update_compat_files(old_versions, new_versions, assert_unchanged=False, **args): for v in old_versions: if v not in new_versions: compat_file = os.path.join('theories', 'Compat', version_name_to_compat_name(v)) if not assert_unchanged: print('Removing %s...' % compat_file) compat_path = os.path.join(ROOT_PATH, compat_file) os.rename(compat_path, compat_path + '.bak') maybe_git_rm(compat_file, **args) else: raise Exception('%s exists!' % compat_file) for v, next_v in zip(new_versions, list(new_versions[1:]) + [None]): compat_file = os.path.join('theories', 'Compat', version_name_to_compat_name(v)) compat_path = os.path.join(ROOT_PATH, compat_file) if not os.path.exists(compat_path): print('Creating %s...' % compat_file) contents = HEADER + (EXTRA_HEADER % v) if next_v is not None: contents += '\nRequire Export Coq.Compat.%s.\n' % version_name_to_compat_name(next_v, ext='') update_file(contents, compat_path, exn_string='%s does not exist!', assert_unchanged=assert_unchanged, **args) else: # print('Checking %s...' % compat_file) contents = get_file(compat_path) header = HEADER + (EXTRA_HEADER % v) if not contents.startswith(HEADER): raise Exception("Invalid header in %s; does not match %s" % (compat_file, os.path.relpath(HEADER_PATH, ROOT_PATH))) if not contents.startswith(header): raise Exception("Invalid header in %s; missing line %s" % (compat_file, EXTRA_HEADER.strip('\n') % v)) if next_v is not None: line = 'Require Export Coq.Compat.%s.' 
% version_name_to_compat_name(next_v, ext='') if ('\n%s\n' % line) not in contents: if not contents.startswith(header + '\n'): contents = contents.replace(header, header + '\n') contents = contents.replace(header, '%s\n%s' % (header, line)) update_file(contents, compat_path, exn_string=('Compat file %%s is missing line %s' % line), assert_unchanged=assert_unchanged, **args) def update_get_compat_file(new_versions, contents, relpath): line_count = 3 # 1 for the first line, 1 for the invalid flags, and 1 for Current first_line = 'let get_compat_file = function' split_contents = contents[contents.index(first_line):].split('\n') while True: cur_line = split_contents[:line_count][-1] if re.match(r'^ \| \([0-9 "\.\|]*\) as s ->$', cur_line) is not None: break elif re.match(r'^ \| "[0-9\.]*" -> "Coq.Compat.Coq[0-9]*"$', cur_line) is not None: line_count += 1 else: raise Exception('Could not recognize line %d of get_compat_file in %s as a list of invalid versions (line was %s)' % (line_count, relpath, repr(cur_line))) old_function_lines = split_contents[:line_count] all_versions = re.findall(r'"([0-9\.]+)"', ''.join(old_function_lines)) invalid_versions = tuple(i for i in all_versions if i not in new_versions) new_function_lines = [first_line] for v, V in reversed(list(zip(new_versions, ['"Coq.Compat.Coq%s%s"' % tuple(v.split('.')) for v in new_versions]))): new_function_lines.append(' | "%s" -> %s' % (v, V)) new_function_lines.append(' | (%s) as s ->' % ' | '.join('"%s"' % v for v in invalid_versions)) new_lines = '\n'.join(new_function_lines) new_contents = contents.replace('\n'.join(old_function_lines), new_lines) if new_lines not in new_contents: raise Exception('Could not find get_compat_file in %s' % relpath) return new_contents def update_coqargs_ml(old_versions, new_versions, **args): contents = get_file(COQARGS_ML_PATH) new_contents = update_get_compat_file(new_versions, contents, os.path.relpath(COQARGS_ML_PATH, ROOT_PATH)) update_if_changed(contents, new_contents, COQARGS_ML_PATH, **args) def update_flags(old_versions, new_versions, **args): update_coqargs_ml(old_versions, new_versions, **args) def update_test_suite(new_versions, assert_unchanged=False, test_suite_paths=TEST_SUITE_PATHS, test_suite_descriptions=TEST_SUITE_DESCRIPTIONS, test_suite_outdated_paths=tuple(), **args): assert(len(new_versions) == len(test_suite_paths)) assert(len(new_versions) == len(test_suite_descriptions)) for i, (v, path, descr) in enumerate(zip(new_versions, test_suite_paths, test_suite_descriptions)): contents = None suggest_add = False if os.path.exists(path): contents = get_file(path) else: suggest_add = True if '%s' in descr: descr = descr % v lines = ['(* -*- coq-prog-args: ("-compat" "%s") -*- *)' % v, '(** Check that the %s compatibility flag actually requires the relevant modules. *)' % descr] for imp_v in reversed(new_versions[i:]): lines.append('Import Coq.Compat.%s.' 
% version_name_to_compat_name(imp_v, ext='')) lines.append('') new_contents = '\n'.join(lines) update_if_changed(contents, new_contents, path, suggest_add=suggest_add, **args) for path in test_suite_outdated_paths: remove_if_exists(path, assert_unchanged=assert_unchanged, **args) def update_doc_index(new_versions, **args): contents = get_file(DOC_INDEX_PATH) firstline = ' theories/Compat/AdmitAxiom.v' new_contents = ''.join(DOC_INDEX_LINES) if firstline not in new_contents: raise Exception("Could not find line '%s' in %s" % (firstline, os.path.relpath(DOC_INDEX_PATH, ROOT_PATH))) extra_lines = [' theories/Compat/%s' % version_name_to_compat_name(v) for v in new_versions] new_contents = new_contents.replace(firstline, '\n'.join([firstline] + extra_lines)) update_if_changed(contents, new_contents, DOC_INDEX_PATH, **args) def update_test_suite_run(**args): contents = get_file(TEST_SUITE_RUN_PATH) new_contents = r'''#!/usr/bin/env bash # allow running this script from any directory by basing things on where the script lives SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" # we assume that the script lives in test-suite/tools/update-compat/, # and that update-compat.py lives in dev/tools/ cd "${SCRIPT_DIR}/../../.." dev/tools/update-compat.py --assert-unchanged %s || exit $? ''' % ' '.join([('--master' if args['master'] else ''), ('--release' if args['release'] else '')]).strip() update_if_changed(contents, new_contents, TEST_SUITE_RUN_PATH, pass_through_shebang=True, **args) def update_compat_notations_in(old_versions, new_versions, contents): for v in old_versions: if v not in new_versions: reg = re.compile(r'^[ \t]*(?:Notation|Infix)[^\n]*?compat "%s"[^\n]*?\n' % v, flags=re.MULTILINE) contents = re.sub(r'^[ \t]*(?:Notation|Infix)[^\n]*?compat "%s"[^\n]*?\n' % v, '', contents, flags=re.MULTILINE) return contents def update_compat_notations(old_versions, new_versions, **args): for root, dirs, files in os.walk(os.path.join(ROOT_PATH, 'theories')): for fname in files: if not fname.endswith('.v'): continue contents = get_file(os.path.join(root, fname)) new_contents = update_compat_notations_in(old_versions, new_versions, contents) update_if_changed(contents, new_contents, os.path.join(root, fname), **args) def display_git_grep(old_versions, new_versions): Vs = ['V%s_%s' % tuple(v.split('.')) for v in old_versions if v not in new_versions] compat_vs = ['compat "%s"' % v for v in old_versions if v not in new_versions] all_options = tuple(Vs + compat_vs) options = (['"-compat" "%s"' % v for v in old_versions if v not in new_versions] + [version_name_to_compat_name(v, ext='') for v in old_versions if v not in new_versions]) if len(options) > 0 or len(all_options) > 0: print('To discover what files require manual updating, run:') if len(options) > 0: print("git grep -- '%s' test-suite/" % r'\|'.join(options)) if len(all_options) > 0: print("git grep -- '%s'" % r'\|'.join(all_options)) def parse_args(argv): args = { 'assert_unchanged': False, 'cur_version': None, 'number_of_old_versions': None, 'master': False, 'release': False, 'git_add': False, } if '--master' not in argv and '--release' not in argv: fatal_error(r'''ERROR: You should pass either --release (sometime before branching) or --master (right after branching and updating the version number in version.ml)''') for arg in argv[1:]: if arg == '--assert-unchanged': args['assert_unchanged'] = True elif arg == '--git-add': args['git_add'] = True elif arg == '--master': args['master'] = True if 
args['number_of_old_versions'] is None: args['number_of_old_versions'] = MASTER_NUMBER_OF_OLD_VERSIONS elif arg == '--release': args['release'] = True if args['number_of_old_versions'] is None: args['number_of_old_versions'] = RELEASE_NUMBER_OF_OLD_VERSIONS elif arg.startswith('--cur-version='): args['cur_version'] = arg[len('--cur-version='):] assert(len(args['cur_version'].split('.')) == 2) assert(all(map(str.isdigit, args['cur_version'].split('.')))) elif arg.startswith('--number-of-old-versions='): args['number_of_old_versions'] = int(arg[len('--number-of-old-versions='):]) else: print('USAGE: %s [--assert-unchanged] [--cur-version=NN.NN] [--number-of-old-versions=NN] [--git-add]' % argv[0]) print('') print('ERROR: Unrecognized argument: %s' % arg) sys.exit(1) if args['number_of_old_versions'] is None: args['number_of_old_versions'] = DEFAULT_NUMBER_OF_OLD_VERSIONS return args if __name__ == '__main__': args = parse_args(sys.argv) args['cur_version'] = get_version(args['cur_version']) args['number_of_compat_versions'] = args['number_of_old_versions'] + 1 known_versions = get_known_versions() new_versions = get_new_versions(known_versions, **args) assert(len(TEST_SUITE_PATHS) >= args['number_of_compat_versions']) args['test_suite_paths'] = tuple(TEST_SUITE_PATHS[-args['number_of_compat_versions']:]) args['test_suite_outdated_paths'] = tuple(TEST_SUITE_PATHS[:-args['number_of_compat_versions']]) args['test_suite_descriptions'] = tuple(TEST_SUITE_DESCRIPTIONS[-args['number_of_compat_versions']:]) update_compat_files(known_versions, new_versions, **args) update_flags(known_versions, new_versions, **args) update_test_suite(new_versions, **args) update_test_suite_run(**args) update_doc_index(new_versions, **args) update_compat_notations(known_versions, new_versions, **args) display_git_grep(known_versions, new_versions) coq-8.15.0/dev/top_printers.dbg000066400000000000000000000066621417001151100163540ustar00rootroot00000000000000install_printer Top_printers.pP install_printer Top_printers.ppfuture install_printer Top_printers.ppid install_printer Top_printers.pplab install_printer Top_printers.ppmbid install_printer Top_printers.ppdir install_printer Top_printers.ppmp install_printer Top_printers.ppcon install_printer Top_printers.ppproj install_printer Top_printers.ppprojrepr install_printer Top_printers.ppkn install_printer Top_printers.ppmind install_printer Top_printers.ppind install_printer Top_printers.ppsp install_printer Top_printers.ppqualid install_printer Top_printers.ppscheme install_printer Top_printers.ppwf_paths install_printer Top_printers.ppevar install_printer Top_printers.ppconstr install_printer Top_printers.ppeconstr install_printer Top_printers.ppconstr_expr install_printer Top_printers.ppglob_constr install_printer Top_printers.pppattern install_printer Top_printers.ppfconstr install_printer Top_printers.ppfsubst install_printer Top_printers.ppnumtokunsigned install_printer Top_printers.ppnumtokunsignednat install_printer Top_printers.ppintset install_printer Top_printers.ppidset install_printer Top_printers.ppidmapgen install_printer Top_printers.ppintmapgen install_printer Top_printers.ppididmap install_printer Top_printers.ppconstrunderbindersidmap install_printer Top_printers.ppevarsubst install_printer Top_printers.ppunbound_ltac_var_map install_printer Top_printers.ppclosure install_printer Top_printers.ppclosedglobconstr install_printer Top_printers.ppclosedglobconstridmap install_printer Top_printers.ppglobal install_printer Top_printers.ppconst install_printer 
Top_printers.ppvar install_printer Top_printers.ppj install_printer Top_printers.ppsubst install_printer Top_printers.ppdelta install_printer Top_printers.pp_idpred install_printer Top_printers.pp_cpred install_printer Top_printers.pp_transparent_state install_printer Top_printers.pp_estack_t install_printer Top_printers.pp_state_t install_printer Top_printers.ppmetas install_printer Top_printers.ppevm install_printer Top_printers.ppexistentialset install_printer Top_printers.ppexistentialfilter install_printer Top_printers.ppclenv install_printer Top_printers.ppgoalgoal install_printer Top_printers.ppgoal install_printer Top_printers.pphintdb install_printer Top_printers.ppproofview install_printer Top_printers.ppopenconstr install_printer Top_printers.pproof install_printer Top_printers.ppuni install_printer Top_printers.ppuni_level install_printer Top_printers.ppuniverse_set install_printer Top_printers.ppuniverse_instance install_printer Top_printers.ppuniverse_context install_printer Top_printers.ppaucontext install_printer Top_printers.ppuniverse_context_set install_printer Top_printers.ppuniverse_subst install_printer Top_printers.ppuniverse_opt_subst install_printer Top_printers.ppuniverse_level_subst install_printer Top_printers.ppevar_universe_context install_printer Top_printers.ppconstraints install_printer Top_printers.ppuniverseconstraints install_printer Top_printers.ppuniverse_context_future install_printer Top_printers.ppuniverses install_printer Top_printers.ppnamedcontextval install_printer Top_printers.ppenv install_printer Top_printers.ppglobenv install_printer Top_printers.pptac install_printer Top_printers.ppobj install_printer Top_printers.pploc install_printer Top_printers.pp_argument_type install_printer Top_printers.pp_generic_argument install_printer Top_printers.ppgenarginfo install_printer Top_printers.ppgenargargt install_printer Top_printers.ppist coq-8.15.0/dev/top_printers.ml000066400000000000000000000571361417001151100162320ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* str "_") kx) (* name printers *) let ppid id = pp (Id.print id) let pplab l = pp (Label.print l) let ppmbid mbid = pp (str (MBId.debug_to_string mbid)) let ppdir dir = pp (DirPath.print dir) let ppmp mp = pp(str (ModPath.debug_to_string mp)) let ppcon con = pp(Constant.debug_print con) let ppprojrepr con = pp(Constant.debug_print (Projection.Repr.constant con)) let ppproj con = pp(Constant.debug_print (Projection.constant con)) let ppkn kn = pp(str (KerName.to_string kn)) let ppmind kn = pp(MutInd.debug_print kn) let ppind (kn,i) = pp(MutInd.debug_print kn ++ str"," ++int i) let ppsp sp = pp(pr_path sp) let ppqualid qid = pp(pr_qualid qid) let ppscheme k = pp (Ind_tables.pr_scheme_kind k) let prrecarg = Declareops.pp_recarg let ppwf_paths x = pp (Declareops.pp_wf_paths x) let get_current_context () = try Vernacstate.Declare.get_current_context () with Vernacstate.Declare.NoCurrentProof -> let env = Global.env() in Evd.from_env env, env [@@ocaml.warning "-3"] (* term printers *) let envpp pp = let sigma,env = get_current_context () in pp env sigma let rawdebug = ref false let ppevar evk = pp (Evar.print evk) let pr_constr t = let sigma, env = get_current_context () in Printer.pr_constr_env env sigma t let pr_econstr t = let sigma, env = get_current_context () in Printer.pr_econstr_env env sigma t let ppconstr x = pp 
(pr_constr x) let ppeconstr x = pp (pr_econstr x) let ppconstr_expr x = let sigma,env = get_current_context () in pp (Ppconstr.pr_constr_expr env sigma x) let ppconstr_univ x = Constrextern.with_universes ppconstr x let ppglob_constr = (fun x -> pp(with_env_evm pr_lglob_constr_env x)) let pppattern = (fun x -> pp(envpp pr_constr_pattern_env x)) let pptype = (fun x -> try pp(envpp (fun env evm t -> pr_ltype_env env evm t) x) with e -> pp (str (Printexc.to_string e))) let ppfconstr c = ppconstr (CClosure.term_of_fconstr c) let ppfsubst s = let (s, k) = Esubst.Internal.repr s in let sep () = str ";" ++ spc () in let pr = function | Esubst.Internal.REL n -> str "<#" ++ int n ++ str ">" | Esubst.Internal.VAL (k, x) -> pr_constr (Vars.lift k (CClosure.term_of_fconstr x)) in pp @@ str "[" ++ prlist_with_sep sep pr s ++ str "| " ++ int k ++ str "]" let ppnumtokunsigned n = pp (NumTok.Unsigned.print n) let ppnumtokunsignednat n = pp (NumTok.UnsignedNat.print n) let prset pr l = str "[" ++ hov 0 (prlist_with_sep spc pr l) ++ str "]" let ppintset l = pp (prset int (Int.Set.elements l)) let ppidset l = pp (prset Id.print (Id.Set.elements l)) let prset' pr l = str "[" ++ hov 0 (prlist_with_sep pr_comma pr l) ++ str "]" let pridmap pr l = let pr (id,b) = Id.print id ++ str "=>" ++ pr id b in prset' pr (Id.Map.fold (fun a b l -> (a,b)::l) l []) let ppidmap pr l = pp (pridmap pr l) let prmapgen pr dom = if dom = [] then str "[]" else str "[domain= " ++ hov 0 (prlist_with_sep spc pr dom) ++ str "]" let pridmapgen l = prmapgen Id.print (Id.Set.elements (Id.Map.domain l)) let ppidmapgen l = pp (pridmapgen l) let printmapgen l = prmapgen int (Int.Set.elements (Int.Map.domain l)) let ppintmapgen l = pp (printmapgen l) let ppevarsubst = ppidmap (fun id0 -> prset (fun (c,copt,id) -> hov 0 (pr_constr c ++ (match copt with None -> mt () | Some c -> spc () ++ str "") ++ (if id = id0 then mt () else spc () ++ str "")))) let prididmap = pridmap (fun _ -> Id.print) let ppididmap = ppidmap (fun _ -> Id.print) let prconstrunderbindersidmap = pridmap (fun _ (l,c) -> hov 1 (str"[" ++ prlist_with_sep spc Id.print l ++ str"]") ++ str "," ++ spc () ++ pr_econstr c) let ppconstrunderbindersidmap l = pp (prconstrunderbindersidmap l) let ppunbound_ltac_var_map l = ppidmap (fun _ arg -> str"") l open Ltac_pretype let rec pr_closure {idents=idents;typed=typed;untyped=untyped} = hov 1 (str"{idents=" ++ prididmap idents ++ str";" ++ spc() ++ str"typed=" ++ prconstrunderbindersidmap typed ++ str";" ++ spc() ++ str"untyped=" ++ pr_closed_glob_constr_idmap untyped ++ str"}") and pr_closed_glob_constr_idmap x = pridmap (fun _ -> pr_closed_glob_constr) x and pr_closed_glob_constr {closure=closure;term=term} = pr_closure closure ++ with_env_evm pr_lglob_constr_env term let ppclosure x = pp (pr_closure x) let ppclosedglobconstr x = pp (pr_closed_glob_constr x) let ppclosedglobconstridmap x = pp (pr_closed_glob_constr_idmap x) let pP s = pp (hov 0 s) let safe_pr_global = let open GlobRef in function | ConstRef kn -> pp (str "CONSTREF(" ++ Constant.debug_print kn ++ str ")") | IndRef (kn,i) -> pp (str "INDREF(" ++ MutInd.debug_print kn ++ str "," ++ int i ++ str ")") | ConstructRef ((kn,i),j) -> pp (str "CONSTRUCTREF(" ++ MutInd.debug_print kn ++ str "," ++ int i ++ str "," ++ int j ++ str ")") | VarRef id -> pp (str "VARREF(" ++ Id.print id ++ str ")") let ppglobal x = try pp(pr_global x) with _ -> safe_pr_global x let ppconst (sp,j) = pp (str"#" ++ KerName.print sp ++ str"=" ++ envpp pr_lconstr_env j.uj_val) let ppvar ((id,a)) = pp 
(str"#" ++ Id.print id ++ str":" ++ envpp pr_lconstr_env a) let genppj f j = let (c,t) = f j in (c ++ str " : " ++ t) let ppj j = pp (genppj (envpp pr_ljudge_env) j) let ppsubst s = pp (Mod_subst.debug_pr_subst s) let ppdelta s = pp (Mod_subst.debug_pr_delta s) let pp_idpred s = pp (pr_idpred s) let pp_cpred s = pp (pr_cpred s) let pp_transparent_state s = pp (pr_transparent_state s) let pp_estack_t n = pp (Reductionops.Stack.pr pr_econstr n) let pp_state_t n = pp (Reductionops.pr_state Global.(env()) Evd.empty n) (* proof printers *) let pr_evar ev = Pp.int (Evar.repr ev) let ppmetas metas = pp(Termops.pr_metaset metas) let ppevm evd = pp(Termops.pr_evar_map ~with_univs:!Detyping.print_universes (Some 2) (Global.env ()) evd) let ppevmall evd = pp(Termops.pr_evar_map ~with_univs:!Detyping.print_universes None (Global.env ()) evd) let pr_existentialset evars = prlist_with_sep spc pr_evar (Evar.Set.elements evars) let ppexistentialset evars = pp (pr_existentialset evars) let ppexistentialfilter filter = match Evd.Filter.repr filter with | None -> pp (Pp.str "ø") | Some f -> pp (prlist_with_sep spc bool f) let ppclenv clenv = pp(pr_clenv clenv) let ppgoalgoal gl = pp(Goal.pr_goal gl) let ppgoal g = pp(Printer.pr_goal g) let ppgoalsigma g = pp(Printer.pr_goal g ++ Termops.pr_evar_map None (Global.env ()) (Tacmach.Old.project g)) let pphintdb db = pp(envpp Hints.pr_hint_db_env db) let ppproofview p = let gls,sigma = Proofview.proofview p in pp(pr_enum Goal.pr_goal gls ++ fnl () ++ Termops.pr_evar_map (Some 1) (Global.env ()) sigma) let ppopenconstr (x : Evd.open_constr) = let (evd,c) = x in pp (Termops.pr_evar_map (Some 2) (Global.env ()) evd ++ envpp pr_econstr_env c) (* spiwack: deactivated until a replacement is found let pppftreestate p = pp(print_pftreestate p) *) (* let ppgoal g = pp(db_pr_goal g) *) (* let pr_gls gls = *) (* hov 0 (pr_evar_defs (sig_sig gls) ++ fnl () ++ db_pr_goal (sig_it gls)) *) (* let pr_glls glls = *) (* hov 0 (pr_evar_defs (sig_sig glls) ++ fnl () ++ *) (* prlist_with_sep fnl db_pr_goal (sig_it glls)) *) (* let ppsigmagoal g = pp(pr_goal (sig_it g)) *) (* let prgls gls = pp(pr_gls gls) *) (* let prglls glls = pp(pr_glls glls) *) let pproof p = pp(Proof.pr_proof p) let ppuni u = pp(Universe.pr u) let ppuni_level u = pp (Level.pr u) let prlev = UnivNames.pr_with_global_universes Id.Map.empty let ppuniverse_set l = pp (Level.Set.pr prlev l) let ppuniverse_instance l = pp (Instance.pr prlev l) let ppuniverse_context l = pp (pr_universe_context prlev l) let ppuniverse_context_set l = pp (pr_universe_context_set prlev l) let ppuniverse_subst l = pp (Univ.pr_universe_subst l) let ppuniverse_opt_subst l = pp (UState.pr_universe_opt_subst l) let ppuniverse_level_subst l = pp (Univ.pr_universe_level_subst l) let ppevar_universe_context l = pp (Termops.pr_evar_universe_context l) let ppconstraints c = pp (pr_constraints Level.pr c) let ppuniverseconstraints c = pp (UnivProblem.Set.pr c) let ppuniverse_context_future c = let ctx = Future.force c in ppuniverse_context ctx let ppuniverses u = pp (UGraph.pr_universes Level.pr (UGraph.repr u)) let ppnamedcontextval e = let env = Global.env () in let sigma = Evd.from_env env in pp (pr_named_context env sigma (named_context_of_val e)) let ppaucontext auctx = let nas = AbstractContext.names auctx in let prlev l = match Level.var_index l with | Some n -> (match nas.(n) with | Anonymous -> prlev l | Name id -> Id.print id) | None -> prlev l in pp (pr_universe_context prlev (AbstractContext.repr auctx)) let ppenv e = pp (str "[" ++ 
pr_named_context_of e Evd.empty ++ str "]" ++ spc() ++ str "[" ++ pr_rel_context e Evd.empty (rel_context e) ++ str "]") let ppglobenv e = ppenv (GlobEnv.env e) let ppenvwithcst e = pp (str "[" ++ pr_named_context_of e Evd.empty ++ str "]" ++ spc() ++ str "[" ++ pr_rel_context e Evd.empty (rel_context e) ++ str "]" ++ spc() ++ str "{" ++ Environ.fold_constants (fun a _ s -> Constant.print a ++ spc () ++ s) e (mt ()) ++ str "}") let pptac = (fun x -> pp(Ltac_plugin.Pptactic.pr_glob_tactic (Global.env()) x)) let ppobj obj = let Libobject.Dyn.Dyn (tag, _) = obj in Format.print_string (Libobject.Dyn.repr tag) let cnt = ref 0 let cast_kind_display k = match k with | VMcast -> "VMcast" | DEFAULTcast -> "DEFAULTcast" | NATIVEcast -> "NATIVEcast" let constr_display csr = let rec term_display c = match kind c with | Rel n -> "Rel("^(string_of_int n)^")" | Meta n -> "Meta("^(string_of_int n)^")" | Var id -> "Var("^(Id.to_string id)^")" | Sort s -> "Sort("^(sort_display s)^")" | Cast (c,k, t) -> "Cast("^(term_display c)^","^(cast_kind_display k)^","^(term_display t)^")" | Prod (na,t,c) -> "Prod("^(name_display na)^","^(term_display t)^","^(term_display c)^")\n" | Lambda (na,t,c) -> "Lambda("^(name_display na)^","^(term_display t)^","^(term_display c)^")\n" | LetIn (na,b,t,c) -> "LetIn("^(name_display na)^","^(term_display b)^"," ^(term_display t)^","^(term_display c)^")" | App (c,l) -> "App("^(term_display c)^","^(array_display l)^")\n" | Evar (e,l) -> "Evar("^(Pp.string_of_ppcmds (Evar.print e))^","^(array_display (Array.of_list l))^")" | Const (c,u) -> "Const("^(Constant.to_string c)^","^(universes_display u)^")" | Ind ((sp,i),u) -> "MutInd("^(MutInd.to_string sp)^","^(string_of_int i)^","^(universes_display u)^")" | Construct (((sp,i),j),u) -> "MutConstruct(("^(MutInd.to_string sp)^","^(string_of_int i)^")," ^","^(universes_display u)^(string_of_int j)^")" | Proj (p, c) -> "Proj("^(Constant.to_string (Projection.constant p))^","^term_display c ^")" | Case (ci,u,pms,(_,p),iv,c,bl) -> "MutCase(,"^(term_display p)^","^(term_display c)^"," ^(array_display (Array.map snd bl))^")" | Fix ((t,i),(lna,tl,bl)) -> "Fix(([|"^(Array.fold_right (fun x i -> (string_of_int x)^(if not(i="") then (";"^i) else "")) t "")^"|],"^(string_of_int i)^")," ^(array_display tl)^",[|" ^(Array.fold_right (fun x i -> (name_display x)^(if not(i="") then (";"^i) else "")) lna "")^"|]," ^(array_display bl)^")" | CoFix(i,(lna,tl,bl)) -> "CoFix("^(string_of_int i)^")," ^(array_display tl)^"," ^(Array.fold_right (fun x i -> (name_display x)^(if not(i="") then (";"^i) else "")) lna "")^"," ^(array_display bl)^")" | Int i -> "Int("^(Uint63.to_string i)^")" | Float f -> "Float("^(Float64.to_string f)^")" | Array (u,t,def,ty) -> "Array("^(array_display t)^","^(term_display def)^","^(term_display ty)^")@{" ^universes_display u^"\n" and array_display v = "[|"^ (Array.fold_right (fun x i -> (term_display x)^(if not(i="") then (";"^i) else "")) v "")^"|]" and univ_display u = incr cnt; pp (str "with " ++ int !cnt ++ str" " ++ pr_uni u ++ fnl ()) and level_display u = incr cnt; pp (str "with " ++ int !cnt ++ str" " ++ Level.pr u ++ fnl ()) and sort_display = function | SProp -> "SProp" | Set -> "Set" | Prop -> "Prop" | Type u -> univ_display u; "Type("^(string_of_int !cnt)^")" and universes_display l = Array.fold_right (fun x i -> level_display x; (string_of_int !cnt)^(if not(i="") then (" "^i) else "")) (Instance.to_array l) "" and name_display x = match x.binder_name with | Name id -> "Name("^(Id.to_string id)^")" | Anonymous -> "Anonymous" 
in pp (str (term_display csr) ++fnl ()) let econstr_display c = constr_display EConstr.Unsafe.(to_constr c) ;; open Format;; let print_pure_constr csr = let rec term_display c = match Constr.kind c with | Rel n -> print_string "#"; print_int n | Meta n -> print_string "Meta("; print_int n; print_string ")" | Var id -> print_string (Id.to_string id) | Sort s -> sort_display s | Cast (c,_, t) -> open_hovbox 1; print_string "("; (term_display c); print_cut(); print_string "::"; (term_display t); print_string ")"; close_box() | Prod ({binder_name=Name(id)},t,c) -> open_hovbox 1; print_string"("; print_string (Id.to_string id); print_string ":"; box_display t; print_string ")"; print_cut(); box_display c; close_box() | Prod ({binder_name=Anonymous},t,c) -> print_string"("; box_display t; print_cut(); print_string "->"; box_display c; print_string ")"; | Lambda (na,t,c) -> print_string "["; name_display na; print_string ":"; box_display t; print_string "]"; print_cut(); box_display c; | LetIn (na,b,t,c) -> print_string "["; name_display na; print_string "="; box_display b; print_cut(); print_string ":"; box_display t; print_string "]"; print_cut(); box_display c; | App (c,l) -> print_string "("; box_display c; Array.iter (fun x -> print_space (); box_display x) l; print_string ")" | Evar (e,l) -> print_string "Evar#"; print_int (Evar.repr e); print_string "{"; List.iter (fun x -> print_space (); box_display x) l; print_string"}" | Const (c,u) -> print_string "Cons("; sp_con_display c; print_string ","; universes_display u; print_string ")" | Proj (p,c') -> print_string "Proj("; sp_con_display (Projection.constant p); print_string ","; box_display c'; print_string ")" | Ind ((sp,i),u) -> print_string "Ind("; sp_display sp; print_string ","; print_int i; print_string ","; universes_display u; print_string ")" | Construct (((sp,i),j),u) -> print_string "Constr("; sp_display sp; print_string ","; print_int i; print_string ","; print_int j; print_string ","; universes_display u; print_string ")" | Case (ci,u,pms,p,iv,c,bl) -> let pr_ctx (nas, c) = Array.iter (fun na -> print_cut (); name_display na) nas; print_string " |- "; box_display c in open_vbox 0; print_cut(); print_string "Case"; print_space(); box_display c; print_space (); print_cut(); print_string "in"; print_cut(); print_string "Ind("; sp_display (fst ci.ci_ind); print_string ","; print_int (snd ci.ci_ind); print_string ")"; print_string "@{"; universes_display u; print_string "}"; Array.iter (fun x -> print_space (); box_display x) pms; print_cut(); print_string "return <"; pr_ctx p; print_string ">"; print_cut(); print_string "with"; open_vbox 0; Array.iter (fun x -> print_cut(); pr_ctx x) bl; close_box(); print_cut(); print_string "end"; close_box() | Fix ((t,i),(lna,tl,bl)) -> print_string "Fix("; print_int i; print_string ")"; print_cut(); open_vbox 0; let print_fix () = for k = 0 to (Array.length tl) - 1 do open_vbox 0; name_display lna.(k); print_string "/"; print_int t.(k); print_cut(); print_string ":"; box_display tl.(k) ; print_cut(); print_string ":="; box_display bl.(k); close_box (); print_cut() done in print_string"{"; print_fix(); print_string"}" | CoFix(i,(lna,tl,bl)) -> print_string "CoFix("; print_int i; print_string ")"; print_cut(); open_vbox 0; let print_fix () = for k = 0 to (Array.length tl) - 1 do open_vbox 1; name_display lna.(k); print_cut(); print_string ":"; box_display tl.(k) ; print_cut(); print_string ":="; box_display bl.(k); close_box (); print_cut(); done in print_string"{"; print_fix (); print_string"}" 
| Int i -> print_string ("Int("^(Uint63.to_string i)^")") | Float f -> print_string ("Float("^(Float64.to_string f)^")") | Array (u,t,def,ty) -> print_string "Array("; Array.iter (fun x -> box_display x; print_space()) t; print_string "|"; box_display def; print_string ":"; box_display ty; print_string ")@{"; universes_display u; print_string "}" and box_display c = open_hovbox 1; term_display c; close_box() and universes_display u = Array.iter (fun u -> print_space (); pp (Level.pr u)) (Instance.to_array u) and sort_display = function | SProp -> print_string "SProp" | Set -> print_string "Set" | Prop -> print_string "Prop" | Type u -> open_hbox(); print_string "Type("; pp (pr_uni u); print_string ")"; close_box() and name_display x = match x.binder_name with | Name id -> print_string (Id.to_string id) | Anonymous -> print_string "_" (* Remove the top names for library and Scratch to avoid long names *) and sp_display sp = (* let dir,l = decode_kn sp in let ls = match List.rev_map Id.to_string (DirPath.repr dir) with ("Top"::l)-> l | ("Coq"::_::l) -> l | l -> l in List.iter (fun x -> print_string x; print_string ".") ls;*) print_string (MutInd.debug_to_string sp) and sp_con_display sp = (* let dir,l = decode_kn sp in let ls = match List.rev_map Id.to_string (DirPath.repr dir) with ("Top"::l)-> l | ("Coq"::_::l) -> l | l -> l in List.iter (fun x -> print_string x; print_string ".") ls;*) print_string (Constant.debug_to_string sp) in try box_display csr; print_flush() with e -> print_string (Printexc.to_string e);print_flush (); raise e let print_pure_econstr c = print_pure_constr EConstr.Unsafe.(to_constr c) ;; let pploc x = let (l,r) = Loc.unloc x in print_string"(";print_int l;print_string",";print_int r;print_string")" let pp_argument_type t = pp (pr_argument_type t) let pp_generic_argument arg = pp(str"") let prgenarginfo arg = let Geninterp.Val.Dyn (tag, _) = arg in let tpe = Geninterp.Val.pr tag in (* FIXME *) (* try *) (* let data = Pptactic.pr_top_generic (Global.env ()) arg in *) (* str "" *) (* with _any -> *) str "" let ppgenarginfo arg = pp (prgenarginfo arg) let ppgenargargt arg = pp (str (Genarg.ArgT.repr arg)) let ppist ist = let pr id arg = prgenarginfo arg in pp (pridmap pr ist.Geninterp.lfun) (**********************************************************************) (* Vernac-level debugging commands *) let in_current_context f c = let (evmap,sign) = get_current_context () in f (fst (Constrintern.interp_constr sign evmap c))(*FIXME*) (* We expand the result of preprocessing to be independent of camlp5 VERNAC COMMAND EXTEND PrintPureConstr | [ "PrintPureConstr" constr(c) ] -> [ in_current_context print_pure_constr c ] END VERNAC COMMAND EXTEND PrintConstr [ "PrintConstr" constr(c) ] -> [ in_current_context constr_display c ] END *) let _ = let open Vernacextend in let ty_constr = Extend.TUentry (get_arg_tag Stdarg.wit_constr) in let cmd_sig = TyTerminal("PrintConstr", TyNonTerminal(ty_constr, TyNil)) in let cmd_fn c ?loc:_ ~atts () = vtdefault (fun () -> in_current_context econstr_display c) in let cmd_class _ = VtQuery in let cmd : ty_ml = TyML (false, cmd_sig, cmd_fn, Some cmd_class) in vernac_extend ~command:"PrintConstr" [cmd] let _ = let open Vernacextend in let ty_constr = Extend.TUentry (get_arg_tag Stdarg.wit_constr) in let cmd_sig = TyTerminal("PrintPureConstr", TyNonTerminal(ty_constr, TyNil)) in let cmd_fn c ?loc:_ ~atts () = vtdefault (fun () -> in_current_context print_pure_econstr c) in let cmd_class _ = VtQuery in let cmd : ty_ml = TyML (false, cmd_sig, cmd_fn, 
Some cmd_class) in vernac_extend ~command:"PrintPureConstr" [cmd] (* Setting printer of unbound global reference *) open Names open Libnames let encode_path ?loc prefix mpdir suffix id = let dir = match mpdir with | None -> [] | Some mp -> DirPath.repr (dirpath_of_string (ModPath.to_string mp)) in make_qualid ?loc (DirPath.make (List.rev (Id.of_string prefix::dir@suffix))) id let raw_string_of_ref ?loc _ = let open GlobRef in function | ConstRef cst -> let (mp,id) = Constant.repr2 cst in encode_path ?loc "CST" (Some mp) [] (Label.to_id id) | IndRef (kn,i) -> let (mp,id) = MutInd.repr2 kn in encode_path ?loc "IND" (Some mp) [Label.to_id id] (Id.of_string ("_"^string_of_int i)) | ConstructRef ((kn,i),j) -> let (mp,id) = MutInd.repr2 kn in encode_path ?loc "CSTR" (Some mp) [Label.to_id id;Id.of_string ("_"^string_of_int i)] (Id.of_string ("_"^string_of_int j)) | VarRef id -> encode_path ?loc "SECVAR" None [] id let short_string_of_ref ?loc _ = let open GlobRef in function | VarRef id -> qualid_of_ident ?loc id | ConstRef cst -> qualid_of_ident ?loc (Label.to_id (Constant.label cst)) | IndRef (kn,0) -> qualid_of_ident ?loc (Label.to_id (MutInd.label kn)) | IndRef (kn,i) -> encode_path ?loc "IND" None [Label.to_id (MutInd.label kn)] (Id.of_string ("_"^string_of_int i)) | ConstructRef ((kn,i),j) -> encode_path ?loc "CSTR" None [Label.to_id (MutInd.label kn);Id.of_string ("_"^string_of_int i)] (Id.of_string ("_"^string_of_int j)) (* Anticipate that printers can be used from ocamldebug and that pretty-printer should not make calls to the global env since ocamldebug runs in a different process and does not have the proper env at hand *) let _ = Flags.in_debugger := true let _ = Constrextern.set_extern_reference (if !rawdebug then raw_string_of_ref else short_string_of_ref) coq-8.15.0/dev/top_printers.mli000066400000000000000000000141561417001151100163760ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit val pP : Pp.t -> unit (* with surrounding box *) val ppfuture : 'a Future.computation -> unit val ppid : Names.Id.t -> unit val pplab : Names.Label.t -> unit val ppmbid : Names.MBId.t -> unit val ppdir : Names.DirPath.t -> unit val ppmp : Names.ModPath.t -> unit val ppcon : Names.Constant.t -> unit val ppproj : Names.Projection.t -> unit val ppprojrepr : Names.Projection.Repr.t -> unit val ppkn : Names.KerName.t -> unit val ppmind : Names.MutInd.t -> unit val ppind : Names.inductive -> unit val ppsp : Libnames.full_path -> unit val ppqualid : Libnames.qualid -> unit val ppscheme : 'a Ind_tables.scheme_kind -> unit val prrecarg : Declarations.recarg -> Pp.t val ppwf_paths : Declarations.recarg Rtree.t -> unit val pr_evar : Evar.t -> Pp.t val ppevar : Evar.t -> unit (* Multiple printers for Constr.t *) val ppconstr : Constr.t -> unit (* by Termops printer *) val ppconstr_univ : Constr.t -> unit (* Extern as type *) val pptype : Constr.types -> unit val ppeconstr : EConstr.constr -> unit (* Termops printer *) val ppconstr_expr : Constrexpr.constr_expr -> unit val ppglob_constr : 'a Glob_term.glob_constr_g -> unit val pppattern : Pattern.constr_pattern -> unit val ppfconstr : CClosure.fconstr -> unit val ppfsubst : CClosure.fconstr Esubst.subs -> unit val ppnumtokunsigned : NumTok.Unsigned.t -> unit val ppnumtokunsignednat : NumTok.UnsignedNat.t -> unit val ppintset : Int.Set.t -> unit val ppidset : Names.Id.Set.t -> unit val pridmap : 
(Names.Id.Map.key -> 'a -> Pp.t) -> 'a Names.Id.Map.t -> Pp.t val ppidmap : (Names.Id.Map.key -> 'a -> Pp.t) -> 'a Names.Id.Map.t -> unit val pridmapgen : 'a Names.Id.Map.t -> Pp.t val ppidmapgen : 'a Names.Id.Map.t -> unit val printmapgen : 'a Int.Map.t -> Pp.t val ppintmapgen : 'a Int.Map.t -> unit val prididmap : Names.Id.t Names.Id.Map.t -> Pp.t val ppididmap : Names.Id.t Names.Id.Map.t -> unit val prconstrunderbindersidmap : (Names.Id.t list * EConstr.constr) Names.Id.Map.t -> Pp.t val ppconstrunderbindersidmap : (Names.Id.t list * EConstr.constr) Names.Id.Map.t -> unit val ppevarsubst : (Constr.t * Constr.t option * Names.Id.Map.key) list Names.Id.Map.t -> unit val ppunbound_ltac_var_map : 'a Genarg.generic_argument Names.Id.Map.t -> unit val pr_closure : Ltac_pretype.closure -> Pp.t val pr_closed_glob_constr_idmap : Ltac_pretype.closed_glob_constr Names.Id.Map.t -> Pp.t val pr_closed_glob_constr : Ltac_pretype.closed_glob_constr -> Pp.t val ppclosure : Ltac_pretype.closure -> unit val ppclosedglobconstr : Ltac_pretype.closed_glob_constr -> unit val ppclosedglobconstridmap : Ltac_pretype.closed_glob_constr Names.Id.Map.t -> unit val ppglobal : Names.GlobRef.t -> unit val ppconst : Names.KerName.t * (Constr.constr, 'a) Environ.punsafe_judgment -> unit val ppvar : Names.Id.t * Constr.constr -> unit val genppj : ('a -> Pp.t * Pp.t) -> 'a -> Pp.t val ppj : EConstr.unsafe_judgment -> unit val ppsubst : Mod_subst.substitution -> unit val ppdelta : Mod_subst.delta_resolver -> unit val pp_idpred : Names.Id.Pred.t -> unit val pp_cpred : Names.Cpred.t -> unit val pp_transparent_state : TransparentState.t -> unit val pp_estack_t : Reductionops.Stack.t -> unit val pp_state_t : Reductionops.state -> unit val ppmetas : Evd.Metaset.t -> unit val ppevm : Evd.evar_map -> unit val ppevmall : Evd.evar_map -> unit val pr_existentialset : Evar.Set.t -> Pp.t val ppexistentialset : Evar.Set.t -> unit val ppexistentialfilter : Evd.Filter.t -> unit val ppclenv : Clenv.clausenv -> unit val ppgoalgoal : Goal.goal -> unit val ppgoal : Goal.goal Evd.sigma -> unit (* also print evar map *) val ppgoalsigma : Goal.goal Evd.sigma -> unit val pphintdb : Hints.Hint_db.t -> unit val ppproofview : Proofview.proofview -> unit val ppopenconstr : Evd.open_constr -> unit val pproof : Proof.t -> unit (* Universes *) val ppuni : Univ.Universe.t -> unit val ppuni_level : Univ.Level.t -> unit (* raw *) val prlev : Univ.Level.t -> Pp.t (* with global names (does this work?) 
*) val ppuniverse_set : Univ.Level.Set.t -> unit val ppuniverse_instance : Univ.Instance.t -> unit val ppuniverse_context : Univ.UContext.t -> unit val ppaucontext : Univ.AbstractContext.t -> unit val ppuniverse_context_set : Univ.ContextSet.t -> unit val ppuniverse_subst : Univ.universe_subst -> unit val ppuniverse_opt_subst : UState.universe_opt_subst -> unit val ppuniverse_level_subst : Univ.universe_level_subst -> unit val ppevar_universe_context : UState.t -> unit val ppconstraints : Univ.Constraints.t -> unit val ppuniverseconstraints : UnivProblem.Set.t -> unit val ppuniverse_context_future : Univ.UContext.t Future.computation -> unit val ppuniverses : UGraph.t -> unit val ppnamedcontextval : Environ.named_context_val -> unit val ppenv : Environ.env -> unit val ppglobenv : GlobEnv.t -> unit val ppenvwithcst : Environ.env -> unit val pptac : Ltac_plugin.Tacexpr.glob_tactic_expr -> unit val ppobj : Libobject.obj -> unit (* Some super raw printers *) val cast_kind_display : Constr.cast_kind -> string val constr_display : Constr.constr -> unit val econstr_display : EConstr.constr -> unit val print_pure_constr : Constr.types -> unit val print_pure_econstr : EConstr.types -> unit val pploc : Loc.t -> unit val pp_argument_type : Genarg.argument_type -> unit val pp_generic_argument : 'a Genarg.generic_argument -> unit val prgenarginfo : Geninterp.Val.t -> Pp.t val ppgenarginfo : Geninterp.Val.t -> unit val ppgenargargt : ('a, 'b, 'c) Genarg.ArgT.tag -> unit val ppist : Geninterp.interp_sign -> unit coq-8.15.0/dev/vm_printers.ml000066400000000000000000000060001417001151100160320ustar00rootroot00000000000000open Format open Term open Names open Vmemitcodes open Vmvalues let ppripos (ri,pos) = (match ri with | Reloc_annot a -> print_string "switch\n" | Reloc_const _ -> print_string "structured constant\n" | Reloc_getglobal kn -> print_string ("getglob "^(Constant.to_string kn)^"\n") | Reloc_proj_name p -> print_string ("proj "^(Projection.Repr.to_string p)^"\n") | Reloc_caml_prim op -> print_string ("caml primitive "^CPrimitives.to_string op) ); print_flush () let print_vfix () = print_string "vfix" let print_vfix_app () = print_string "vfix_app" let print_vswith () = print_string "switch" let ppsort = function | SProp -> print_string "SProp" | Set -> print_string "Set" | Prop -> print_string "Prop" | Type u -> print_string "Type" let print_idkey idk = match idk with | ConstKey sp -> print_string "Cons("; print_string (Constant.to_string sp); print_string ")" | VarKey id -> print_string (Id.to_string id) | RelKey i -> print_string "~";print_int i | EvarKey evk -> print_string "Evar("; print_int (Evar.repr evk); print_string ")" let rec ppzipper z = match z with | Zapp args -> let n = nargs args in open_hbox (); for i = 0 to n-2 do ppvalues (arg args i);print_string ";";print_space() done; if n-1 >= 0 then ppvalues (arg args (n-1)); close_box() | Zfix _ -> print_string "Zfix" | Zswitch _ -> print_string "Zswitch" | Zproj _ -> print_string "Zproj" and ppstack s = open_hovbox 0; print_string "["; List.iter (fun z -> ppzipper z;print_string " | ") s; print_string "]"; close_box() and ppatom a = match a with | Aid idk -> print_idkey idk | Asort u -> print_string "Sort(...)" | Aind(sp,i) -> print_string "Ind("; print_string (MutInd.to_string sp); print_string ","; print_int i; print_string ")" and ppwhd whd = match whd with | Vprod _ -> print_string "product" | Vfun _ -> print_string "function" | Vfix _ -> print_vfix() | Vcofix _ -> print_string "cofix" | Vconstr_const i -> print_string 
"C(";print_int i;print_string")" | Vconstr_block b -> ppvblock b | Vint64 i -> printf "int64(%LiL)" i | Vfloat64 f -> printf "float64(%.17g)" f | Varray t -> ppvarray t | Vatom_stk(a,s) -> open_hbox();ppatom a;close_box(); print_string"@";ppstack s | Vuniv_level lvl -> Feedback.msg_notice (Univ.Level.pr lvl) and ppvblock b = open_hbox(); print_string "Cb(";print_int (btag b); let n = bsize b in for i = 0 to n -1 do print_string ",";ppvalues (bfield b i) done; print_string")"; close_box() and ppvarray t = let length = Parray.length_int t in open_hbox(); print_string "[|"; for i = 0 to length - 2 do ppvalues (Parray.get t (Uint63.of_int i)); print_string "; " done; ppvalues (Parray.get t (Uint63.of_int (length - 1))); print_string " | "; ppvalues (Parray.default t); print_string " |]"; close_box() and ppvalues v = open_hovbox 0;ppwhd (whd_val v);close_box(); print_flush() coq-8.15.0/doc/000077500000000000000000000000001417001151100131235ustar00rootroot00000000000000coq-8.15.0/doc/LICENSE000066400000000000000000000753021417001151100141370ustar00rootroot00000000000000The Coq Reference Manual is a collective work from the Coq Development Team whose members are listed in the file CREDITS of the Coq source package. All related documents (the LaTeX and BibTeX sources, the embedded png files, and the PostScript, PDF and html outputs) are copyright (c) 1999-2019, Inria, CNRS and contributors, with the exception of the Ubuntu font file UbuntuMono-B.ttf, which is Copyright 2010,2011 Canonical Ltd and licensed under the Ubuntu font license, version 1.0 (https://www.ubuntu.com/legal/terms-and-policies/font-licence), its derivative CoqNotations.ttf distributed under the same license, and the _templates/versions.html file derived from sphinx_rtd_theme, which is Copyright 2013-2018 Dave Snider, Read the Docs, Inc. & contributors and distributed under the MIT License included in that file. The material connected to the Reference Manual may be distributed only subject to the terms and conditions set forth in the Open Publication License, v1.0 or later (the latest version is presently available at http://www.opencontent.org/openpub/). Options A and B are *not* elected. The Coq Standard Library is a collective work from the Coq Development Team whose members are listed in the file CREDITS of the Coq source package. All related documents (the Coq vernacular source files and the PostScript, PDF and html outputs) are copyright (c) 1999-2019, Inria, CNRS and contributors. The material connected to the Standard Library is distributed under the terms of the Lesser General Public License version 2.1 or later. ---------------------------------------------------------------------- *Open Publication License* v1.0, 8 June 1999 *I. REQUIREMENTS ON BOTH UNMODIFIED AND MODIFIED VERSIONS* The Open Publication works may be reproduced and distributed in whole or in part, in any medium physical or electronic, provided that the terms of this license are adhered to, and that this license or an incorporation of it by reference (with any options elected by the author(s) and/or publisher) is displayed in the reproduction. Proper form for an incorporation by reference is as follows: Copyright (c) by . This material may be distributed only subject to the terms and conditions set forth in the Open Publication License, vX.Y or later (the latest version is presently available at http://www.opencontent.org/openpub/). 
The reference must be immediately followed with any options elected by the author(s) and/or publisher of the document (see section VI). Commercial redistribution of Open Publication-licensed material is permitted. Any publication in standard (paper) book form shall require the citation of the original publisher and author. The publisher and author's names shall appear on all outer surfaces of the book. On all outer surfaces of the book the original publisher's name shall be as large as the title of the work and cited as possessive with respect to the title. *II. COPYRIGHT* The copyright to each Open Publication is owned by its author(s) or designee. *III. SCOPE OF LICENSE* The following license terms apply to all Open Publication works, unless otherwise explicitly stated in the document. Mere aggregation of Open Publication works or a portion of an Open Publication work with other works or programs on the same media shall not cause this license to apply to those other works. The aggregate work shall contain a notice specifying the inclusion of the Open Publication material and appropriate copyright notice. SEVERABILITY. If any part of this license is found to be unenforceable in any jurisdiction, the remaining portions of the license remain in force. NO WARRANTY. Open Publication works are licensed and provided "as is" without warranty of any kind, express or implied, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose or a warranty of non-infringement. *IV. REQUIREMENTS ON MODIFIED WORKS* All modified versions of documents covered by this license, including translations, anthologies, compilations and partial documents, must meet the following requirements: 1. The modified version must be labeled as such. 2. The person making the modifications must be identified and the modifications dated. 3. Acknowledgement of the original author and publisher if applicable must be retained according to normal academic citation practices. 4. The location of the original unmodified document must be identified. 5. The original author's (or authors') name(s) may not be used to assert or imply endorsement of the resulting document without the original author's (or authors') permission. *V. GOOD-PRACTICE RECOMMENDATIONS * In addition to the requirements of this license, it is requested from and strongly recommended of redistributors that: 1. If you are distributing Open Publication works on hardcopy or CD-ROM, you provide email notification to the authors of your intent to redistribute at least thirty days before your manuscript or media freeze, to give the authors time to provide updated documents. This notification should describe modifications, if any, made to the document. 2. All substantive modifications (including deletions) be either clearly marked up in the document or else described in an attachment to the document. 3. Finally, while it is not mandatory under this license, it is considered good form to offer a free copy of any hardcopy and CD-ROM expression of an Open Publication-licensed work to its author(s). *VI. LICENSE OPTIONS* The author(s) and/or publisher of an Open Publication-licensed document may elect certain options by appending language to the reference to or copy of the license. These options are considered part of the license instance and must be included with the license (or its incorporation by reference) in derived works. A. 
To prohibit distribution of substantively modified versions without the explicit permission of the author(s). "Substantive modification" is defined as a change to the semantic content of the document, and excludes mere changes in format or typographical corrections. To accomplish this, add the phrase `Distribution of substantively modified versions of this document is prohibited without the explicit permission of the copyright holder.' to the license reference or copy. B. To prohibit any publication of this work or derivative works in whole or in part in standard (paper) book form for commercial purposes is prohibited unless prior permission is obtained from the copyright holder. To accomplish this, add the phrase 'Distribution of the work or derivative of the work in any standard (paper) book form is prohibited unless prior permission is obtained from the copyright holder.' to the license reference or copy. ---------------------------------------------------------------------- GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. 
To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. 
This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. 
Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. 
When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. 
For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. 
For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS coq-8.15.0/doc/README.md000066400000000000000000000105601417001151100144040ustar00rootroot00000000000000The Coq documentation ===================== The Coq documentation includes - A Reference Manual - A document presenting the Coq standard library The documentation of the latest released version is available on the Coq web site at [coq.inria.fr/documentation](http://coq.inria.fr/documentation). Additionally, you can view the reference manual for the development version at , and the documentation of the standard library for the development version at . The reference manual is written is reStructuredText and compiled using Sphinx. See [`sphinx/README.rst`](sphinx/README.rst) to learn more about the format that is used. The documentation for the standard library is generated from the `.v` source files using coqdoc. Dependencies ------------ ### HTML documentation To produce the complete documentation in HTML, you will need Coq dependencies listed in [`INSTALL.md`](../INSTALL.md). Additionally, the Sphinx-based reference manual requires Python 3, and the following Python packages: - sphinx >= 3.0.2 - sphinx_rtd_theme >= 0.4.3 - beautifulsoup4 >= 4.0.6 - antlr4-python3-runtime >= 4.7.1 - pexpect >= 4.2.1 - sphinxcontrib-bibtex >= 0.4.2 To install them, you should first install pip and setuptools (for instance, with `apt install python3-pip python3-setuptools` on Debian / Ubuntu) then run: pip3 install sphinx sphinx_rtd_theme beautifulsoup4 \ antlr4-python3-runtime==4.7.1 pexpect sphinxcontrib-bibtex Nix users should get the correct development environment to build the HTML documentation from Coq's [`default.nix`](../default.nix) (note this doesn't include the LaTeX packages needed to build the full documentation). ### Other formats To produce the documentation in PDF and PostScript formats, the following additional tools are required: - latex (latex2e) - pdflatex - dvips - makeindex - xelatex - latexmk All of them are part of the TexLive distribution. E.g. on Debian / Ubuntu, install them with: apt install texlive-full Or if you want to use less disk space: apt install texlive-latex-extra texlive-fonts-recommended texlive-xetex \ latexmk fonts-freefont-otf ### Setting the locale for Python Make sure that the locale is configured on your platform so that Python encodes printed messages with utf-8 rather than generating runtime exceptions for non-ascii characters. The `.UTF-8` in `export LANG=C.UTF-8` sets UTF-8 encoding. The `C` can be replaced with any supported language code. You can set the default for a Docker build with `ENV LANG C.UTF-8`. (Python may look at other environment variables to determine the locale; see the [Python documentation](https://docs.python.org/3/library/locale.html#locale.getdefaultlocale)). 
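As a quick sanity check (a minimal sketch, not part of the Coq build system; the script name is hypothetical and the expected `UTF-8` values assume a correctly configured locale such as `C.UTF-8`), you can ask Python which encodings it will use before running the Sphinx build:

```python
# check_locale.py -- hypothetical helper; prints the encodings Python has selected
import locale
import sys

# Encoding used for most text I/O when none is specified explicitly
print("preferred encoding:", locale.getpreferredencoding())
# Encoding of the standard output stream used for printed messages
print("stdout encoding:", sys.stdout.encoding)
```

If either value is not a UTF-8 variant, revisit the `LANG` setting described above before building the documentation.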
Compilation ----------- To produce all documentation about Coq in all formats, just run: ./configure # (if you hadn't already) make doc Alternatively, you can use some specific targets: - `make doc-ps` to produce all PostScript documents - `make doc-pdf` to produce all PDF documents - `make doc-html` to produce all HTML documents - `make refman` to produce the HTML and PDF versions of the reference manual - `make refman-{html,pdf}` to produce only one format of the reference manual - `make doc-stdlib` to produce all formats of the Coq standard library Also note the `-with-doc yes` option of `./configure` to enable the build of the documentation as part of the default make target. To build the Sphinx documentation without stopping at the first warning with the legacy Makefile, set `SPHINXWARNERROR` to 0 as such: ``` SPHINXWARNERROR=0 make refman-html ``` To do the same with the Dune build system, change the value of the `SPHINXWARNOPT` variable (default is `-W`). The following will build the Sphinx documentation without stopping at the first warning, and store all the warnings in the file `/tmp/warn.log`: ``` SPHINXWARNOPT="-w/tmp/warn.log" dune build @refman-html ``` Installation ------------ To install all produced documents, do: make install-doc This will install the documentation in `/usr/share/doc/coq` unless you specify another value through the `-docdir` option of `./configure` or the `DOCDIR` environment variable. Note that `DOCDIR` controls the root of the documentation, that is to say, in the example above, the root is `/usr/share/doc`. coq-8.15.0/doc/changelog/000077500000000000000000000000001417001151100150525ustar00rootroot00000000000000coq-8.15.0/doc/changelog/00-title.rst000066400000000000000000000000461417001151100171420ustar00rootroot00000000000000Unreleased changes ------------------ coq-8.15.0/doc/changelog/01-kernel/000077500000000000000000000000001417001151100165505ustar00rootroot00000000000000coq-8.15.0/doc/changelog/01-kernel/00000-title.rst000066400000000000000000000000201417001151100210500ustar00rootroot00000000000000 Kernel ^^^^^^ coq-8.15.0/doc/changelog/02-specification-language/000077500000000000000000000000001417001151100216725ustar00rootroot00000000000000coq-8.15.0/doc/changelog/02-specification-language/00000-title.rst000066400000000000000000000001201417001151100241730ustar00rootroot00000000000000 Specification language, type inference ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ coq-8.15.0/doc/changelog/03-notations/000077500000000000000000000000001417001151100173105ustar00rootroot00000000000000coq-8.15.0/doc/changelog/03-notations/00000-title.rst000066400000000000000000000000261417001151100216160ustar00rootroot00000000000000 Notations ^^^^^^^^^ coq-8.15.0/doc/changelog/04-tactics/000077500000000000000000000000001417001151100167255ustar00rootroot00000000000000coq-8.15.0/doc/changelog/04-tactics/00000-title.rst000066400000000000000000000000221417001151100212270ustar00rootroot00000000000000 Tactics ^^^^^^^ coq-8.15.0/doc/changelog/05-tactic-language/000077500000000000000000000000001417001151100203245ustar00rootroot00000000000000coq-8.15.0/doc/changelog/05-tactic-language/00000-title.rst000066400000000000000000000000421417001151100226300ustar00rootroot00000000000000 Tactic language ^^^^^^^^^^^^^^^ coq-8.15.0/doc/changelog/06-ssreflect/000077500000000000000000000000001417001151100172675ustar00rootroot00000000000000coq-8.15.0/doc/changelog/06-ssreflect/00000-title.rst000066400000000000000000000000261417001151100215750ustar00rootroot00000000000000 SSReflect ^^^^^^^^^ 
coq-8.15.0/doc/changelog/07-vernac-commands-and-options/000077500000000000000000000000001417001151100226045ustar00rootroot00000000000000coq-8.15.0/doc/changelog/07-vernac-commands-and-options/00000-title.rst000066400000000000000000000000541417001151100251130ustar00rootroot00000000000000 Commands and options ^^^^^^^^^^^^^^^^^^^^ coq-8.15.0/doc/changelog/08-cli-tools/000077500000000000000000000000001417001151100172045ustar00rootroot00000000000000coq-8.15.0/doc/changelog/08-cli-tools/00000-title.rst000066400000000000000000000000501417001151100215070ustar00rootroot00000000000000 Command-line tools ^^^^^^^^^^^^^^^^^^ coq-8.15.0/doc/changelog/09-coqide/000077500000000000000000000000001417001151100165445ustar00rootroot00000000000000coq-8.15.0/doc/changelog/09-coqide/00000-title.rst000066400000000000000000000000201417001151100210440ustar00rootroot00000000000000 CoqIDE ^^^^^^ coq-8.15.0/doc/changelog/10-standard-library/000077500000000000000000000000001417001151100205325ustar00rootroot00000000000000coq-8.15.0/doc/changelog/10-standard-library/00000-title.rst000066400000000000000000000000441417001151100230400ustar00rootroot00000000000000 Standard library ^^^^^^^^^^^^^^^^ coq-8.15.0/doc/changelog/11-infrastructure-and-dependencies/000077500000000000000000000000001417001151100235355ustar00rootroot00000000000000coq-8.15.0/doc/changelog/11-infrastructure-and-dependencies/00000-title.rst000066400000000000000000000001021417001151100260360ustar00rootroot00000000000000 Infrastructure and dependencies ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ coq-8.15.0/doc/changelog/12-misc/000077500000000000000000000000001417001151100162255ustar00rootroot00000000000000coq-8.15.0/doc/changelog/12-misc/00000-title.rst000066400000000000000000000000361417001151100205340ustar00rootroot00000000000000 Miscellaneous ^^^^^^^^^^^^^ coq-8.15.0/doc/changelog/README.md000066400000000000000000000037441417001151100163410ustar00rootroot00000000000000# Unreleased changelog # ## When to add an entry? ## All new features, user-visible changes to features, user-visible or otherwise important infrastructure changes, and important bug fixes should get a changelog entry. Compatibility-breaking changes should always get a changelog entry, which should explain what compatibility breakage is to expect. Pull requests changing the ML API in significant ways should add an entry in [`dev/doc/changes.md`](../../dev/doc/changes.md). ## How to add an entry? ## Run `./dev/tools/make-changelog.sh`: it will ask you for your PR number, and to choose among the predefined categories, and the predefined types of changes. Afterward, fill in the automatically generated entry with a short description of your change (which should describe any compatibility issues in particular). You may also add a reference to the relevant fixed issue, and credit reviewers, co-authors, and anyone who helped advance the PR. The format for changelog entries is the same as in the reference manual. In particular, you may reference the documentation you just added with `:ref:`, `:tacn:`, `:cmd:`, `:opt:`, `:token:`, etc. See the [documentation of the Sphinx format](../sphinx/README.rst) of the manual for details. Here is a summary of the structure of a changelog entry: ``` rst - **Added / Changed / Deprecated / Fixed / Removed:** Description of the changes, with possible link to :ref:`relevant-section` of the updated documentation (`#PRNUM `_, [fixes `#ISSUE1 `_ [ and `#ISSUE2 `_],] by Full Name[, with help / review of Full Name]). ``` The first line indicates the type of change. 
Available types come from the [Keep a Changelog 1.0.0](https://keepachangelog.com/en/1.0.0/) specification. We exclude the "Security" type for now because of the absence of a process for handling critical bugs (proof of False) as security vulnerabilities. coq-8.15.0/doc/common/000077500000000000000000000000001417001151100144135ustar00rootroot00000000000000coq-8.15.0/doc/common/macros.tex000066400000000000000000000476571417001151100164440ustar00rootroot00000000000000%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % MACROS FOR THE REFERENCE MANUAL OF COQ % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % For commentaries (define \com as {} for the release manual) %\newcommand{\com}[1]{{\it(* #1 *)}} %\newcommand{\com}[1]{} %%OPTIONS for HACHA %\renewcommand{\cuttingunit}{section} %BEGIN LATEX \newenvironment{centerframe}% {\bgroup \dimen0=\textwidth \advance\dimen0 by -2\fboxrule \advance\dimen0 by -2\fboxsep \setbox0=\hbox\bgroup \begin{minipage}{\dimen0}% \begin{center}}% {\end{center}% \end{minipage}\egroup \centerline{\fbox{\box0}}\egroup } %END LATEX %HEVEA \newenvironment{centerframe}{\begin{center}}{\end{center}} %HEVEA \renewcommand{\vec}[1]{\mathbf{#1}} %\renewcommand{\ominus}{-} % Hevea does a good job translating these commands %\renewcommand{\oplus}{+} %\renewcommand{\otimes}{\times} %\newcommand{\land}{\wedge} %\newcommand{\lor}{\vee} %HEVEA \renewcommand{\k}[1]{#1} % \k{a} is supposed to produce a with a little stroke %HEVEA \newcommand{\phantom}[1]{\qquad} %%%%%%%%%%%%%%%%%%%%%%% % Formatting commands % %%%%%%%%%%%%%%%%%%%%%%% \newcommand{\ErrMsg}{\medskip \noindent {\bf Error message: }} \newcommand{\ErrMsgx}{\medskip \noindent {\bf Error messages: }} \newcommand{\variant}{\medskip \noindent {\bf Variant: }} \newcommand{\variants}{\medskip \noindent {\bf Variants: }} \newcommand{\SeeAlso}{\medskip \noindent {\bf See also: }} \newcommand{\Rem}{\medskip \noindent {\bf Remark: }} \newcommand{\Rems}{\medskip \noindent {\bf Remarks: }} \newcommand{\Example}{\medskip \noindent {\bf Example: }} \newcommand{\examples}{\medskip \noindent {\bf Examples: }} \newcommand{\Warning}{\medskip \noindent {\bf Warning: }} \newcommand{\Warns}{\medskip \noindent {\bf Warnings: }} \newcounter{ex} \newcommand{\firstexample}{\setcounter{ex}{1}} \newcommand{\example}[1]{ \medskip \noindent \textbf{Example \arabic{ex}: }\textit{#1} \addtocounter{ex}{1}} \newenvironment{Variant}{\variant\begin{enumerate}}{\end{enumerate}} \newenvironment{Variants}{\variants\begin{enumerate}}{\end{enumerate}} \newenvironment{ErrMsgs}{\ErrMsgx\begin{enumerate}}{\end{enumerate}} \newenvironment{Remarks}{\Rems\begin{enumerate}}{\end{enumerate}} \newenvironment{Warnings}{\Warns\begin{enumerate}}{\end{enumerate}} \newenvironment{Examples}{\medskip\noindent{\bf Examples:} \begin{enumerate}}{\end{enumerate}} %\newcommand{\bd}{\noindent\bf} %\newcommand{\sbd}{\vspace{8pt}\noindent\bf} %\newcommand{\sdoll}[1]{\begin{small}$ #1~ $\end{small}} %\newcommand{\sdollnb}[1]{\begin{small}$ #1 $\end{small}} \newcommand{\kw}[1]{\textsf{#1}} %\newcommand{\spec}[1]{\{\,#1\,\}} % Building regular expressions \newcommand{\zeroone}[1]{\mbox{\sl [}{#1}\mbox{\sl ]}} \newcommand{\zeroonelax}[1]{\mbox{\sl [}#1\mbox{\sl ]}} %\newcommand{\zeroonemany}[1]{$\{$#1$\}$*} %\newcommand{\onemany}[1]{$\{$#1$\}$+} \newcommand{\nelistnosep}[1]{{#1} \mbox{\dots} {#1}} \newcommand{\nelist}[2]{{#1} {\tt #2} \mbox{\dots} {\tt #2} {#1}} \newcommand{\sequence}[2]{{\sl [}{#1} {\tt #2} \mbox{\dots} {\tt #2} {#1}{\sl ]}} \newcommand{\nelistwithoutblank}[2]{#1{\tt 
#2}\mbox{\dots}{\tt #2}#1} \newcommand{\sequencewithoutblank}[2]{$[$#1{\tt #2}\mbox{\dots}{\tt #2}#1$]$} % Used for RefMan-gal %\newcommand{\ml}[1]{\hbox{\tt{#1}}} %\newcommand{\op}{\,|\,} %%%%%%%%%%%%%%%%%%%%%%%% % Trademarks and so on % %%%%%%%%%%%%%%%%%%%%%%%% \newcommand{\Coq}{\textsc{Coq}} \newcommand{\gallina}{\textsc{Gallina}} \newcommand{\Gallina}{\textsc{Gallina}} \newcommand{\CoqIDE}{\textsc{CoqIDE}} \newcommand{\ocaml}{\textsc{OCaml}} \newcommand{\camlpppp}{\textsc{Camlp5}} \newcommand{\emacs}{\textsc{GNU Emacs}} \newcommand{\ProofGeneral}{\textsc{Proof General}} \newcommand{\CIC}{\textsc{Cic}} \newcommand{\iCIC}{\textsc{Cic}} \newcommand{\FW}{\ensuremath{F_{\omega}}} \newcommand{\Program}{\textsc{Program}} \newcommand{\Russell}{\textsc{Russell}} \newcommand{\PVS}{\textsc{PVS}} %\newcommand{\bn}{{\sf BNF}} %%%%%%%%%%%%%%%%%%% % Name of tactics % %%%%%%%%%%%%%%%%%%% %\newcommand{\Natural}{\mbox{\tt Natural}} %%%%%%%%%%%%%%%%% % \rm\sl series % %%%%%%%%%%%%%%%%% \newcommand{\nterm}[1]{\textrm{\textsl{#1}}} \newcommand{\qstring}{\nterm{string}} %% New syntax specific entries \newcommand{\annotation}{\nterm{annotation}} \newcommand{\assums}{\nterm{assums}} % vernac \newcommand{\simpleassums}{\nterm{simple\_assums}} % assumptions \newcommand{\binder}{\nterm{binder}} \newcommand{\binders}{\nterm{binders}} \newcommand{\caseitems}{\nterm{match\_items}} \newcommand{\caseitem}{\nterm{match\_item}} \newcommand{\eqn}{\nterm{equation}} \newcommand{\ifitem}{\nterm{dep\_ret\_type}} \newcommand{\hyplocation}{\nterm{hyp\_location}} \newcommand{\convclause}{\nterm{conversion\_clause}} \newcommand{\occclause}{\nterm{occurrence\_clause}} \newcommand{\occgoalset}{\nterm{goal\_occurrences}} \newcommand{\atoccurrences}{\nterm{at\_occurrences}} \newcommand{\occlist}{\nterm{occurrences}} \newcommand{\params}{\nterm{params}} % vernac \newcommand{\returntype}{\nterm{return\_type}} \newcommand{\idparams}{\nterm{ident\_with\_params}} \newcommand{\statkwd}{\nterm{assertion\_keyword}} % vernac \newcommand{\termarg}{\nterm{arg}} \newcommand{\hintdef}{\nterm{hint\_definition}} \newcommand{\typecstr}{\zeroone{{\tt :}~{\term}}} \newcommand{\typecstrwithoutblank}{\zeroone{{\tt :}{\term}}} \newcommand{\typecstrtype}{\zeroone{{\tt :}~{\type}}} \newcommand{\Fwterm}{\nterm{Fwterm}} \newcommand{\Index}{\nterm{index}} \newcommand{\abbrev}{\nterm{abbreviation}} \newcommand{\atomictac}{\nterm{atomic\_tactic}} \newcommand{\bindinglist}{\nterm{bindings\_list}} \newcommand{\cast}{\nterm{cast}} \newcommand{\cofixpointbodies}{\nterm{cofix\_bodies}} \newcommand{\cofixpointbody}{\nterm{cofix\_body}} \newcommand{\commandtac}{\nterm{tactic\_invocation}} \newcommand{\constructor}{\nterm{constructor}} \newcommand{\convtactic}{\nterm{conv\_tactic}} \newcommand{\assumptionkeyword}{\nterm{assumption\_keyword}} \newcommand{\assumption}{\nterm{assumption}} \newcommand{\definition}{\nterm{definition}} \newcommand{\digit}{\nterm{digit}} \newcommand{\exteqn}{\nterm{ext\_eqn}} \newcommand{\field}{\nterm{field}} \newcommand{\fielddef}{\nterm{field\_def}} \newcommand{\firstletter}{\nterm{first\_letter}} \newcommand{\fixpg}{\nterm{fix\_pgm}} \newcommand{\fixpointbodies}{\nterm{fix\_bodies}} \newcommand{\fixpointbody}{\nterm{fix\_body}} \newcommand{\fixpoint}{\nterm{fixpoint}} \newcommand{\flag}{\nterm{flag}} \newcommand{\form}{\nterm{form}} \newcommand{\entry}{\nterm{entry}} \newcommand{\proditem}{\nterm{prod\_item}} \newcommand{\taclevel}{\nterm{tactic\_level}} \newcommand{\tacargtype}{\nterm{tactic\_argument\_type}} 
\newcommand{\scope}{\nterm{scope}} \newcommand{\delimkey}{\nterm{key}} \newcommand{\optscope}{\nterm{opt\_scope}} \newcommand{\declnotation}{\nterm{decl\_notation}} \newcommand{\symbolentry}{\nterm{symbol}} \newcommand{\modifiers}{\nterm{modifiers}} \newcommand{\binderinterp}{\nterm{binder\_interp}} \newcommand{\localdef}{\nterm{local\_def}} \newcommand{\localdecls}{\nterm{local\_decls}} \newcommand{\ident}{\nterm{ident}} \newcommand{\accessident}{\nterm{access\_ident}} \newcommand{\possiblybracketedident}{\nterm{possibly\_bracketed\_ident}} \newcommand{\inductivebody}{\nterm{ind\_body}} \newcommand{\inductive}{\nterm{inductive}} \newcommand{\naturalnumber}{\nterm{natural}} \newcommand{\integer}{\nterm{integer}} \newcommand{\multpattern}{\nterm{mult\_pattern}} \newcommand{\mutualcoinductive}{\nterm{mutual\_coinductive}} \newcommand{\mutualinductive}{\nterm{mutual\_inductive}} \newcommand{\nestedpattern}{\nterm{nested\_pattern}} \newcommand{\name}{\nterm{name}} \newcommand{\num}{\nterm{num}} \newcommand{\pattern}{\nterm{pattern}} % pattern for pattern-matching \newcommand{\orpattern}{\nterm{or\_pattern}} \newcommand{\intropattern}{\nterm{intro\_pattern}} \newcommand{\intropatternlist}{\nterm{intro\_pattern\_list}} \newcommand{\disjconjintropattern}{\nterm{disj\_conj\_intro\_pattern}} \newcommand{\namingintropattern}{\nterm{naming\_intro\_pattern}} \newcommand{\termpattern}{\nterm{term\_pattern}} % term with holes \newcommand{\pat}{\nterm{pat}} \newcommand{\pgs}{\nterm{pgms}} \newcommand{\pg}{\nterm{pgm}} \newcommand{\abullet}{\nterm{bullet}} %BEGIN LATEX \newcommand{\proof}{\nterm{proof}} %END LATEX %HEVEA \renewcommand{\proof}{\nterm{proof}} \newcommand{\record}{\nterm{record}} \newcommand{\recordkw}{\nterm{record\_keyword}} \newcommand{\rewrule}{\nterm{rewriting\_rule}} \newcommand{\sentence}{\nterm{sentence}} \newcommand{\simplepattern}{\nterm{simple\_pattern}} \newcommand{\sort}{\nterm{sort}} \newcommand{\specif}{\nterm{specif}} \newcommand{\assertion}{\nterm{assertion}} \newcommand{\str}{\nterm{string}} \newcommand{\subsequentletter}{\nterm{subsequent\_letter}} \newcommand{\switch}{\nterm{switch}} \newcommand{\messagetoken}{\nterm{message\_token}} \newcommand{\tac}{\nterm{tactic}} \newcommand{\terms}{\nterm{terms}} \newcommand{\term}{\nterm{term}} \newcommand{\module}{\nterm{module}} \newcommand{\modexpr}{\nterm{module\_expression}} \newcommand{\modtype}{\nterm{module\_type}} \newcommand{\onemodbinding}{\nterm{module\_binding}} \newcommand{\modbindings}{\nterm{module\_bindings}} \newcommand{\qualid}{\nterm{qualid}} \newcommand{\qualidorstring}{\nterm{qualid\_or\_string}} \newcommand{\class}{\nterm{class}} \newcommand{\dirpath}{\nterm{dirpath}} \newcommand{\typedidents}{\nterm{typed\_idents}} \newcommand{\type}{\nterm{type}} \newcommand{\vref}{\nterm{ref}} \newcommand{\zarithformula}{\nterm{zarith\_formula}} \newcommand{\zarith}{\nterm{zarith}} \newcommand{\ltac}{\mbox{${\mathcal{L}}_{tac}$}} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % \mbox{\sf } series for roman text in maths formulas % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% \newcommand{\alors}{\mbox{\textsf{then}}} \newcommand{\alter}{\mbox{\textsf{alter}}} \newcommand{\bool}{\mbox{\textsf{bool}}} \newcommand{\conc}{\mbox{\textsf{conc}}} \newcommand{\cons}{\mbox{\textsf{cons}}} \newcommand{\consf}{\mbox{\textsf{consf}}} \newcommand{\emptyf}{\mbox{\textsf{emptyf}}} \newcommand{\EqSt}{\mbox{\textsf{EqSt}}} \newcommand{\false}{\mbox{\textsf{false}}} \newcommand{\filter}{\mbox{\textsf{filter}}} 
\newcommand{\forest}{\mbox{\textsf{forest}}} \newcommand{\from}{\mbox{\textsf{from}}} \newcommand{\hd}{\mbox{\textsf{hd}}} \newcommand{\haslength}{\mbox{\textsf{has\_length}}} \newcommand{\length}{\mbox{\textsf{length}}} \newcommand{\haslengthA}{\mbox {\textsf{has\_length~A}}} \newcommand{\List}{\mbox{\textsf{list}}} \newcommand{\ListA}{\mbox{\textsf{list}}~\ensuremath{A}} \newcommand{\nilhl}{\mbox{\textsf{nil\_hl}}} \newcommand{\conshl}{\mbox{\textsf{cons\_hl}}} \newcommand{\nat}{\mbox{\textsf{nat}}} \newcommand{\nO}{\mbox{\textsf{O}}} \newcommand{\nS}{\mbox{\textsf{S}}} \newcommand{\node}{\mbox{\textsf{node}}} \newcommand{\Nil}{\mbox{\textsf{nil}}} \newcommand{\SProp}{\mbox{\textsf{SProp}}} \newcommand{\Prop}{\mbox{\textsf{Prop}}} \newcommand{\Set}{\mbox{\textsf{Set}}} \newcommand{\si}{\mbox{\textsf{if}}} \newcommand{\sinon}{\mbox{\textsf{else}}} \newcommand{\Str}{\mbox{\textsf{Stream}}} \newcommand{\tl}{\mbox{\textsf{tl}}} \newcommand{\tree}{\mbox{\textsf{tree}}} \newcommand{\true}{\mbox{\textsf{true}}} \newcommand{\Type}{\mbox{\textsf{Type}}} \newcommand{\unfold}{\mbox{\textsf{unfold}}} \newcommand{\zeros}{\mbox{\textsf{zeros}}} \newcommand{\even}{\mbox{\textsf{even}}} \newcommand{\odd}{\mbox{\textsf{odd}}} \newcommand{\evenO}{\mbox{\textsf{even\_O}}} \newcommand{\evenS}{\mbox{\textsf{even\_S}}} \newcommand{\oddS}{\mbox{\textsf{odd\_S}}} \newcommand{\Prod}{\mbox{\textsf{prod}}} \newcommand{\Pair}{\mbox{\textsf{pair}}} %%%%%%%%% % Misc. % %%%%%%%%% \newcommand{\T}{\texttt{T}} \newcommand{\U}{\texttt{U}} \newcommand{\real}{\textsf{Real}} \newcommand{\Data}{\textit{Data}} \newcommand{\In} {{\textbf{in }}} \newcommand{\AND} {{\textbf{and}}} \newcommand{\If}{{\textbf{if }}} \newcommand{\Else}{{\textbf{else }}} \newcommand{\Then} {{\textbf{then }}} %\newcommand{\Let}{{\textbf{let }}} % looks like this is never used \newcommand{\Where}{{\textbf{where rec }}} \newcommand{\Function}{{\textbf{function }}} \newcommand{\Rec}{{\textbf{rec }}} %\newcommand{\cn}{\centering} \newcommand{\nth}{\mbox{$^{\mbox{\scriptsize th}}$}} %%%%%%%%%%%%%%%%%%%%%%%%%%%%% % Math commands and symbols % %%%%%%%%%%%%%%%%%%%%%%%%%%%%% \newcommand{\la}{\leftarrow} \newcommand{\ra}{\rightarrow} \newcommand{\Ra}{\Rightarrow} \newcommand{\rt}{\Rightarrow} \newcommand{\lla}{\longleftarrow} \newcommand{\lra}{\longrightarrow} \newcommand{\Llra}{\Longleftrightarrow} \newcommand{\mt}{\mapsto} \newcommand{\ov}{\overrightarrow} \newcommand{\wh}{\widehat} \newcommand{\up}{\uparrow} \newcommand{\dw}{\downarrow} \newcommand{\nr}{\nearrow} \newcommand{\se}{\searrow} \newcommand{\sw}{\swarrow} \newcommand{\nw}{\nwarrow} \newcommand{\mto}{.\;} \newcommand{\vm}[1]{\vspace{#1em}} \newcommand{\vx}[1]{\vspace{#1ex}} \newcommand{\hm}[1]{\hspace{#1em}} \newcommand{\hx}[1]{\hspace{#1ex}} \newcommand{\sm}{\mbox{ }} \newcommand{\mx}{\mbox} %\newcommand{\nq}{\neq} %\newcommand{\eq}{\equiv} \newcommand{\fa}{\forall} %\newcommand{\ex}{\exists} \newcommand{\impl}{\rightarrow} %\newcommand{\Or}{\vee} %\newcommand{\And}{\wedge} \newcommand{\ms}{\models} \newcommand{\bw}{\bigwedge} \newcommand{\ts}{\times} \newcommand{\cc}{\circ} %\newcommand{\es}{\emptyset} %\newcommand{\bs}{\backslash} \newcommand{\vd}{\vdash} %\newcommand{\lan}{{\langle }} %\newcommand{\ran}{{\rangle }} %\newcommand{\al}{\alpha} \newcommand{\bt}{\beta} %\newcommand{\io}{\iota} \newcommand{\lb}{\lambda} %\newcommand{\sg}{\sigma} %\newcommand{\sa}{\Sigma} %\newcommand{\om}{\Omega} %\newcommand{\tu}{\tau} %%%%%%%%%%%%%%%%%%%%%%%%% % Custom maths commands % %%%%%%%%%%%%%%%%%%%%%%%%% 
\newcommand{\sumbool}[2]{\{#1\}+\{#2\}} \newcommand{\myifthenelse}[3]{\kw{if} ~ #1 ~\kw{then} ~ #2 ~ \kw{else} ~ #3} \newcommand{\fun}[2]{\item[]{\tt {#1}}. \quad\\ #2} \newcommand{\WF}[2]{\ensuremath{{\mathcal{W\!F}}(#1)[#2]}} \newcommand{\WFTWOLINES}[2]{\ensuremath{{\mathcal{W\!F}}\begin{array}{l}(#1)\\\mbox{}[{#2}]\end{array}}} \newcommand{\WFE}[1]{\WF{E}{#1}} \newcommand{\WT}[4]{\ensuremath{#1[#2] \vdash #3 : #4}} \newcommand{\WTE}[3]{\WT{E}{#1}{#2}{#3}} \newcommand{\WTEG}[2]{\WTE{\Gamma}{#1}{#2}} \newcommand{\WTM}[3]{\WT{#1}{}{#2}{#3}} \newcommand{\WFT}[2]{\ensuremath{#1[] \vdash {\mathcal{W\!F}}(#2)}} \newcommand{\WS}[3]{\ensuremath{#1[] \vdash #2 <: #3}} \newcommand{\WSE}[2]{\WS{E}{#1}{#2}} \newcommand{\WEV}[3]{\mbox{$#1[] \vdash #2 \lra #3$}} \newcommand{\WEVT}[3]{\mbox{$#1[] \vdash #2 \lra$}\\ \mbox{$ #3$}} \newcommand{\WTRED}[5]{\mbox{$#1[#2] \vdash #3 #4 #5$}} \newcommand{\WTERED}[4]{\mbox{$E[#1] \vdash #2 #3 #4$}} \newcommand{\WTELECONV}[3]{\WTERED{#1}{#2}{\leconvert}{#3}} \newcommand{\WTEGRED}[3]{\WTERED{\Gamma}{#1}{#2}{#3}} \newcommand{\WTECONV}[3]{\WTERED{#1}{#2}{\convert}{#3}} \newcommand{\WTEGCONV}[2]{\WTERED{\Gamma}{#1}{\convert}{#2}} \newcommand{\WTEGLECONV}[2]{\WTERED{\Gamma}{#1}{\leconvert}{#2}} \newcommand{\lab}[1]{\mathit{labels}(#1)} \newcommand{\dom}[1]{\mathit{dom}(#1)} \newcommand{\CI}[2]{\mbox{$\{#1\}^{#2}$}} \newcommand{\CIP}[3]{\mbox{$\{#1\}_{#2}^{#3}$}} \newcommand{\CIPV}[1]{\CIP{#1}{I_1.. I_k}{P_1.. P_k}} \newcommand{\CIPI}[1]{\CIP{#1}{I}{P}} \newcommand{\CIF}[1]{\mbox{$\{#1\}_{f_1.. f_n}$}} %BEGIN LATEX \newcommand{\NInd}[3]{\mbox{{\sf Ind}$(\begin{array}[t]{@{}l}#2:=#3 \,)\end{array}$}} \newcommand{\Ind}[4]{\mbox{{\sf Ind}$[#2](\begin{array}[t]{@{}l@{}}#3:=#4 \,)\end{array}$}} %END LATEX %HEVEA \newcommand{\NInd}[3]{\mbox{{\sf Ind}$(#2\,:=\,#3)$}} %HEVEA \newcommand{\Ind}[4]{\mbox{{\sf Ind}$[#2](#3\,:=\,#4)$}} \newcommand{\Indp}[5]{\mbox{{\sf Ind}$_{#5}(#1)[#2](\begin{array}[t]{@{}l}#3:=#4 \,)\end{array}$}} \newcommand{\Indpstr}[6]{\mbox{{\sf Ind}$_{#5}(#1)[#2](\begin{array}[t]{@{}l}#3:=#4 \,)/{#6}\end{array}$}} \newcommand{\Def}[4]{\mbox{{\sf Def}$(#1)(#2:=#3:#4)$}} \newcommand{\Assum}[3]{\mbox{{\sf Assum}$(#1)(#2:#3)$}} \newcommand{\Match}[3]{\mbox{$<\!#1\!>\!{\mbox{\tt Match}}~#2~{\mbox{\tt with}}~#3~{\mbox{\tt end}}$}} \newcommand{\Case}[3]{\mbox{$\kw{case}(#2,#1,#3)$}} \newcommand{\match}[3]{\mbox{$\kw{match}~ #2 ~\kw{with}~ #3 ~\kw{end}$}} \newcommand{\Fix}[2]{\mbox{\tt Fix}~#1\{#2\}} \newcommand{\CoFix}[2]{\mbox{\tt CoFix}~#1\{#2\}} \newcommand{\With}[2]{\mbox{\tt ~with~}} \newcommand{\letin}[3]{\kw{let}~#1:=#2~\kw{in}~#3} \newcommand{\subst}[3]{#1\{#2/#3\}} \newcommand{\substs}[4]{#1\{(#2/#3)_{#4}\}} \newcommand{\Sort}{\mbox{$\mathcal{S}$}} \newcommand{\convert}{=_{\beta\delta\iota\zeta\eta}} \newcommand{\leconvert}{\leq_{\beta\delta\iota\zeta\eta}} \newcommand{\NN}{\mathbb{N}} \newcommand{\inference}[1]{$${#1}$$} \newcommand{\compat}[2]{\mbox{$[#1|#2]$}} \newcommand{\tristackrel}[3]{\mathrel{\mathop{#2}\limits_{#3}^{#1}}} \newcommand{\Impl}{{\it Impl}} \newcommand{\elem}{{\it e}} \newcommand{\Mod}[3]{{\sf Mod}({#1}:{#2}\,\zeroone{:={#3}})} \newcommand{\ModS}[2]{{\sf Mod}({#1}:{#2})} \newcommand{\ModType}[2]{{\sf ModType}({#1}:={#2})} \newcommand{\ModA}[2]{{\sf ModA}({#1}=={#2})} \newcommand{\functor}[3]{\ensuremath{{\sf Functor}(#1:#2)\;#3}} \newcommand{\funsig}[3]{\ensuremath{{\sf Funsig}(#1:#2)\;#3}} \newcommand{\sig}[1]{\ensuremath{{\sf Sig}~#1~{\sf End}}} \newcommand{\struct}[1]{\ensuremath{{\sf Struct}~#1~{\sf End}}} 
\newcommand{\structe}[1]{\ensuremath{ {\sf Struct}~\elem_1;\ldots;\elem_i;#1;\elem_{i+2};\ldots ;\elem_n~{\sf End}}} \newcommand{\structes}[2]{\ensuremath{ {\sf Struct}~\elem_1;\ldots;\elem_i;#1;\elem_{i+2}\{#2\} ;\ldots;\elem_n\{#2\}~{\sf End}}} \newcommand{\with}[3]{\ensuremath{#1~{\sf with}~#2 := #3}} \newcommand{\Spec}{{\it Spec}} \newcommand{\ModSEq}[3]{{\sf Mod}({#1}:{#2}:={#3})} %\newbox\tempa %\newbox\tempb %\newdimen\tempc %\newcommand{\mud}[1]{\hfil $\displaystyle{\mathstrut #1}$\hfil} %\newcommand{\rig}[1]{\hfil $\displaystyle{#1}$} % \newcommand{\irulehelp}[3]{\setbox\tempa=\hbox{$\displaystyle{\mathstrut #2}$}% % \setbox\tempb=\vbox{\halign{##\cr % \mud{#1}\cr % \noalign{\vskip\the\lineskip} % \noalign{\hrule height 0pt} % \rig{\vbox to 0pt{\vss\hbox to 0pt{${\; #3}$\hss}\vss}}\cr % \noalign{\hrule} % \noalign{\vskip\the\lineskip} % \mud{\copy\tempa}\cr}} % \tempc=\wd\tempb % \advance\tempc by \wd\tempa % \divide\tempc by 2 } % \newcommand{\irule}[3]{{\irulehelp{#1}{#2}{#3} % \hbox to \wd\tempa{\hss \box\tempb \hss}}} \newcommand{\sverb}[1]{{\tt #1}} \newcommand{\mover}[2]{{#1\over #2}} \newcommand{\jd}[2]{#1 \vdash #2} \newcommand{\mathline}[1]{\[#1\]} \newcommand{\zrule}[2]{#2: #1} \newcommand{\orule}[3]{#3: {\mover{#1}{#2}}} \newcommand{\trule}[4]{#4: \mover{#1 \qquad #2} {#3}} \newcommand{\thrule}[5]{#5: {\mover{#1 \qquad #2 \qquad #3}{#4}}} % placement of figures %BEGIN LATEX \renewcommand{\topfraction}{.99} \renewcommand{\bottomfraction}{.99} \renewcommand{\textfraction}{.01} \renewcommand{\floatpagefraction}{.9} %END LATEX % Macros Bruno pour description de la syntaxe \def\bfbar{\ensuremath{|\hskip -0.22em{}|\hskip -0.24em{}|}} \def\TERMbar{\bfbar} \def\TERMbarbar{\bfbar\bfbar} %% Macros pour les grammaires \def\GR#1{\text{\large(}#1\text{\large)}} \def\NT#1{\langle\textit{#1}\rangle} \def\NTL#1#2{\langle\textit{#1}\rangle_{#2}} \def\TERM#1{{\bf\textrm{\bf #1}}} %\def\TERM#1{{\bf\textsf{#1}}} \def\KWD#1{\TERM{#1}} \def\ETERM#1{\TERM{#1}} \def\CHAR#1{\TERM{#1}} \def\STAR#1{#1*} \def\STARGR#1{\GR{#1}*} \def\PLUS#1{#1+} \def\PLUSGR#1{\GR{#1}+} \def\OPT#1{#1?} \def\OPTGR#1{\GR{#1}?} %% Tableaux de definition de non-terminaux \newenvironment{cadre} {\begin{array}{|c|}\hline\\} {\\\\\hline\end{array}} \newenvironment{rulebox} {$$\begin{cadre}\begin{array}{r@{~}c@{~}l@{}l@{}r}} {\end{array}\end{cadre}$$} \def\DEFNT#1{\NT{#1} & ::= &} \def\EXTNT#1{\NT{#1} & ::= & ... \\&|&} \def\RNAME#1{(\textsc{#1})} \def\SEPDEF{\\\\} \def\nlsep{\\&|&} \def\nlcont{\\&&} \newenvironment{rules} {\begin{center}\begin{rulebox}} {\end{rulebox}\end{center}} %%% Local Variables: %%% mode: latex %%% TeX-master: "Reference-Manual" %%% End: coq-8.15.0/doc/common/styles/000077500000000000000000000000001417001151100157365ustar00rootroot00000000000000coq-8.15.0/doc/common/styles/html/000077500000000000000000000000001417001151100167025ustar00rootroot00000000000000coq-8.15.0/doc/common/styles/html/coqremote/000077500000000000000000000000001417001151100207005ustar00rootroot00000000000000coq-8.15.0/doc/common/styles/html/coqremote/cover.html000066400000000000000000000062311417001151100227060ustar00rootroot00000000000000 Reference Manual | The Coq Proof Assistant

Reference Manual

Version COQVERSION


The Coq Development Team




Copyright © 1999-2019, Inria, CNRS and contributors

This material may be distributed only subject to the terms and conditions set forth in the Open Publication License, v1.0 or later (the latest version is presently available at http://www.opencontent.org/openpub). Options A and B are not elected.

coq-8.15.0/doc/common/styles/html/coqremote/footer.html000066400000000000000000000015611417001151100230670ustar00rootroot00000000000000
coq-8.15.0/doc/common/styles/html/coqremote/header.html000066400000000000000000000034561417001151100230260ustar00rootroot00000000000000 Standard Library | The Coq Proof Assistant
coq-8.15.0/doc/common/styles/html/coqremote/hevea.css000066400000000000000000000034461417001151100225110ustar00rootroot00000000000000 .li-itemize{margin:1ex 0ex;} .li-enumerate{margin:1ex 0ex;} .dd-description{margin:0ex 0ex 1ex 4ex;} .dt-description{margin:0ex;} .toc{list-style:none;} .thefootnotes{text-align:left;margin:0ex;} .dt-thefootnotes{margin:0em;} .dd-thefootnotes{margin:0em 0em 0em 2em;} .footnoterule{margin:1em auto 1em 0px;width:50%;} .caption{padding-left:2ex; padding-right:2ex; margin-left:auto; margin-right:auto} .title{margin:2ex auto;text-align:center} .center{text-align:center;margin-left:auto;margin-right:auto;} .flushleft{text-align:left;margin-left:0ex;margin-right:auto;} .flushright{text-align:right;margin-left:auto;margin-right:0ex;} DIV TABLE{margin-left:inherit;margin-right:inherit;} PRE{text-align:left;margin-left:0ex;margin-right:auto;} BLOCKQUOTE{margin-left:4ex;margin-right:4ex;text-align:left;} TD P{margin:0px;} .boxed{border:1px solid black} .textboxed{border:1px solid black} .vbar{border:none;width:2px;background-color:black;} .hbar{border:none;height:2px;width:100%;background-color:black;} .hfill{border:none;height:1px;width:200%;background-color:black;} .vdisplay{border-collapse:separate;border-spacing:2px;width:auto; empty-cells:show; border:2px solid red;} .vdcell{white-space:nowrap;padding:0px;width:auto; border:2px solid green;} .display{border-collapse:separate;border-spacing:2px;width:auto; border:none;} .dcell{white-space:nowrap;padding:0px;width:auto; border:none;} .dcenter{margin:0ex auto;} .vdcenter{border:solid #FF8000 2px; margin:0ex auto;} .minipage{text-align:left; margin-left:0em; margin-right:auto;} .marginpar{border:solid thin black; width:20%; text-align:left;} .marginparleft{float:left; margin-left:0ex; margin-right:1ex;} .marginparright{float:right; margin-left:1ex; margin-right:0ex;} .theorem{text-align:left;margin:1ex auto 1ex 0ex;} .part{margin:2ex auto;text-align:center} coq-8.15.0/doc/common/styles/html/coqremote/modules/000077500000000000000000000000001417001151100223505ustar00rootroot00000000000000coq-8.15.0/doc/common/styles/html/coqremote/modules/node/000077500000000000000000000000001417001151100232755ustar00rootroot00000000000000coq-8.15.0/doc/common/styles/html/coqremote/modules/node/node.css000066400000000000000000000012531417001151100247350ustar00rootroot00000000000000 .node-unpublished { background-color: #fff4f4; } .preview .node { background-color: #ffffea; } #node-admin-filter ul { list-style-type: none; padding: 0; margin: 0; width: 100%; } #node-admin-buttons { float: left; /* LTR */ margin-left: 0.5em; /* LTR */ clear: right; /* LTR */ } td.revision-current { background: #ffc; } .node-form .form-text { display: block; width: 95%; } .node-form .container-inline .form-text { display: inline; width: auto; } .node-form .standard { clear: both; } .node-form textarea { display: block; width: 95%; } .node-form .attachments fieldset { float: none; display: block; } .terms-inline { display: inline; } coq-8.15.0/doc/common/styles/html/coqremote/modules/system/000077500000000000000000000000001417001151100236745ustar00rootroot00000000000000coq-8.15.0/doc/common/styles/html/coqremote/modules/system/defaults.css000066400000000000000000000012671417001151100262230ustar00rootroot00000000000000 /* ** HTML elements */ fieldset { margin-bottom: 1em; padding: .5em; } form { margin: 0; padding: 0; } hr { height: 1px; border: 1px solid gray; } img { border: 0; } table { border-collapse: collapse; } th { text-align: left; /* LTR */ 
padding-right: 1em; /* LTR */ border-bottom: 3px solid #ccc; } /* ** Markup free clearing ** Details: http://www.positioniseverything.net/easyclearing.html */ .clear-block:after { content: "."; display: block; height: 0; clear: both; visibility: hidden; } .clear-block { display: inline-block; } /* Hides from IE-mac \*/ * html .clear-block { height: 1%; } .clear-block { display: block; } /* End hide from IE-mac */ coq-8.15.0/doc/common/styles/html/coqremote/modules/system/system.css000066400000000000000000000233511417001151100257360ustar00rootroot00000000000000 /* ** HTML elements */ body.drag { cursor: move; } th.active img { display: inline; } tr.even, tr.odd { background-color: #eee; border-bottom: 1px solid #ccc; padding: 0.1em 0.6em; } tr.drag { background-color: #fffff0; } tr.drag-previous { background-color: #ffd; } td.active { background-color: #ddd; } td.checkbox, th.checkbox { text-align: center; } tbody { border-top: 1px solid #ccc; } tbody th { border-bottom: 1px solid #ccc; } thead th { text-align: left; /* LTR */ padding-right: 1em; /* LTR */ border-bottom: 3px solid #ccc; } /* ** Other common styles */ .breadcrumb { padding-bottom: .5em } div.indentation { width: 20px; height: 1.7em; margin: -0.4em 0.2em -0.4em -0.4em; /* LTR */ padding: 0.42em 0 0.42em 0.6em; /* LTR */ float: left; /* LTR */ } div.tree-child { background: url(../../misc/tree.png) no-repeat 11px center; /* LTR */ } div.tree-child-last { background: url(../../misc/tree-bottom.png) no-repeat 11px center; /* LTR */ } div.tree-child-horizontal { background: url(../../misc/tree.png) no-repeat -11px center; } .error { color: #e55; } div.error { border: 1px solid #d77; } div.error, tr.error { background: #fcc; color: #200; padding: 2px; } .warning { color: #e09010; } div.warning { border: 1px solid #f0c020; } div.warning, tr.warning { background: #ffd; color: #220; padding: 2px; } .ok { color: #008000; } div.ok { border: 1px solid #00aa00; } div.ok, tr.ok { background: #dfd; color: #020; padding: 2px; } .item-list .icon { color: #555; float: right; /* LTR */ padding-left: 0.25em; /* LTR */ clear: right; /* LTR */ } .item-list .title { font-weight: bold; } .item-list ul { margin: 0 0 0.75em 0; padding: 0; } .item-list ul li { margin: 0 0 0.25em 1.5em; /* LTR */ padding: 0; list-style: disc; } ol.task-list li.active { font-weight: bold; } .form-item { margin-top: 1em; margin-bottom: 1em; } tr.odd .form-item, tr.even .form-item { margin-top: 0; margin-bottom: 0; white-space: nowrap; } tr.merge-down, tr.merge-down td, tr.merge-down th { border-bottom-width: 0 !important; } tr.merge-up, tr.merge-up td, tr.merge-up th { border-top-width: 0 !important; } .form-item input.error, .form-item textarea.error, .form-item select.error { border: 2px solid red; } .form-item .description { font-size: 0.85em; } .form-item label { display: block; font-weight: bold; } .form-item label.option { display: inline; font-weight: normal; } .form-checkboxes, .form-radios { margin: 1em 0; } .form-checkboxes .form-item, .form-radios .form-item { margin-top: 0.4em; margin-bottom: 0.4em; } .marker, .form-required { color: #f00; } .more-link { text-align: right; /* LTR */ } .more-help-link { font-size: 0.85em; text-align: right; /* LTR */ } .nowrap { white-space: nowrap; } .item-list .pager { clear: both; text-align: center; } .item-list .pager li { background-image:none; display:inline; list-style-type:none; padding: 0.5em; } .pager-current { font-weight:bold; } .tips { margin-top: 0; margin-bottom: 0; padding-top: 0; padding-bottom: 0; font-size: 
0.9em; } dl.multiselect dd.b, dl.multiselect dd.b .form-item, dl.multiselect dd.b select { font-family: inherit; font-size: inherit; width: 14em; } dl.multiselect dd.a, dl.multiselect dd.a .form-item { width: 10em; } dl.multiselect dt, dl.multiselect dd { float: left; /* LTR */ line-height: 1.75em; padding: 0; margin: 0 1em 0 0; /* LTR */ } dl.multiselect .form-item { height: 1.75em; margin: 0; } /* ** Inline items (need to override above) */ .container-inline div, .container-inline label { display: inline; } /* ** Tab navigation */ ul.primary { border-collapse: collapse; padding: 0 0 0 1em; /* LTR */ white-space: nowrap; list-style: none; margin: 5px; height: auto; line-height: normal; border-bottom: 1px solid #bbb; } ul.primary li { display: inline; } ul.primary li a { background-color: #ddd; border-color: #bbb; border-width: 1px; border-style: solid solid none solid; height: auto; margin-right: 0.5em; /* LTR */ padding: 0 1em; text-decoration: none; } ul.primary li.active a { background-color: #fff; border: 1px solid #bbb; border-bottom: #fff 1px solid; } ul.primary li a:hover { background-color: #eee; border-color: #ccc; border-bottom-color: #eee; } ul.secondary { border-bottom: 1px solid #bbb; padding: 0.5em 1em; margin: 5px; } ul.secondary li { display: inline; padding: 0 1em; border-right: 1px solid #ccc; /* LTR */ } ul.secondary a { padding: 0; text-decoration: none; } ul.secondary a.active { border-bottom: 4px solid #999; } /* ** Autocomplete styles */ /* Suggestion list */ #autocomplete { position: absolute; border: 1px solid; overflow: hidden; z-index: 100; } #autocomplete ul { margin: 0; padding: 0; list-style: none; } #autocomplete li { background: #fff; color: #000; white-space: pre; cursor: default; } #autocomplete li.selected { background: #0072b9; color: #fff; } /* Animated throbber */ html.js input.form-autocomplete { background-image: url(../../misc/throbber.gif); background-repeat: no-repeat; background-position: 100% 2px; /* LTR */ } html.js input.throbbing { background-position: 100% -18px; /* LTR */ } /* ** Collapsing fieldsets */ html.js fieldset.collapsed { border-bottom-width: 0; border-left-width: 0; border-right-width: 0; margin-bottom: 0; height: 1em; } html.js fieldset.collapsed * { display: none; } html.js fieldset.collapsed legend { display: block; } html.js fieldset.collapsible legend a { padding-left: 15px; /* LTR */ background: url(../../misc/menu-expanded.png) 5px 75% no-repeat; /* LTR */ } html.js fieldset.collapsed legend a { background-image: url(../../misc/menu-collapsed.png); /* LTR */ background-position: 5px 50%; /* LTR */ } /* Note: IE-only fix due to '* html' (breaks Konqueror otherwise). */ * html.js fieldset.collapsed legend, * html.js fieldset.collapsed legend *, * html.js fieldset.collapsed table * { display: inline; } /* For Safari 2 to prevent collapsible fieldsets containing tables from disappearing due to tableheader.js. 
*/ html.js fieldset.collapsible { position: relative; } html.js fieldset.collapsible legend a { display: block; } /* Avoid jumping around due to margins collapsing into collapsible fieldset border */ html.js fieldset.collapsible .fieldset-wrapper { overflow: auto; } /* ** Resizable text areas */ .resizable-textarea { width: 95%; } .resizable-textarea .grippie { height: 9px; overflow: hidden; background: #eee url(../../misc/grippie.png) no-repeat center 2px; border: 1px solid #ddd; border-top-width: 0; cursor: s-resize; } html.js .resizable-textarea textarea { margin-bottom: 0; width: 100%; display: block; } /* ** Table drag and drop. */ .draggable a.tabledrag-handle { cursor: move; float: left; /* LTR */ height: 1.7em; margin: -0.4em 0 -0.4em -0.5em; /* LTR */ padding: 0.42em 1.5em 0.42em 0.5em; /* LTR */ text-decoration: none; } a.tabledrag-handle:hover { text-decoration: none; } a.tabledrag-handle .handle { margin-top: 4px; height: 13px; width: 13px; background: url(../../misc/draggable.png) no-repeat 0 0; } a.tabledrag-handle-hover .handle { background-position: 0 -20px; } /* ** Teaser splitter */ .joined + .grippie { height: 5px; background-position: center 1px; margin-bottom: -2px; } /* Keeps inner content contained in Opera 9. */ .teaser-checkbox { padding-top: 1px; } div.teaser-button-wrapper { float: right; /* LTR */ padding-right: 5%; /* LTR */ margin: 0; } .teaser-checkbox div.form-item { float: right; /* LTR */ margin: 0 5% 0 0; /* LTR */ padding: 0; } textarea.teaser { display: none; } html.js .no-js { display: none; } /* ** Progressbar styles */ .progress { font-weight: bold; } .progress .bar { background: #fff url(../../misc/progress.gif); border: 1px solid #00375a; height: 1.5em; margin: 0 0.2em; } .progress .filled { background: #0072b9; height: 1em; border-bottom: 0.5em solid #004a73; width: 0%; } .progress .percentage { float: right; /* LTR */ } .progress-disabled { float: left; /* LTR */ } .ahah-progress { float: left; /* LTR */ } .ahah-progress .throbber { width: 15px; height: 15px; margin: 2px; background: transparent url(../../misc/throbber.gif) no-repeat 0px -18px; float: left; /* LTR */ } tr .ahah-progress .throbber { margin: 0 2px; } .ahah-progress-bar { width: 16em; } /* ** Formatting for welcome page */ #first-time strong { display: block; padding: 1.5em 0 .5em; } /* ** To be used with tableselect.js */ tr.selected td { background: #ffc; } /* ** Floating header for tableheader.js */ table.sticky-header { margin-top: 0; background: #fff; } /* ** Installation clean URLs */ #clean-url.install { display: none; } /* ** For anything you want to hide on page load when JS is enabled, so ** that you can use the JS to control visibility and avoid flicker. 
*/ html.js .js-hide { display: none; } /* ** Styles for the system modules page (admin/build/modules) */ #system-modules div.incompatible { font-weight: bold; } /* ** Styles for the system themes page (admin/build/themes) */ #system-themes-form div.incompatible { font-weight: bold; } /* ** Password strength indicator */ span.password-strength { visibility: hidden; } input.password-field { margin-right: 10px; /* LTR */ } div.password-description { padding: 0 2px; margin: 4px 0 0 0; font-size: 0.85em; max-width: 500px; } div.password-description ul { margin-bottom: 0; } .password-parent { margin: 0 0 0 0; } /* ** Password confirmation checker */ input.password-confirm { margin-right: 10px; /* LTR */ } .confirm-parent { margin: 5px 0 0 0; } span.password-confirm { visibility: hidden; } span.password-confirm span { font-weight: normal; } coq-8.15.0/doc/common/styles/html/coqremote/modules/user/000077500000000000000000000000001417001151100233265ustar00rootroot00000000000000coq-8.15.0/doc/common/styles/html/coqremote/modules/user/user.css000066400000000000000000000020531417001151100250160ustar00rootroot00000000000000 #permissions td.module { font-weight: bold; } #permissions td.permission { padding-left: 1.5em; /* LTR */ } #access-rules .access-type, #access-rules .rule-type { margin-right: 1em; /* LTR */ float: left; /* LTR */ } #access-rules .access-type .form-item, #access-rules .rule-type .form-item { margin-top: 0; } #access-rules .mask { clear: both; } #user-login-form { text-align: center; } #user-admin-filter ul { list-style-type: none; padding: 0; margin: 0; width: 100%; } #user-admin-buttons { float: left; /* LTR */ margin-left: 0.5em; /* LTR */ clear: right; /* LTR */ } #user-admin-settings fieldset .description { font-size: 0.85em; padding-bottom: .5em; } /* Generated by user.module but used by profile.module: */ .profile { clear: both; margin: 1em 0; } .profile .picture { float: right; /* LTR */ margin: 0 1em 1em 0; /* LTR */ } .profile h3 { border-bottom: 1px solid #ccc; } .profile dl { margin: 0 0 1.5em 0; } .profile dt { margin: 0 0 0.2em 0; font-weight: bold; } .profile dd { margin: 0 0 1em 0; } coq-8.15.0/doc/common/styles/html/coqremote/styles.hva000066400000000000000000000047561417001151100227370ustar00rootroot00000000000000\renewcommand{\@meta}{ \begin{rawhtml} \end{rawhtml}} % for HeVeA \htmlhead{\begin{rawhtml} \end{rawhtml}} coq-8.15.0/doc/common/styles/html/simple/000077500000000000000000000000001417001151100201735ustar00rootroot00000000000000coq-8.15.0/doc/common/styles/html/simple/cover.html000066400000000000000000000033141417001151100222000ustar00rootroot00000000000000 Reference Manual | The Coq Proof Assistant

Reference Manual

Version COQVERSION


The Coq Development Team




Copyright © 1999-2019, Inria, CNRS and contributors

This material may be distributed only subject to the terms and conditions set forth in the Open Publication License, v1.0 or later (the latest version is presently available at http://www.opencontent.org/openpub). Options A and B are not elected.

coq-8.15.0/doc/common/styles/html/simple/footer.html000066400000000000000000000000201417001151100223470ustar00rootroot00000000000000 coq-8.15.0/doc/common/styles/html/simple/header.html000066400000000000000000000005761417001151100223210ustar00rootroot00000000000000 The Coq Standard Library coq-8.15.0/doc/common/styles/html/simple/hevea.css000066400000000000000000000034461417001151100220040ustar00rootroot00000000000000 .li-itemize{margin:1ex 0ex;} .li-enumerate{margin:1ex 0ex;} .dd-description{margin:0ex 0ex 1ex 4ex;} .dt-description{margin:0ex;} .toc{list-style:none;} .thefootnotes{text-align:left;margin:0ex;} .dt-thefootnotes{margin:0em;} .dd-thefootnotes{margin:0em 0em 0em 2em;} .footnoterule{margin:1em auto 1em 0px;width:50%;} .caption{padding-left:2ex; padding-right:2ex; margin-left:auto; margin-right:auto} .title{margin:2ex auto;text-align:center} .center{text-align:center;margin-left:auto;margin-right:auto;} .flushleft{text-align:left;margin-left:0ex;margin-right:auto;} .flushright{text-align:right;margin-left:auto;margin-right:0ex;} DIV TABLE{margin-left:inherit;margin-right:inherit;} PRE{text-align:left;margin-left:0ex;margin-right:auto;} BLOCKQUOTE{margin-left:4ex;margin-right:4ex;text-align:left;} TD P{margin:0px;} .boxed{border:1px solid black} .textboxed{border:1px solid black} .vbar{border:none;width:2px;background-color:black;} .hbar{border:none;height:2px;width:100%;background-color:black;} .hfill{border:none;height:1px;width:200%;background-color:black;} .vdisplay{border-collapse:separate;border-spacing:2px;width:auto; empty-cells:show; border:2px solid red;} .vdcell{white-space:nowrap;padding:0px;width:auto; border:2px solid green;} .display{border-collapse:separate;border-spacing:2px;width:auto; border:none;} .dcell{white-space:nowrap;padding:0px;width:auto; border:none;} .dcenter{margin:0ex auto;} .vdcenter{border:solid #FF8000 2px; margin:0ex auto;} .minipage{text-align:left; margin-left:0em; margin-right:auto;} .marginpar{border:solid thin black; width:20%; text-align:left;} .marginparleft{float:left; margin-left:0ex; margin-right:1ex;} .marginparright{float:right; margin-left:1ex; margin-right:0ex;} .theorem{text-align:left;margin:1ex auto 1ex 0ex;} .part{margin:2ex auto;text-align:center} coq-8.15.0/doc/common/styles/html/simple/style.css000066400000000000000000000003061417001151100220440ustar00rootroot00000000000000#footer { border-top: solid black 1pt; text-align: center; text-indent: 0pt; } .menu { } .menu li { display: inline; margin: 0pt; padding: .5ex 1em; list-style: none } coq-8.15.0/doc/common/styles/html/simple/styles.hva000066400000000000000000000017211417001151100222170ustar00rootroot00000000000000\renewcommand{\@meta}{ \begin{rawhtml} \end{rawhtml}} % for HeVeA \htmlhead{\begin{rawhtml}
\end{rawhtml}} coq-8.15.0/doc/common/title.tex000066400000000000000000000026611417001151100162630ustar00rootroot00000000000000%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % File title.tex % Page formatting commands % Macro \coverpage %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %\setlength{\marginparwidth}{0pt} %\setlength{\oddsidemargin}{0pt} %\setlength{\evensidemargin}{0pt} %\setlength{\marginparsep}{0pt} %\setlength{\topmargin}{0pt} %\setlength{\textwidth}{16.9cm} %\setlength{\textheight}{22cm} %\usepackage{fullpage} %\newcommand{\printingdate}{\today} %\newcommand{\isdraft}{\Large\bf\today\\[20pt]} %\newcommand{\isdraft}{\vspace{20pt}} \newcommand{\coverpage}[3]{ \thispagestyle{empty} \begin{center} \bfseries % for the rest of this page, until \end{center} \Huge The Coq Proof Assistant\\[12pt] #1\\[20pt] \Large\today\\[20pt] Version \coqversion\footnote[1]{This research was partly supported by IST working group ``Types''} \vspace{0pt plus .5fill} #2 \par\vfill $\pi r^2$ Project (formerly LogiCal, then TypiCal) \vspace*{15pt} \end{center} \newpage \thispagestyle{empty} \hbox{}\vfill % without \hbox \vfill does not work at the top of the page \begin{flushleft} %BEGIN LATEX V\coqversion, \today \par\vspace{20pt} %END LATEX \copyright 1999-2019, Inria, CNRS and contributors #3 \end{flushleft} } % end of \coverpage definition % \newcommand{\shorttitle}[1]{ % \begin{center} % \begin{huge} % \begin{bf} % The Coq Proof Assistant\\ % \vspace{10pt} % #1\\ % \end{bf} % \end{huge} % \end{center} % \vspace{5pt} % } % Local Variables: % mode: LaTeX % TeX-master: "" % End: coq-8.15.0/doc/dune000066400000000000000000000041001417001151100137740ustar00rootroot00000000000000(rule (targets unreleased.rst) ; We need to keep this as the sphinx build still requires this file ; in-tree, to be fixed hopefully soon (mode promote-until-clean) (deps (source_tree changelog)) (action (with-stdout-to %{targets} (bash "cat changelog/00-title.rst changelog/*/*.rst")))) (alias (name refman-deps) (deps ; We could use finer dependencies here so the build is faster: ; ; - vo files: generated by sphinx after parsing the doc, promoted, ; - Static files: ; + %{bin:coqdoc} etc... ; + config/coq_config.py ; + tools/coqdoc/coqdoc.css (package coq-core) (package coq-stdlib) (source_tree sphinx) (source_tree tools/coqrst) unreleased.rst (env_var SPHINXWARNOPT))) (rule (targets refman-html) (alias refman-html) (package coq-doc) ; Cannot use this deps alias because of ocaml/dune#3415 ; (deps (alias refman-deps)) ; EJGA: note this should've been fixed in dune master as of 05/03/2021 (deps (package coq-core) (package coq-stdlib) (source_tree sphinx) (source_tree tools/coqrst) unreleased.rst (env_var SPHINXWARNOPT)) (action (run env sphinx-build -q %{env:SPHINXWARNOPT=-W} -b html sphinx %{targets}))) (rule (targets refman-pdf) (alias refman-pdf) (package coq-doc) ; Cannot use this deps alias because of ocaml/dune#3415 ; (deps (alias refman-deps)) ; EJGA: note this should've been fixed in dune master as of 05/03/2021 (deps (package coq-core) (package coq-stdlib) (source_tree sphinx) (source_tree tools/coqrst) unreleased.rst (env_var SPHINXWARNOPT)) (action (progn (run env sphinx-build -q %{env:SPHINXWARNOPT=-W} -b latex sphinx %{targets}) (chdir %{targets} (run make LATEXMKOPTS=-silent))))) ; Installable directories are not yet fully supported by Dune. See ; ocaml/dune#1868. Yet, this makes coq-doc.install a valid target to ; generate the whole Coq documentation. And the result under ; _build/install/default/doc/coq-doc looks just right! 
(install (files (refman-html as html/refman) (refman-pdf as pdf/refman)) (section doc) (package coq-doc)) (documentation (package coq-doc)) coq-8.15.0/doc/index.mld000066400000000000000000000001641417001151100147310ustar00rootroot00000000000000{0 coq-doc } The coq-doc package only contains user documentation on the Coq proof assistant and no OCaml library. coq-8.15.0/doc/plugin_tutorial/000077500000000000000000000000001417001151100163445ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/.travis.yml000066400000000000000000000021231417001151100204530ustar00rootroot00000000000000dist: trusty sudo: required language: generic services: - docker env: global: - NJOBS="2" - CONTRIB_NAME="plugin_tutorials" matrix: - COQ_IMAGE="coqorg/coq:dev" install: | # Prepare the COQ container docker run -d -i --init --name=COQ -v ${TRAVIS_BUILD_DIR}:/home/coq/$CONTRIB_NAME -w /home/coq/$CONTRIB_NAME ${COQ_IMAGE} docker exec COQ /bin/bash --login -c " # This bash script is double-quoted to interpolate Travis CI env vars: echo \"Build triggered by ${TRAVIS_EVENT_TYPE}\" export PS4='+ \e[33;1m(\$0 @ line \$LINENO) \$\e[0m ' set -ex # -e = exit on failure; -x = trace for debug opam list " script: - echo -e "${ANSI_YELLOW}Building $CONTRIB_NAME...${ANSI_RESET}" && echo -en 'travis_fold:start:testbuild\\r' - | docker exec COQ /bin/bash --login -c " export PS4='+ \e[33;1m(\$0 @ line \$LINENO) \$\e[0m ' set -ex sudo chown -R coq:coq /home/coq/$CONTRIB_NAME ( cd tuto0 && make ) ( cd tuto1 && make ) ( cd tuto2 && make ) ( cd tuto3 && make ) " - docker stop COQ # optional - echo -en 'travis_fold:end:testbuild\\r' coq-8.15.0/doc/plugin_tutorial/LICENSE000066400000000000000000000022721417001151100173540ustar00rootroot00000000000000This is free and unencumbered software released into the public domain. Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
For more information, please refer to coq-8.15.0/doc/plugin_tutorial/Makefile000066400000000000000000000003451417001151100200060ustar00rootroot00000000000000 TUTOS:= \ tuto0 \ tuto1 \ tuto2 \ tuto3 all: $(TUTOS) .PHONY: $(TUTOS) all $(TUTOS): %: +$(MAKE) -C $@ CLEANS:=$(addsuffix -clean, $(TUTOS)) .PHONY: clean $(CLEANS) clean: $(CLEANS) %-clean: +$(MAKE) -C $* clean coq-8.15.0/doc/plugin_tutorial/README.md000066400000000000000000000072231417001151100176270ustar00rootroot00000000000000How to write plugins in Coq =========================== # Working environment In addition to installing OCaml and Coq, you need to make sure that you also have the development headers for Coq, because you will need them to compile extensions. If you installed Coq from source or from [OPAM](http://opam.ocaml.org/doc/Install.html), you already have the required headers. If you installed them from your system package manager, there may be a separate package which contains the development headers (for example, in Ubuntu they are contained in the package `libcoq-ocaml-dev`). It can help to install several tools for development. ## Tuareg and Merlin These instructions use [OPAM](http://opam.ocaml.org/doc/Install.html) ```shell opam install merlin # prints instructions for vim and emacs opam install tuareg # syntax highlighting for OCaml opam user-setup install # automatically configures editors for merlin ``` Adding this line to your .emacs helps Tuareg recognize the .mlg extension: ```shell (add-to-list 'auto-mode-alist '("\\.mlg$" . tuareg-mode) t) ``` If you are using [vscoq](https://github.com/coq-community/vscoq), you will need to ensure that vscoq loads the `_CoqProject` file for the extension you are working on. You can do this by opening Visual Studio Code with the `_CoqProject` file in the project root directory, or by editing the `coqtop.coqProjectRoot` setting for vscoq. ## This tutorial ```shell cd plugin_tutorials/tuto0 make .merlin # run before opening .ml files in your editor make # build ``` # tuto0 : basics of project organization package an mlg file in a plugin, organize a `Makefile`, `_CoqProject` - Example of syntax to add a new toplevel command - Example of function call to print a simple message - Example of function call to print a simple warning - Example of function call to raise a simple error to the user - Example of syntax to add a simple tactic (that does nothing and prints a message) - To use it: ```bash cd tuto0; make coqtop -I src -R theories Tuto0 ``` In the Coq session type: ```coq Require Import Tuto0.Loader. HelloWorld. ``` You can also modify and run `theories/Demo.v`. # tuto1 : OCaml to Coq communication Explore the memory of Coq, modify it - Commands that take arguments: strings, integers, symbols, expressions of the calculus of constructions - Examples of using environments correctly - Examples of using state (the evar_map) correctly - Commands that interact with type-checking in Coq - A command that checks convertibility between two terms - A command that adds a new definition or theorem - A command that uses a name and exploits the existing definitions or theorems - A command that exploits an existing ongoing proof - A command that defines a new tactic Compilation and loading must be performed as for `tuto0`. 
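For instance, as a rough sketch (assuming the plugin has been built and loaded as described for `tuto0`, so that `coqtop` is started with the `Tuto1` paths), a session exercising a few of the commands declared in `src/g_tuto1.mlg` might look like the following; the name `two` and the concrete terms are arbitrary illustrations:

```coq
Require Import Tuto1.Loader.
MyDefine two := (1 + 1).
MyPrint two.
Check1 two.
Convertible two 2.
```

Here `MyDefine` adds a new constant, `MyPrint` looks it up again, `Check1` reports its type, and `Convertible` answers whether the two terms are convertible.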
# tuto2 : OCaml to Coq communication A more step by step introduction to writing commands - Explanation of the syntax of entries - Adding a new type to and parsing to the available choices - Handling commands that store information in user-chosen registers and tables Compilation and loading must be performed as for `tuto0`. # tuto3 : manipulating terms of the calculus of constructions Manipulating terms, inside commands and tactics. - Obtaining existing values from memory - Composing values - Verifying types - Using these terms in commands - Using these terms in tactics - Automatic proofs without tactics using type classes and canonical structures compilation and loading must be performed as for `tuto0`. coq-8.15.0/doc/plugin_tutorial/tuto0/000077500000000000000000000000001417001151100174175ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto0/Makefile000066400000000000000000000003741417001151100210630ustar00rootroot00000000000000ifeq "$(COQBIN)" "" COQBIN=$(dir $(shell which coqtop))/ endif %: Makefile.coq Makefile.coq: _CoqProject $(COQBIN)coq_makefile -f _CoqProject -o Makefile.coq tests: all @$(MAKE) -C tests -s clean @$(MAKE) -C tests -s all -include Makefile.coq coq-8.15.0/doc/plugin_tutorial/tuto0/_CoqProject000066400000000000000000000002131417001151100215460ustar00rootroot00000000000000-R theories/ Tuto0 -I src theories/Loader.v theories/Demo.v src/tuto0_main.ml src/tuto0_main.mli src/g_tuto0.mlg src/tuto0_plugin.mlpack coq-8.15.0/doc/plugin_tutorial/tuto0/src/000077500000000000000000000000001417001151100202065ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto0/src/dune000066400000000000000000000002121417001151100210570ustar00rootroot00000000000000(library (name tuto0_plugin) (public_name coq-core.plugins.tutorial.p0) (libraries coq-core.plugins.ltac)) (coq.pp (modules g_tuto0)) coq-8.15.0/doc/plugin_tutorial/tuto0/src/g_tuto0.mlg000066400000000000000000000031701417001151100222710ustar00rootroot00000000000000DECLARE PLUGIN "tuto0_plugin" { open Pp open Ltac_plugin let tuto_warn = CWarnings.create ~name:"name" ~category:"category" (fun _ -> strbrk Tuto0_main.message) } (*** Printing messages ***) (* * This defines a command that prints HelloWorld. * Note that Feedback.msg_notice can be used to print messages. *) VERNAC COMMAND EXTEND HelloWorld CLASSIFIED AS QUERY | [ "HelloWorld" ] -> { Feedback.msg_notice (strbrk Tuto0_main.message) } END (* * This is a tactic version of the same thing. *) TACTIC EXTEND hello_world_tactic | [ "hello_world" ] -> { let _ = Feedback.msg_notice (str Tuto0_main.message) in Tacticals.tclIDTAC } END (*** Printing warnings ***) (* * This defines a command that prints HelloWorld as a warning. * tuto_warn is defined at the top-level, before the command runs, * which is standard. *) VERNAC COMMAND EXTEND HelloWarning CLASSIFIED AS QUERY | [ "HelloWarning" ] -> { tuto_warn () } END (* * This is a tactic version of the same thing. *) TACTIC EXTEND hello_warning_tactic | [ "hello_warning" ] -> { let _ = tuto_warn () in Tacticals.tclIDTAC } END (*** Printing errors ***) (* * This defines a command that prints HelloWorld inside of an error. * Note that CErrors.user_err can be used to raise errors to the user. *) VERNAC COMMAND EXTEND HelloError CLASSIFIED AS QUERY | [ "HelloError" ] -> { CErrors.user_err (str Tuto0_main.message) } END (* * This is a tactic version of the same thing. 
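 * For example, inside a proof, the script line
 *   Fail hello_error.
 * (as exercised in theories/Demo.v) lets the raised error be observed
 * without aborting the proof script.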
*) TACTIC EXTEND hello_error_tactic | [ "hello_error" ] -> { let _ = CErrors.user_err (str Tuto0_main.message) in Tacticals.tclIDTAC } END coq-8.15.0/doc/plugin_tutorial/tuto0/src/tuto0_main.ml000066400000000000000000000000351417001151100226150ustar00rootroot00000000000000let message = "Hello world!" coq-8.15.0/doc/plugin_tutorial/tuto0/src/tuto0_main.mli000066400000000000000000000000251417001151100227650ustar00rootroot00000000000000val message : string coq-8.15.0/doc/plugin_tutorial/tuto0/src/tuto0_plugin.mlpack000066400000000000000000000000231417001151100240230ustar00rootroot00000000000000Tuto0_main G_tuto0 coq-8.15.0/doc/plugin_tutorial/tuto0/theories/000077500000000000000000000000001417001151100212415ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto0/theories/Demo.v000066400000000000000000000004721417001151100223170ustar00rootroot00000000000000From Tuto0 Require Import Loader. (*** Printing messages ***) HelloWorld. Lemma test : True. Proof. hello_world. Abort. (*** Printing warnings ***) HelloWarning. Lemma test : True. Proof. hello_warning. Abort. (*** Signaling errors ***) Fail HelloError. Lemma test : True. Proof. Fail hello_error. Abort. coq-8.15.0/doc/plugin_tutorial/tuto0/theories/Loader.v000066400000000000000000000000421417001151100226320ustar00rootroot00000000000000Declare ML Module "tuto0_plugin". coq-8.15.0/doc/plugin_tutorial/tuto1/000077500000000000000000000000001417001151100174205ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto1/Makefile000066400000000000000000000003741417001151100210640ustar00rootroot00000000000000ifeq "$(COQBIN)" "" COQBIN=$(dir $(shell which coqtop))/ endif %: Makefile.coq Makefile.coq: _CoqProject $(COQBIN)coq_makefile -f _CoqProject -o Makefile.coq tests: all @$(MAKE) -C tests -s clean @$(MAKE) -C tests -s all -include Makefile.coq coq-8.15.0/doc/plugin_tutorial/tuto1/_CoqProject000066400000000000000000000004071417001151100215540ustar00rootroot00000000000000-R theories Tuto1 -I src theories/Loader.v theories/Demo.v src/inspector.mli src/inspector.ml src/simple_check.mli src/simple_check.ml src/simple_declare.mli src/simple_declare.ml src/simple_print.ml src/simple_print.mli src/g_tuto1.mlg src/tuto1_plugin.mlpack coq-8.15.0/doc/plugin_tutorial/tuto1/src/000077500000000000000000000000001417001151100202075ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto1/src/dune000066400000000000000000000002121417001151100210600ustar00rootroot00000000000000(library (name tuto1_plugin) (public_name coq-core.plugins.tutorial.p1) (libraries coq-core.plugins.ltac)) (coq.pp (modules g_tuto1)) coq-8.15.0/doc/plugin_tutorial/tuto1/src/g_tuto1.mlg000066400000000000000000000246421417001151100223020ustar00rootroot00000000000000DECLARE PLUGIN "tuto1_plugin" { (* If we forget this line and include our own tactic definition using TACTIC EXTEND, as below, then we get the strange error message no implementation available for Tacentries, only when compiling theories/Loader.v *) open Ltac_plugin open Pp (* This module defines the types of arguments to be used in the EXTEND directives below, for example the string one. *) open Stdarg } (*** Printing inputs ***) (* * This command prints an input from the user. * * A list with allowable inputs can be found in interp/stdarg.mli, * plugin/ltac/extraargs.mli, and plugin/ssr/ssrparser.mli * (remove the wit_ prefix), but not all of these are allowable * (unit and bool, for example, are not usable from within here). * * We include only some examples that are standard and useful for commands. 
* Some of the omitted examples are useful for tactics. * * Inspector is our own file that defines a simple messaging function. * The printing functions (pr_qualid and so on) are in printing. * * Some of these cases would be ambiguous if we used "What's" for each of * these. For example, all of these are terms. We purposely disambiguate. *) VERNAC COMMAND EXTEND WhatIsThis CLASSIFIED AS QUERY | [ "What's" constr(e) ] -> { let env = Global.env () in (* we'll explain later *) let sigma = Evd.from_env env in (* we'll explain later *) Inspector.print_input e (Ppconstr.pr_constr_expr env sigma) "term" } | [ "What" "kind" "of" "term" "is" string(s) ] -> { Inspector.print_input s strbrk "string" } | [ "What" "kind" "of" "term" "is" int(i) ] -> { Inspector.print_input i Pp.int "int" } | [ "What" "kind" "of" "term" "is" ident(id) ] -> { Inspector.print_input id Ppconstr.pr_id "identifier" } | [ "What" "kind" "of" "identifier" "is" reference(r) ] -> { Inspector.print_input r Ppconstr.pr_qualid "reference" } END (* * This command demonstrates basic combinators built into the DSL here. * You can generalize this for constr_list, constr_opt, int_list, and so on. *) VERNAC COMMAND EXTEND WhatAreThese CLASSIFIED AS QUERY | [ "What" "is" int_list(l) "a" "list" "of" ] -> { let print l = str "[" ++ Pp.prlist_with_sep (fun () -> str ";") Pp.int l ++ str "]" in Inspector.print_input l print "int list" } | [ "Is" ne_int_list(l) "nonempty" ] -> { let print l = str "[" ++ Pp.prlist_with_sep (fun () -> str ";") Pp.int l ++ str "]" in Inspector.print_input l print "nonempty int list" } | [ "And" "is" int_opt(o) "provided" ] -> { let print o = strbrk (if Option.has_some o then "Yes" else "No") in Feedback.msg_notice (print o) } END (*** Interning terms ***) (* * The next step is to make something of parsed expression. * Interesting information in interp/constrintern.mli. * * When you read in constr(e), e will have type Constrexpr.constr_expr, * which is defined in pretyping/constrexpr.ml. Your plugin * will want a different representation. * * The important function is Constrintern.interp_constr_evars, * which converts between a constr_expr and an * (EConstr.constr, evar_map) pair. This essentially contains * an internal representation of the term along with some state. * For more on the state, read /dev/doc/econstr.md. * * NOTE ON INTERNING: Always prefer Constrintern.interp_constr_evars * over Constrintern.interp_constr. The latter is an internal function * not meant for external use. * * To get your initial environment, call Global.env (). * To get state from that environment, call Evd.from_env on that environment. * It is important to NEVER use the empty environment or Evd.empty; * if you do, you will get confusing errors. * * NOTE ON STATE: It is important to use the evar_map that is returned to you. * Otherwise, you may get cryptic errors later in your plugin. * For example, you may get universe inconsistency errors. * In general, if a function returns an evar_map to you, that's the one * you want to thread through the rest of your command. * * NOTE ON STYLE: In general, it's better practice to move large * chunks of OCaml code like this one into an .ml file. We include * this here because it's really important to understand how to * thread state in a plugin, and it's easier to see that if it's in the * top-level file itself. 
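 *
 * Example (hypothetical session, once the plugin is loaded): the command
 *   Intern (fun x : nat => x).
 * prints the evar map before and after interning, followed by the
 * interned term itself.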
*) VERNAC COMMAND EXTEND Intern CLASSIFIED AS QUERY | [ "Intern" constr(e) ] -> { let env = Global.env () in (* use this; never use empty *) let sigma = Evd.from_env env in (* use this; never use empty *) let debug sigma = Termops.pr_evar_map ~with_univs:true None env sigma in Feedback.msg_notice (strbrk "State before intern: " ++ debug sigma); let (sigma, t) = Constrintern.interp_constr_evars env sigma e in Feedback.msg_notice (strbrk "State after intern: " ++ debug sigma); let print t = Printer.pr_econstr_env env sigma t in Feedback.msg_notice (strbrk "Interned: " ++ print t) } END (*** Defining terms ***) (* * To define a term, we start similarly to our intern functionality, * then we call another function. We define this function in * the Simple_declare module. * * The line #[ poly = Attributes.polymorphic ] says that this command accepts * polymorphic attributes. * @SkySkimmer: Here, poly is what the result is bound to in the * rule's code. Multiple attributes may be used separated by ;, and we have * punning so foo is equivalent to foo = foo. * * The declare_definition function returns the reference * that was defined. This reference will be present in the new environment. * If you want to refer to it later in your plugin, you must use an * updated environment and the constructed reference. * * Note since we are now defining a term, we must classify this * as a side-effect (CLASSIFIED AS SIDEFF). *) VERNAC COMMAND EXTEND MyDefine CLASSIFIED AS SIDEFF | #[ poly = Attributes.polymorphic ] [ "MyDefine" ident(i) ":=" constr(e) ] -> { let env = Global.env () in let sigma = Evd.from_env env in let (sigma, t) = Constrintern.interp_constr_evars env sigma e in let r = Simple_declare.declare_definition ~poly i sigma t in let print r = strbrk "Defined " ++ Printer.pr_global r ++ strbrk "." in Feedback.msg_notice (print r) } END (*** Printing terms ***) (* * This command takes a name and return its value. It does less * than Print, because it fails on constructors, axioms, and inductive types. * It signals an error to the user for unsupported terms. * * Simple_print contains simple_body_access, which shows how to look up * a global reference. *) VERNAC COMMAND EXTEND ExamplePrint CLASSIFIED AS QUERY | [ "MyPrint" reference(r) ] -> { let env = Global.env () in let sigma = Evd.from_env env in try let t = Simple_print.simple_body_access (Nametab.global r) in Feedback.msg_notice (Printer.pr_econstr_env env sigma t) with Failure s -> CErrors.user_err (str s) } END (* * This command shows that after you define a new term, * you can also look it up. But there's a catch! You need to actually * refresh your environment. Otherwise, the defined term * will not be in the environment. * * Using the global reference as opposed to the ID is generally * a good idea, otherwise you might end up running into unforeseen * problems inside of modules and sections and so on. * * Inside of simple_body_access, note that it uses Global.env (), * which refreshes the environment before looking up the term. *) VERNAC COMMAND EXTEND DefineLookup CLASSIFIED AS SIDEFF | #[ poly = Attributes.polymorphic ] [ "DefineLookup" ident(i) ":=" constr(e) ] -> { let env = Global.env () in let sigma = Evd.from_env env in let (sigma, t) = Constrintern.interp_constr_evars env sigma e in let r = Simple_declare.declare_definition ~poly i sigma t in let print r = strbrk "Defined " ++ Printer.pr_global r ++ strbrk "." 
in Feedback.msg_notice (print r); let env = Global.env () in let sigma = Evd.from_env env in let t = Simple_print.simple_body_access r in let print t = strbrk "Found " ++ Printer.pr_econstr_env env sigma t in Feedback.msg_notice (print t) } END (*** Checking terms ***) (* * These are two commands for simple type-checking of terms. * The bodies and explanations of the differences are in simple_check.ml. *) VERNAC COMMAND EXTEND Check1 CLASSIFIED AS QUERY | [ "Check1" constr(e) ] -> { let env = Global.env () in let sigma = Evd.from_env env in let (sigma, t) = Constrintern.interp_constr_evars env sigma e in let (sigma, typ) = Simple_check.simple_check1 env sigma t in Feedback.msg_notice (Printer.pr_econstr_env env sigma typ) } END VERNAC COMMAND EXTEND Check2 CLASSIFIED AS QUERY | [ "Check2" constr(e) ] -> { let env = Global.env () in let sigma = Evd.from_env env in let (sigma, t) = Constrintern.interp_constr_evars env sigma e in let typ = Simple_check.simple_check2 env sigma t in Feedback.msg_notice (Printer.pr_econstr_env env sigma typ) } END (*** Convertibility ***) (* * This command checks if there is a possible assignment of * constraints in the state under which the two terms are * convertible. *) VERNAC COMMAND EXTEND Convertible CLASSIFIED AS QUERY | [ "Convertible" constr(e1) constr(e2) ] -> { let env = Global.env () in let sigma = Evd.from_env env in let (sigma, t1) = Constrintern.interp_constr_evars env sigma e1 in let (sigma, t2) = Constrintern.interp_constr_evars env sigma e2 in match Reductionops.infer_conv env sigma t1 t2 with | Some _ -> Feedback.msg_notice (strbrk "Yes :)") | None -> Feedback.msg_notice (strbrk "No :(") } END (*** Introducing terms ***) (* * We can call the tactics defined in Tactics within our tactics. * Here we call intros. *) TACTIC EXTEND my_intro | [ "my_intro" ident(i) ] -> { Tactics.introduction i } END (*** Exploring proof state ***) (* * This command demonstrates exploring the proof state from within * a command. * * Note that Pfedit.get_current_context gets us the environment * and state within a proof, as opposed to the global environment * and state. This is important within tactics. *) VERNAC COMMAND EXTEND ExploreProof CLASSIFIED AS QUERY | ![ proof_query ] [ "ExploreProof" ] -> { fun ~pstate -> let sigma, env = Declare.Proof.get_current_context pstate in let pprf = Proof.partial_proof (Declare.Proof.get pstate) in Feedback.msg_notice (Pp.prlist_with_sep Pp.fnl (Printer.pr_econstr_env env sigma) pprf) } END coq-8.15.0/doc/plugin_tutorial/tuto1/src/inspector.ml000066400000000000000000000004071417001151100225500ustar00rootroot00000000000000open Pp (* * Inspect an input and print a feedback message explaining what it is *) let print_input (a : 'a) (printer : 'a -> Pp.t) (type_str : string) : unit = let msg = printer a ++ strbrk (Printf.sprintf " is a %s." type_str) in Feedback.msg_notice msg coq-8.15.0/doc/plugin_tutorial/tuto1/src/inspector.mli000066400000000000000000000002051417001151100227150ustar00rootroot00000000000000(* * Inspect an input and print a feedback message explaining what it is *) val print_input : 'a -> ('a -> Pp.t) -> string -> unit coq-8.15.0/doc/plugin_tutorial/tuto1/src/simple_check.ml000066400000000000000000000012351417001151100231700ustar00rootroot00000000000000let simple_check1 env sigma evalue = (* This version should be preferred if you want to really verify that the input is well-typed, and if you want to obtain the type. 
*) (* Note that the output value is a pair containing a new evar_map: typing will fill out blanks in the term by add evar bindings. *) Typing.type_of env sigma evalue let simple_check2 env sigma evalue = (* This version should be preferred if you already expect the input to have been type-checked before. Set ~lax to false if you want an anomaly to be raised in case of a type error. Otherwise a ReTypeError exception is raised. *) Retyping.get_type_of ~lax:true env sigma evalue coq-8.15.0/doc/plugin_tutorial/tuto1/src/simple_check.mli000066400000000000000000000002741417001151100233430ustar00rootroot00000000000000val simple_check1 : Environ.env -> Evd.evar_map -> EConstr.constr -> Evd.evar_map * EConstr.constr val simple_check2 : Environ.env -> Evd.evar_map -> EConstr.constr -> EConstr.constr coq-8.15.0/doc/plugin_tutorial/tuto1/src/simple_declare.ml000066400000000000000000000006041417001151100235110ustar00rootroot00000000000000let declare_definition ~poly name sigma body = let udecl = UState.default_univ_decl in let scope = Locality.Global Locality.ImportDefaultBehavior in let kind = Decls.(IsDefinition Definition) in let cinfo = Declare.CInfo.make ~name ~typ:None () in let info = Declare.Info.make ~scope ~kind ~udecl ~poly () in Declare.declare_definition ~info ~cinfo ~opaque:false ~body sigma coq-8.15.0/doc/plugin_tutorial/tuto1/src/simple_declare.mli000066400000000000000000000001531417001151100236610ustar00rootroot00000000000000open Names val declare_definition : poly:bool -> Id.t -> Evd.evar_map -> EConstr.t -> Names.GlobRef.t coq-8.15.0/doc/plugin_tutorial/tuto1/src/simple_print.ml000066400000000000000000000012501417001151100232440ustar00rootroot00000000000000(* A more advanced example of how to explore the structure of terms of type constr is given in the coq-dpdgraph plugin. *) let simple_body_access gref = let open Names.GlobRef in match gref with | VarRef _ -> failwith "variables are not covered in this example" | IndRef _ -> failwith "inductive types are not covered in this example" | ConstructRef _ -> failwith "constructors are not covered in this example" | ConstRef cst -> let cb = Environ.lookup_constant cst (Global.env()) in match Global.body_of_constant_body Library.indirect_accessor cb with | Some(e, _, _) -> EConstr.of_constr e | None -> failwith "This term has no value" coq-8.15.0/doc/plugin_tutorial/tuto1/src/simple_print.mli000066400000000000000000000000731417001151100234170ustar00rootroot00000000000000val simple_body_access : Names.GlobRef.t -> EConstr.constr coq-8.15.0/doc/plugin_tutorial/tuto1/src/tuto1_plugin.mlpack000066400000000000000000000000731417001151100240320ustar00rootroot00000000000000Inspector Simple_check Simple_declare Simple_print G_tuto1 coq-8.15.0/doc/plugin_tutorial/tuto1/theories/000077500000000000000000000000001417001151100212425ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto1/theories/Demo.v000066400000000000000000000034741417001151100223250ustar00rootroot00000000000000From Tuto1 Require Import Loader. (*** Printing user inputs ***) Definition definition := 5. What's definition. What kind of term is definition. What kind of identifier is definition. What is 1 2 3 a list of. What is a list of. (* no arguments = empty list *) Is 1 2 3 nonempty. (* Is nonempty *) (* does not parse *) And is 1 provided. And is provided. (*** Interning terms ***) Intern 3. Intern definition. Intern (fun (x : Prop) => x). Intern (fun (x : Type) => x). Intern (forall (T : Type), T). Intern (fun (T : Type) (t : T) => t). Intern _. Intern (Type : Type). 
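(* Each Intern call above prints the evar_map before and after interning,
   followed by the interned term itself; the code producing this output is
   the Intern command in src/g_tuto1.mlg. *)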
(*** Defining terms ***) MyDefine n := 1. Print n. MyDefine f := (fun (x : Type) => x). Print f. (*** Printing terms ***) MyPrint f. MyPrint n. Fail MyPrint nat. DefineLookup n' := 1. DefineLookup f' := (fun (x : Type) => x). (*** Checking terms ***) Check1 3. Check1 definition. Check1 (fun (x : Prop) => x). Check1 (fun (x : Type) => x). Check1 (forall (T : Type), T). Check1 (fun (T : Type) (t : T) => t). Check1 _. Check1 (Type : Type). Check2 3. Check2 definition. Check2 (fun (x : Prop) => x). Check2 (fun (x : Type) => x). Check2 (forall (T : Type), T). Check2 (fun (T : Type) (t : T) => t). Check2 _. Check2 (Type : Type). (*** Convertibility ***) Convertible 1 1. Convertible (fun (x : Type) => x) (fun (x : Type) => x). Convertible Type Type. Convertible 1 ((fun (x : nat) => x) 1). Convertible 1 2. Convertible (fun (x : Type) => x) (fun (x : Prop) => x). Convertible Type Prop. Convertible 1 ((fun (x : nat) => x) 2). (*** Introducing variables ***) Theorem foo: forall (T : Set) (t : T), T. Proof. my_intro T. my_intro t. apply t. Qed. (*** Exploring proof state ***) Fail ExploreProof. (* not in a proof *) Theorem bar: forall (T : Set) (t : T), T. Proof. ExploreProof. my_intro T. ExploreProof. my_intro t. ExploreProof. apply t. Qed. coq-8.15.0/doc/plugin_tutorial/tuto1/theories/Loader.v000066400000000000000000000000421417001151100226330ustar00rootroot00000000000000Declare ML Module "tuto1_plugin". coq-8.15.0/doc/plugin_tutorial/tuto2/000077500000000000000000000000001417001151100174215ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto2/Makefile000066400000000000000000000003741417001151100210650ustar00rootroot00000000000000ifeq "$(COQBIN)" "" COQBIN=$(dir $(shell which coqtop))/ endif %: Makefile.coq Makefile.coq: _CoqProject $(COQBIN)coq_makefile -f _CoqProject -o Makefile.coq tests: all @$(MAKE) -C tests -s clean @$(MAKE) -C tests -s all -include Makefile.coq coq-8.15.0/doc/plugin_tutorial/tuto2/_CoqProject000066400000000000000000000003471417001151100215600ustar00rootroot00000000000000-R theories Tuto2 -I src theories/Loader.v theories/Demo.v theories/Count.v src/custom.ml src/custom.mli src/counter.ml src/counter.mli src/persistent_counter.ml src/persistent_counter.mli src/g_tuto2.mlg src/tuto2_plugin.mlpack coq-8.15.0/doc/plugin_tutorial/tuto2/src/000077500000000000000000000000001417001151100202105ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto2/src/counter.ml000066400000000000000000000006651417001151100222300ustar00rootroot00000000000000(* * This file defines our counter, which we use in the Count command. *) (* * Our counter is simply a reference called "counter" to an integer. * * Summary.ref behaves like ref, but also registers a summary to Coq. *) let counter = Summary.ref ~name:"counter" 0 (* * We can increment our counter: *) let increment () = counter := succ !counter (* * We can also read the value of our counter: *) let value () = !counter coq-8.15.0/doc/plugin_tutorial/tuto2/src/counter.mli000066400000000000000000000003201417001151100223650ustar00rootroot00000000000000(* * This file defines our counter, which we use in the Count command. *) (* * Increment the counter *) val increment : unit -> unit (* * Determine the value of the counter *) val value : unit -> int coq-8.15.0/doc/plugin_tutorial/tuto2/src/custom.ml000066400000000000000000000001441417001151100220530ustar00rootroot00000000000000(* * This file defines a custom type for the PassCustom command. 
*) type custom_type = Foo | Bar coq-8.15.0/doc/plugin_tutorial/tuto2/src/custom.mli000066400000000000000000000001441417001151100222240ustar00rootroot00000000000000(* * This file defines a custom type for the PassCustom command. *) type custom_type = Foo | Bar coq-8.15.0/doc/plugin_tutorial/tuto2/src/dune000066400000000000000000000002121417001151100210610ustar00rootroot00000000000000(library (name tuto2_plugin) (public_name coq-core.plugins.tutorial.p2) (libraries coq-core.plugins.ltac)) (coq.pp (modules g_tuto2)) coq-8.15.0/doc/plugin_tutorial/tuto2/src/g_tuto2.mlg000066400000000000000000000471471417001151100223110ustar00rootroot00000000000000(* -------------------------------------------------------------------------- *) (* *) (* Initial ritual dance *) (* *) (* -------------------------------------------------------------------------- *) DECLARE PLUGIN "tuto2_plugin" (* Use this macro before any of the other OCaml macros. Each plugin has a unique name. We have decided to name this plugin as "tuto2_plugin". That means that: (1) We write the following command in a file called Loader.v: Declare ML Module "tuto2_plugin". to load this command into the Coq top-level. (2) Users can then load our plugin in other Coq files by writing: From Tuto2 Require Import Loader. where Loader is the name of the file that declares "tuto2_plugin", and where Tuto2 is the name passed to the -R argument in our _CoqProject. (3) The above commands will succeed only if there is "tuto2_plugin.cmxs" in some of the directories where Coq is supposed to look (i.e. the ones we specified via "-I ..." command line options in _CoqProject). As long as this is listed in our _CoqProject, the Makefile takes care of placing it in the right directory. (4) The file "tuto2_plugin.mlpack" lists the OCaml modules to be linked in "tuto2_plugin.cmxs". (5) The file "tuto2_plugin.mlpack" as well as all .ml, .mli and .mlg files are listed in the "_CoqProject" file. *) (* -------------------------------------------------------------------------- *) (* *) (* Importing OCaml dependencies *) (* *) (* -------------------------------------------------------------------------- *) (* * This .mlg file is parsed into a .ml file. You can put OCaml in this file * inside of curly braces. It's best practice to use this only to import * other modules, and include most of your functionality in those modules. * * Here we list all of the dependencies that these commands have, and explain * why. We also refer to the first command that uses them, where further * explanation can be found in context. *) { (*** Dependencies from Coq ***) (* * This lets us take non-terminal arguments to a command (for example, * the PassInt command that takes an integer argument needs this * this dependency). * * First used by: PassInt *) open Stdarg (* * This is Coq's pretty-printing module. Here, we need it to use some * useful syntax for pretty-printing. * * First use by: Count *) open Pp } (* -------------------------------------------------------------------------- *) (* *) (* How to define a new Vernacular command? *) (* *) (* -------------------------------------------------------------------------- *) (* This command does nothing: *) VERNAC COMMAND EXTEND NoOp CLASSIFIED AS QUERY | [ "Nothing" ] -> { () } END (* --- Defining a Command --- These: VERNAC COMMAND EXTEND and END mark the beginning and the end of the definition of a new Vernacular command. 
--- Assigning a Command a Unique Identifier --- NoOp is a unique identifier (which must start with an upper-case letter) associated with the new Vernacular command we are defining. It is good to make this identifier descriptive. --- Classifying a Command --- CLASSIFIED AS QUERY tells Coq that the new Vernacular command neither: - changes the global environment, nor - modifies the plugin's state. If the new command could: - change the global environment - or modify a plugin's state then one would have to use CLASSIFIED AS SIDEFF instead. --- Defining Parsing and Interpretation Rules --- This: [ "Nothing" ] -> { () } defines: - the parsing rule (left) - the interpretation rule (right) The parsing rule and the interpretation rule are separated by -> token. The parsing rule, in this case, is: [ "Nothing" ] By convention, all vernacular command start with an upper-case letter. The '[' and ']' characters mark the beginning and the end of the parsing rule, respectively. The parsing rule itself says that the syntax of the newly defined command is composed from a single terminal Nothing. The interpretation rule, in this case, is: { () } Similarly to the case of the parsing rule, the '{' and '}' characters mark the beginning and the end of the interpretation rule. In this case, the following Ocaml expression: () defines the effect of the Vernacular command we have just defined. That is, it behaves is no-op. --- Calling a Command --- In Demo.v, we call this command by writing: Nothing. since our parsing rule is "Nothing". This does nothing, since our interpretation rule is (). *) (* -------------------------------------------------------------------------- *) (* *) (* How to define a new Vernacular command with some terminal parameters? *) (* *) (* -------------------------------------------------------------------------- *) (* This command takes some terminal parameters and does nothing. *) VERNAC COMMAND EXTEND NoOpTerminal CLASSIFIED AS QUERY | [ "Command" "With" "Some" "Terminal" "Parameters" ] -> { () } END (* --- Defining a Command with Terminal Parameters --- As shown above, the Vernacular command can be composed from any number of terminals. By convention, each of these terminals starts with an upper-case letter. --- Calling a Command with Terminal Parameters --- In Demo.v, we call this command by writing: Command With Some Terminal Parameters. to match our parsing rule. As expected, this does nothing. --- Recognizing Syntax Errors --- Note that if we were to omit any of these terminals, for example by writing: Command. it would fail to parse (as expected), showing this error to the user: Syntax error: illegal begin of vernac. *) (* -------------------------------------------------------------------------- *) (* *) (* How to define a new Vernacular command with some non-terminal parameter? *) (* *) (* -------------------------------------------------------------------------- *) (* This command takes an integer argument and does nothing. *) VERNAC COMMAND EXTEND PassInt CLASSIFIED AS QUERY | [ "Pass" int(i) ] -> { () } END (* --- Dependencies --- Since this command takes a non-terminal argument, it is the first to depend on Stdarg (opened at the top of this file). --- Defining a Command with Non-Terminal Arguments --- This: int(i) means that the new command is expected to be followed by an integer. The integer is bound in the parsing rule to variable i. This variable i then can be used in the interpretation rule. 
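For instance, a hypothetical variant of this rule (shown only as a sketch,
not part of this tutorial's build) could echo the integer back to the user:

  | [ "Pass" int(i) ] -> { Feedback.msg_notice (Pp.int i) }

using the same Feedback.msg_notice and Pp.int functions that the Count
command employs later in this file.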
To see value of which Ocaml types can be bound this way, look at the wit_* function declared in interp/stdarg.mli (in the Coq's codebase). There are more examples in tuto1. If we drop the wit_ prefix, we will get the token that we can use in the parsing rule. That is, since there exists wit_int, we know that we can write: int(i) By looking at the signature of the wit_int function: val wit_int : int uniform_genarg_type we also know that variable i will have the type int. --- Recognizing Build Errors --- The mapping from int(i) to wit_int is automatic. This is why, if we forget to open Stdarg, we will get this error: Unbound value wit_int when we try to build our plugin. It is good to recognize this error, since this is a common mistake in plugin development, and understand that the fix is to open the file (Stdarg) where wit_int is defined. --- Calling a Command with Terminal Arguments --- We call this command in Demo.v by writing: Pass 42. We could just as well pass any other integer. As expected, this command does nothing. --- Recognizing Syntax Errors --- As in our previous command, if we were to omit the arguments to the command, for example by writing: Pass. it would fail to parse (as expected), showing this error to the user: Syntax error: [prim:integer] expected after 'Pass' (in [vernac:command]). The same thing would happen if we passed the wrong argument type: Pass True. If we pass too many arguments: Pass 15 20. we will get a different syntax error: Syntax error: '.' expected after [vernac:command] (in [vernac_aux]). It is good to recognize these errors, since doing so can help you catch mistakes you make defining your parser rules during plugin development. *) (* -------------------------------------------------------------------------- *) (* *) (* How to define a new Vernacular command with variable number of arguments? *) (* *) (* -------------------------------------------------------------------------- *) (* This command takes a list of integers and does nothing: *) VERNAC COMMAND EXTEND AcceptIntList CLASSIFIED AS QUERY | [ "Accept" int_list(l) ] -> { () } END (* --- Dependencies --- Much like PassInt, this command depends on Stdarg. --- Defining a Command that Takes a Variable Number of Arguments --- This: int_list(l) means that the new Vernacular command is expected to be followed by a (whitespace separated) list of integers. This list of integers is bound to the indicated l. In this case, as well as in the cases we point out below, instead of int in int_list we could use any other supported type, e.g. ident, bool, ... --- Other Ways to Take a Variable Number of Arguments --- To see which other Ocaml type constructors (in addition to list) are supported, have a look at the parse_user_entry function defined in the coqpp/coqpp_parse.mly file. E.g.: - ne_int_list(x) would represent a non-empty list of integers, - int_list(x) would represent a list of integers, - int_opt(x) would represent a value of type int option, - ··· Much like with int_list, we could use any other supported type here. There are some more examples of this in tuto1. --- Calling a Command with a Variable Number of Arguments --- We call this command in Demo.v by writing: Accept 100 200 300 400. As expected, this does nothing. Since our parser rule uses int_list, the arguments to Accept can be a list of integers of any length. For example, we can pass the empty list: Accept. or just one argument: Accept 2. and so on. 
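As another sketch (again not part of this tutorial's build), a variant that
actually consumed the list could sum it before reporting:

  | [ "Sum" int_list(l) ] -> { Feedback.msg_notice (Pp.int (List.fold_left (+) 0 l)) }

where l has type int list, as explained above.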
*) (* -------------------------------------------------------------------------- *) (* *) (* How to define a new Vernacular command that takes values of a custom type? *) (* *) (* -------------------------------------------------------------------------- *) (* --- Defining Custom Types --- Vernacular commands can take custom types in addition to the built-in ones. The first step to taking these custom types as arguments is to define them. We define a type of values that we want to pass to our Vernacular command in custom.ml/custom.mli. The type is very simple: type custom_type : Foo | Bar. --- Using our New Module --- Now that we have a new OCaml module Custom, in order to use it, we must do the following: 1. Add src/custom.ml and src/custom.mli to our _CoqProject 2. Add Custom to our tuto2_plugin.mlpack This workflow will become very familiar to you when you add new modules to your plugins, so it is worth getting used to. --- Depending on our New Module --- Now that our new module is listed in both _CoqProject and tuto2_plugin.mlpack, we can use fully qualified names Custom.Foo and Custom.Bar. Alternatively, we could add the dependency on our module: open Custom. to the top of the file, and then refer to Foo and Bar directly. --- Telling Coq About our New Argument Type --- By default, we are able to define new Vernacular commands that can take parameters of some of the supported types. Which types are supported, that was discussed earlier. If we want to be able to define Vernacular command that takes parameters of a type that is not supported by default, we must use the following macro: *) VERNAC ARGUMENT EXTEND custom | [ "Foo" ] -> { Custom.Foo } | [ "Bar" ] -> { Custom.Bar } END (* where: custom indicates that, from now on, in our parsing rules we can write: custom(some_variable) in those places where we expect user to provide an input that can be parsed by the parsing rules above (and interpreted by the interpretations rules above). *) (* --- Defining a Command that Takes an Argument of a Custom Type --- Now that Coq is aware of our new argument type, we can define a command that uses it. This command takes an argument Foo or Bar and does nothing: *) VERNAC COMMAND EXTEND PassCustom CLASSIFIED AS QUERY | [ "Foobar" custom(x) ] -> { () } END (* --- Calling a Command that Takes an Argument of a Custom Type --- We call this command in Demo.v by writing: Foobar Foo. Foobar Bar. As expected, both of these do nothing. In the first case, x gets the value Custom.Foo : Custom.custom_type, since our custom parsing and interpretation rules (VERNAC ARGUMENT EXTEND custom ...) map the input Foo to Custom.Foo. Similarly, in the second case, x gets the value Custom.Bar : Custom.custom_type. *) (* -------------------------------------------------------------------------- *) (* *) (* How to give a feedback to the user? *) (* *) (* -------------------------------------------------------------------------- *) (* So far we have defined commands that do nothing. We can also signal feedback to the user. This command tells the user that everything is awesome: *) VERNAC COMMAND EXTEND Awesome CLASSIFIED AS QUERY | [ "Is" "Everything" "Awesome" ] -> { Feedback.msg_notice (Pp.str "Everything is awesome!") } END (* --- Pretty Printing --- User feedback functions like Feedback.msg_notice take a Pp.t as an argument. Check the Pp module to see which functions are available to construct a Pp.t. The Pp module enable us to represent and construct pretty-printing instructions. 
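For example, using only combinators that already appear in this file,

  Pp.str "Times Count has been called: " ++ Pp.int 3

builds a single Pp.t value that can then be handed to any of the Feedback
functions listed below.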
The concepts defined and the services provided by the Pp module are in various respects related to the concepts and services provided by the Format module that is part of the Ocaml standard library. --- Giving Feedback --- Once we have a Pp.t, we can use the following functions: - Feedback.msg_info : Pp.t -> unit - Feedback.msg_notice : Pp.t -> unit - Feedback.msg_warning : Pp.t -> unit - Feedback.msg_debug : Pp.t -> unit to give user a textual feedback. Examples of some of these can be found in tuto0. --- Signaling Errors --- While there is a Feedback.msg_error, when signaling an error, it is currently better practice to use user_err. There is an example of this in tuto0. *) (* -------------------------------------------------------------------------- *) (* *) (* How to implement a Vernacular command with (undoable) side-effects? *) (* *) (* -------------------------------------------------------------------------- *) (* This command counts how many times it has been called since importing our plugin, and signals that information to the user: *) VERNAC COMMAND EXTEND Count CLASSIFIED AS SIDEFF | [ "Count" ] -> { Counter.increment (); let v = Counter.value () in Feedback.msg_notice (Pp.str "Times Count has been called: " ++ Pp.int v) } END (* --- Dependencies --- If we want to use the ++ syntax, then we need to depend on Pp explicitly. This is why, at the top, we write: open Pp. --- Defining the Counter --- We define our counter in the Counter module. Please see counter.ml and counter.mli for details. As with Custom, we must modify our _CoqProject and tuto2_plugin.mlpack so that we can use Counter in our code. --- Classifying the Command --- This command has undoable side-effects: When the plugin is first loaded, the counter is instantiated to 0. After each time we call Count, the value of the counter increases by 1. Thus, we must write CLASSIFIED AS SIDEEFF for this command, rather than CLASSIFIED AS QUERY. See the explanation from the NoOp command earlier if you do not remember the distinction. --- Calling the Command --- We call our command three times in Demo.v by writing: Count. Count. Count. This gives us the following output: Times Count has been called: 1 Times Count has been called: 2 Times Count has been called: 3 Note that when the plugin is first loaded, the counter is 0. It increases each time Count is called. --- Behavior with Imports --- Count.v shows the behavior with imports. Note that if we import Demo.v, the counter is set to 0 from the beginning, even though Demo.v calls Count three times. In other words, this is not persistent! *) (* -------------------------------------------------------------------------- *) (* *) (* How to implement a Vernacular command that uses persistent storage? *) (* *) (* -------------------------------------------------------------------------- *) (* * This command is like Count, but it is persistent across modules: *) VERNAC COMMAND EXTEND CountPersistent CLASSIFIED AS SIDEFF | [ "Count" "Persistent" ] -> { Persistent_counter.increment (); let v = Persistent_counter.value () in Feedback.msg_notice (Pp.str "Times Count Persistent has been called: " ++ Pp.int v) } END (* --- Persistent Storage --- Everything is similar to the Count command, except that we use a counter that is persistent. See persistent_counter.ml for details. The key trick is that we must create a persistent object for our counter to persist across modules. Coq has some useful APIs for this in Libobject. We demonstrate these in persistent_counter.ml. 
This is really, really useful if you want, for example, to cache some results that your plugin computes across modules. A persistent object can be a hashtable, for example, that maps inputs to outputs your command has already computed, if you know the result will not change. --- Calling the Command --- We call the command in Demo.v and in Count.v, just like we did with Count. Note that this time, the value of the counter from Demo.v persists in Count.v. *) coq-8.15.0/doc/plugin_tutorial/tuto2/src/persistent_counter.ml000066400000000000000000000031511417001151100245010ustar00rootroot00000000000000(* * This file defines our persistent counter, which we use in the * CountPersistent command. *) (* * At its core, our persistent counter looks exactly the same as * our non-persistent counter (with a different name to prevent collisions): *) let counter = Summary.ref ~name:"persistent_counter" 0 (* * The difference is that we need to declare it as a persistent object * using Libobject.declare_object. To do that, we define a function that * saves the value that is passed to it into the reference we have just defined: *) let cache_count (_, v) = counter := v (* * We then use declare_object to create a function that takes an integer value * (the type our counter refers to) and creates a persistent object from that * value: *) let declare_counter : int -> Libobject.obj = let open Libobject in declare_object { (default_object "COUNTER") with cache_function = cache_count; load_function = (fun _ -> cache_count); } (* * See Libobject for more information on what other information you * can pass here, and what all of these functions mean. * * For example, if we passed the same thing that we pass to load_function * to open_function, then our last call to Count Persistent in Count.v * would return 4 and not 6. *) (* * Incrementing our counter looks almost identical: *) let increment () = Lib.add_anonymous_leaf (declare_counter (succ !counter)) (* * except that we must call our declare_counter function to get a persistent * object. We then pass this object to Lib.add_anonymous_leaf. *) (* * Reading a value does not change at all: *) let value () = !counter coq-8.15.0/doc/plugin_tutorial/tuto2/src/persistent_counter.mli000066400000000000000000000003761417001151100246600ustar00rootroot00000000000000(* * This file defines our persistent counter, which we use in the * CountPersistent command. *) (* * Increment the persistent counter *) val increment : unit -> unit (* * Determine the value of the persistent counter *) val value : unit -> int coq-8.15.0/doc/plugin_tutorial/tuto2/src/tuto2_plugin.mlpack000066400000000000000000000000521417001151100240310ustar00rootroot00000000000000Custom Counter Persistent_counter G_tuto2 coq-8.15.0/doc/plugin_tutorial/tuto2/theories/000077500000000000000000000000001417001151100212435ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto2/theories/Count.v000066400000000000000000000002471417001151100225250ustar00rootroot00000000000000Require Import Demo. (*** Local ***) Count. Count. Import Demo. Count. (*** Persistent ***) Count Persistent. Count Persistent. Import Demo. Count Persistent. coq-8.15.0/doc/plugin_tutorial/tuto2/theories/Demo.v000066400000000000000000000016361417001151100223240ustar00rootroot00000000000000From Tuto2 Require Import Loader. (*** A no-op command ***) Nothing. (*** No-op commands with arguments ***) (* * Terminal parameters: *) Command With Some Terminal Parameters. (* Command. 
*) (* does not parse *) (* * A single non-terminal argument: *) Pass 42. (* Pass. *) (* does not parse *) (* Pass True. *) (* does not parse *) (* Pass 15 20. *) (* does not parse *) (* * A list of non-terminal arguments: *) Accept 100 200 300 400. Accept. Accept 2. (* * A custom argument: *) Foobar Foo. Foobar Bar. (*** Commands that give feedback ***) (* * Simple feedback: *) Is Everything Awesome. (*** Storage and side effects ***) (* * Local side effects: *) Count. Count. Count. (* * See Count.v for behavior in modules that import this one. *) (* * Persistent side effects: *) Count Persistent. Count Persistent. Count Persistent. (* * See Count.v for behavior in modules that import this one. *) coq-8.15.0/doc/plugin_tutorial/tuto2/theories/Loader.v000066400000000000000000000000421417001151100226340ustar00rootroot00000000000000Declare ML Module "tuto2_plugin". coq-8.15.0/doc/plugin_tutorial/tuto3/000077500000000000000000000000001417001151100174225ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto3/Makefile000066400000000000000000000003741417001151100210660ustar00rootroot00000000000000ifeq "$(COQBIN)" "" COQBIN=$(dir $(shell which coqtop))/ endif %: Makefile.coq Makefile.coq: _CoqProject $(COQBIN)coq_makefile -f _CoqProject -o Makefile.coq tests: all @$(MAKE) -C tests -s clean @$(MAKE) -C tests -s all -include Makefile.coq coq-8.15.0/doc/plugin_tutorial/tuto3/_CoqProject000066400000000000000000000002771417001151100215630ustar00rootroot00000000000000-R theories Tuto3 -I src theories/Data.v theories/Loader.v src/tuto_tactic.ml src/tuto_tactic.mli src/construction_game.ml src/construction_game.mli src/g_tuto3.mlg src/tuto3_plugin.mlpack coq-8.15.0/doc/plugin_tutorial/tuto3/src/000077500000000000000000000000001417001151100202115ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto3/src/construction_game.ml000066400000000000000000000174511417001151100242760ustar00rootroot00000000000000open Pp open Context let find_reference = Coqlib.find_reference [@ocaml.warning "-3"] let example_sort sigma = (* creating a new sort requires that universes should be recorded in the evd datastructure, so this datastructure also needs to be passed around. *) let sigma, s = Evd.new_sort_variable Evd.univ_rigid sigma in let new_type = EConstr.mkSort s in sigma, new_type let c_one sigma = (* In the general case, global references may refer to universe polymorphic objects, and their universe has to be made afresh when creating an instance. *) let gr_S = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "S" in (* the long name of "S" was found with the command "About S." *) let gr_O = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "O" in let sigma, c_O = Evd.fresh_global (Global.env ()) sigma gr_O in let sigma, c_S = Evd.fresh_global (Global.env ()) sigma gr_S in (* Here is the construction of a new term by applying functions to argument. *) sigma, EConstr.mkApp (c_S, [| c_O |]) let dangling_identity env sigma = (* I call this a dangling identity, because it is not polymorph, but the type on which it applies is left unspecified, as it is represented by an existential variable. The declaration for this existential variable needs to be added in the evd datastructure. *) let sigma, type_type = example_sort sigma in let sigma, arg_type = Evarutil.new_evar env sigma type_type in (* Notice the use of a De Bruijn index for the inner occurrence of the bound variable. 
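     Here EConstr.mkRel 1 refers to the binder introduced by the enclosing
     EConstr.mkLambda, that is, to the variable named "x" just below.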
*) sigma, EConstr.mkLambda(nameR (Names.Id.of_string "x"), arg_type, EConstr.mkRel 1) let dangling_identity2 env sigma = (* This example uses directly a function that produces an evar that is meant to be a type. *) let sigma, (arg_type, type_type) = Evarutil.new_type_evar env sigma Evd.univ_rigid in sigma, EConstr.mkLambda(nameR (Names.Id.of_string "x"), arg_type, EConstr.mkRel 1) let example_sort_app_lambda () = let env = Global.env () in let sigma = Evd.from_env env in let sigma, c_v = c_one sigma in (* dangling_identity and dangling_identity2 can be used interchangeably here *) let sigma, c_f = dangling_identity2 env sigma in let c_1 = EConstr.mkApp (c_f, [| c_v |]) in let _ = Feedback.msg_notice (Printer.pr_econstr_env env sigma c_1) in (* type verification happens here. Type verification will update existential variable information in the evd part. *) let sigma, the_type = Typing.type_of env sigma c_1 in (* At display time, you will notice that the system knows about the existential variable being instantiated to the "nat" type, even though c_1 still contains the meta-variable. *) Feedback.msg_notice ((Printer.pr_econstr_env env sigma c_1) ++ str " has type " ++ (Printer.pr_econstr_env env sigma the_type)) let c_S sigma = let gr = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "S" in Evd.fresh_global (Global.env ()) sigma gr let c_O sigma = let gr = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "O" in Evd.fresh_global (Global.env ()) sigma gr let c_E sigma = let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "EvenNat" in Evd.fresh_global (Global.env ()) sigma gr let c_D sigma = let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "tuto_div2" in Evd.fresh_global (Global.env ()) sigma gr let c_Q sigma = let gr = find_reference "Tuto3" ["Coq"; "Init"; "Logic"] "eq" in Evd.fresh_global (Global.env ()) sigma gr let c_R sigma = let gr = find_reference "Tuto3" ["Coq"; "Init"; "Logic"] "eq_refl" in Evd.fresh_global (Global.env ()) sigma gr let c_N sigma = let gr = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "nat" in Evd.fresh_global (Global.env ()) sigma gr let c_C sigma = let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "C" in Evd.fresh_global (Global.env ()) sigma gr let c_F sigma = let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "S_ev" in Evd.fresh_global (Global.env ()) sigma gr let c_P sigma = let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "s_half_proof" in Evd.fresh_global (Global.env ()) sigma gr (* If c_S was universe polymorphic, we should have created a new constant at each iteration of buildup. 
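   Concretely, we would then have to call Evd.fresh_global (as c_S and c_O
   do) at every step of the recursion and thread sigma through it, instead
   of creating the two constants once before building the numeral.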
*) let mk_nat sigma n = let sigma, c_S = c_S sigma in let sigma, c_O = c_O sigma in let rec buildup = function | 0 -> c_O | n -> EConstr.mkApp (c_S, [| buildup (n - 1) |]) in if n <= 0 then sigma, c_O else sigma, buildup n let example_classes n = let env = Global.env () in let sigma = Evd.from_env env in let sigma, c_n = mk_nat sigma n in let sigma, n_half = mk_nat sigma (n / 2) in let sigma, c_N = c_N sigma in let sigma, c_div = c_D sigma in let sigma, c_even = c_E sigma in let sigma, c_Q = c_Q sigma in let sigma, c_R = c_R sigma in let arg_type = EConstr.mkApp (c_even, [| c_n |]) in let sigma0 = sigma in let sigma, instance = Evarutil.new_evar env sigma arg_type in let c_half = EConstr.mkApp (c_div, [|c_n; instance|]) in let _ = Feedback.msg_notice (Printer.pr_econstr_env env sigma c_half) in let sigma, the_type = Typing.type_of env sigma c_half in let _ = Feedback.msg_notice (Printer.pr_econstr_env env sigma c_half) in let proved_equality = EConstr.mkCast(EConstr.mkApp (c_R, [| c_N; c_half |]), Constr.DEFAULTcast, EConstr.mkApp (c_Q, [| c_N; c_half; n_half|])) in (* This is where we force the system to compute with type classes. *) (* Question to coq developers: why do we pass two evd arguments to solve_remaining_evars? Is the choice of sigma0 relevant here? *) let sigma = Pretyping.solve_remaining_evars (Pretyping.default_inference_flags true) env sigma ~initial:sigma0 in let sigma, final_type = Typing.type_of env sigma proved_equality in Feedback.msg_notice (Printer.pr_econstr_env env sigma proved_equality) (* This function, together with definitions in Data.v, shows how to trigger automatic proofs at the time of typechecking, based on canonical structures. n is a number for which we want to find the half (and a proof that this half is indeed the half) *) let example_canonical n = let env = Global.env () in let sigma = Evd.from_env env in (* Construct a natural representation of this integer. *) let sigma, c_n = mk_nat sigma n in (* terms for "nat", "eq", "S_ev", "eq_refl", "C" *) let sigma, c_N = c_N sigma in let sigma, c_F = c_F sigma in let sigma, c_R = c_R sigma in let sigma, c_C = c_C sigma in let sigma, c_P = c_P sigma in (* the last argument of C *) let refl_term = EConstr.mkApp (c_R, [|c_N; c_n |]) in (* Now we build two existential variables, for the value of the half and for the "S_ev" structure that triggers the proof search. *) let sigma, ev1 = Evarutil.new_evar env sigma c_N in (* This is the type for the second existential variable *) let csev = EConstr.mkApp (c_F, [| ev1 |]) in let sigma, ev2 = Evarutil.new_evar env sigma csev in (* Now we build the C structure. *) let test_term = EConstr.mkApp (c_C, [| c_n; ev1; ev2; refl_term |]) in (* Type-checking this term will compute values for the existential variables *) let sigma, final_type = Typing.type_of env sigma test_term in (* The computed type has two parameters, the second one is the proof. 
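   (The code below binds this second parameter to the_half and displays it;
   the proof object itself is recovered afterwards through s_half_proof.)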
*) let value = match EConstr.kind sigma final_type with | Constr.App(_, [| _; the_half |]) -> the_half | _ -> failwith "expecting the whole type to be \"cmp _ the_half\"" in let _ = Feedback.msg_notice (Printer.pr_econstr_env env sigma value) in (* I wish for a nicer way to get the value of ev2 in the evar_map *) let prf_struct = EConstr.of_constr (EConstr.to_constr sigma ev2) in let the_prf = EConstr.mkApp (c_P, [| ev1; prf_struct |]) in let sigma, the_statement = Typing.type_of env sigma the_prf in Feedback.msg_notice (Printer.pr_econstr_env env sigma the_prf ++ str " has type " ++ Printer.pr_econstr_env env sigma the_statement) coq-8.15.0/doc/plugin_tutorial/tuto3/src/construction_game.mli000066400000000000000000000003011417001151100244310ustar00rootroot00000000000000val dangling_identity : Environ.env -> Evd.evar_map -> Evd.evar_map * EConstr.t val example_sort_app_lambda : unit -> unit val example_classes : int -> unit val example_canonical : int -> unit coq-8.15.0/doc/plugin_tutorial/tuto3/src/dune000066400000000000000000000002541417001151100210700ustar00rootroot00000000000000(library (name tuto3_plugin) (public_name coq-core.plugins.tutorial.p3) (flags :standard -warn-error -3) (libraries coq-core.plugins.ltac)) (coq.pp (modules g_tuto3)) coq-8.15.0/doc/plugin_tutorial/tuto3/src/g_tuto3.mlg000066400000000000000000000023221417001151100222750ustar00rootroot00000000000000DECLARE PLUGIN "tuto3_plugin" { open Ltac_plugin open Construction_game (* This one is necessary, to avoid message about missing wit_string *) open Stdarg } VERNAC COMMAND EXTEND ShowTypeConstruction CLASSIFIED AS QUERY | [ "Tuto3_1" ] -> { let env = Global.env () in let sigma = Evd.from_env env in let sigma, s = Evd.new_sort_variable Evd.univ_rigid sigma in let new_type_2 = EConstr.mkSort s in let sigma, _ = Typing.type_of (Global.env()) (Evd.from_env (Global.env())) new_type_2 in Feedback.msg_notice (Printer.pr_econstr_env env sigma new_type_2) } END VERNAC COMMAND EXTEND ShowOneConstruction CLASSIFIED AS QUERY | [ "Tuto3_2" ] -> { example_sort_app_lambda () } END TACTIC EXTEND collapse_hyps | [ "pack" "hypothesis" ident(i) ] -> { Tuto_tactic.pack_tactic i } END (* More advanced examples, where automatic proof happens but no tactic is being called explicitly. The first one uses type classes. *) VERNAC COMMAND EXTEND TriggerClasses CLASSIFIED AS QUERY | [ "Tuto3_3" int(n) ] -> { example_classes n } END (* The second one uses canonical structures. *) VERNAC COMMAND EXTEND TriggerCanonical CLASSIFIED AS QUERY | [ "Tuto3_4" int(n) ] -> { example_canonical n } END coq-8.15.0/doc/plugin_tutorial/tuto3/src/tuto3_plugin.mlpack000066400000000000000000000000461417001151100240360ustar00rootroot00000000000000Construction_game Tuto_tactic G_tuto3 coq-8.15.0/doc/plugin_tutorial/tuto3/src/tuto_tactic.ml000066400000000000000000000126121417001151100230670ustar00rootroot00000000000000open Proofview let constants = ref ([] : EConstr.t list) (* This is a pattern to collect terms from the Coq memory of valid terms and proofs. 
This pattern extends all the way to the definition of function c_U *) let collect_constants () = if (!constants = []) then let open EConstr in let open UnivGen in let find_reference = Coqlib.find_reference [@ocaml.warning "-3"] in let gr_H = find_reference "Tuto3" ["Tuto3"; "Data"] "pack" in let gr_M = find_reference "Tuto3" ["Tuto3"; "Data"] "packer" in let gr_R = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "pair" in let gr_P = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "prod" in let gr_U = find_reference "Tuto3" ["Tuto3"; "Data"] "uncover" in constants := List.map (fun x -> of_constr (constr_of_monomorphic_global (Global.env ()) x)) [gr_H; gr_M; gr_R; gr_P; gr_U]; !constants else !constants let c_H () = match collect_constants () with it :: _ -> it | _ -> failwith "could not obtain an internal representation of pack" let c_M () = match collect_constants () with _ :: it :: _ -> it | _ -> failwith "could not obtain an internal representation of pack_marker" let c_R () = match collect_constants () with _ :: _ :: it :: _ -> it | _ -> failwith "could not obtain an internal representation of pair" let c_P () = match collect_constants () with _ :: _ :: _ :: it :: _ -> it | _ -> failwith "could not obtain an internal representation of prod" let c_U () = match collect_constants () with _ :: _ :: _ :: _ :: it :: _ -> it | _ -> failwith "could not obtain an internal representation of prod" (* The following tactic is meant to pack an hypothesis when no other data is already packed. The main difficulty in defining this tactic is to understand how to construct the input expected by apply_in. *) let package i = Goal.enter begin fun gl -> Tactics.apply_in true false i [(* this means that the applied theorem is not to be cleared. *) None, (CAst.make (c_M (), (* we don't specialize the theorem with extra values. *) Tactypes.NoBindings))] (* we don't destruct the result according to any intro_pattern *) None end (* This function is meant to observe a type of shape (f a) and return the value a. *) (* Remark by Maxime: look for destApp combinator. *) let unpack_type sigma term = let report () = CErrors.user_err (Pp.str "expecting a packed type") in match EConstr.kind sigma term with | Constr.App (_, [| ty |]) -> ty | _ -> report () (* This function is meant to observe a type of shape A -> pack B -> C and return A, B, C but it is not used in the current version of our tactic. It is kept as an example. *) let two_lambda_pattern sigma term = let report () = CErrors.user_err (Pp.str "expecting two nested implications") in (* Note that pattern-matching is always done through the EConstr.kind function, which only provides one-level deep patterns. *) match EConstr.kind sigma term with (* Here we recognize the outer implication *) | Constr.Prod (_, ty1, l1) -> (* Here we recognize the inner implication *) (match EConstr.kind sigma l1 with | Constr.Prod (n2, packed_ty2, deep_conclusion) -> (* Here we recognized that the second type is an application *) ty1, unpack_type sigma packed_ty2, deep_conclusion | _ -> report ()) | _ -> report () (* In the environment of the goal, we can get the type of an assumption directly by a lookup. 
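   (The lookup below goes through EConstr.lookup_named on the goal's named
   context.)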
The other solution is to call a low-cost retyping function like *) let get_type_of_hyp env id = match EConstr.lookup_named id env with | Context.Named.Declaration.LocalAssum (_, ty) -> ty | _ -> CErrors.user_err (let open Pp in str (Names.Id.to_string id) ++ str " is not a plain hypothesis") let repackage i h_hyps_id = Goal.enter begin fun gl -> let env = Goal.env gl in let sigma = Tacmach.project gl in let concl = Tacmach.pf_concl gl in let (ty1 : EConstr.t) = get_type_of_hyp env i in let (packed_ty2 : EConstr.t) = get_type_of_hyp env h_hyps_id in let ty2 = unpack_type sigma packed_ty2 in let new_packed_type = EConstr.mkApp (c_P (), [| ty1; ty2 |]) in let open EConstr in let new_packed_value = mkApp (c_R (), [| ty1; ty2; mkVar i; mkApp (c_U (), [| ty2; mkVar h_hyps_id|]) |]) in Refine.refine ~typecheck:true begin fun sigma -> let sigma, new_goal = Evarutil.new_evar env sigma (mkArrowR (mkApp(c_H (), [| new_packed_type |])) (Vars.lift 1 concl)) in sigma, mkApp (new_goal, [|mkApp(c_M (), [|new_packed_type; new_packed_value |]) |]) end end let pack_tactic i = let h_hyps_id = (Names.Id.of_string "packed_hyps") in Proofview.Goal.enter begin fun gl -> let hyps = Environ.named_context_val (Proofview.Goal.env gl) in if not (Termops.mem_named_context_val i hyps) then (CErrors.user_err (Pp.str ("no hypothesis named" ^ (Names.Id.to_string i)))) else if Termops.mem_named_context_val h_hyps_id hyps then tclTHEN (repackage i h_hyps_id) (tclTHEN (Tactics.clear [h_hyps_id; i]) (Tactics.introduction h_hyps_id)) else tclTHEN (package i) (tclTHEN (Tactics.rename_hyp [i, h_hyps_id]) (Tactics.move_hyp h_hyps_id Logic.MoveLast)) end coq-8.15.0/doc/plugin_tutorial/tuto3/src/tuto_tactic.mli000066400000000000000000000002221417001151100232320ustar00rootroot00000000000000val two_lambda_pattern : Evd.evar_map -> EConstr.t -> EConstr.t * EConstr.t * EConstr.t val pack_tactic : Names.Id.t -> unit Proofview.tactic coq-8.15.0/doc/plugin_tutorial/tuto3/theories/000077500000000000000000000000001417001151100212445ustar00rootroot00000000000000coq-8.15.0/doc/plugin_tutorial/tuto3/theories/Data.v000066400000000000000000000033361417001151100223110ustar00rootroot00000000000000 Inductive pack (A: Type) : Type := packer : A -> pack A. Arguments packer {A}. Definition uncover (A : Type) (packed : pack A) : A := match packed with packer v => v end. Notation "!!!" := (pack _) (at level 0, only printing). (* The following data is used as material for automatic proofs based on type classes. *) Class EvenNat the_even := {half : nat; half_prop : 2 * half = the_even}. Instance EvenNat0 : EvenNat 0 := {half := 0; half_prop := eq_refl}. Lemma even_rec n h : 2 * h = n -> 2 * S h = S (S n). Proof. intros []. simpl. rewrite <-plus_n_O, <-plus_n_Sm. reflexivity. Qed. Instance EvenNat_rec n (p : EvenNat n) : EvenNat (S (S n)) := {half := S (@half _ p); half_prop := even_rec n (@half _ p) (@half_prop _ p)}. Definition tuto_div2 n (p : EvenNat n) := @half _ p. (* to be used in the following examples Compute (@half 8 _). Check (@half_prop 8 _). Check (@half_prop 7 _). and in command Tuto3_3 8. *) (* The following data is used as material for automatic proofs based on canonical structures. *) Record S_ev n := Build_S_ev {double_of : nat; _ : 2 * n = double_of}. Definition s_half_proof n (r : S_ev n) : 2 * n = double_of n r := match r with Build_S_ev _ _ h => h end. Canonical Structure can_ev_default n d (Pd : 2 * n = d) : S_ev n := Build_S_ev n d Pd. Canonical Structure can_ev0 : S_ev 0 := Build_S_ev 0 0 (@eq_refl _ 0). 
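(* can_ev0 above handles 0; the next declaration, can_ev_rec, handles the
   step from S_ev n to S_ev (S n). These are the canonical instances relied
   upon by example_canonical in construction_game.ml (command Tuto3_4). *)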
Lemma can_ev_rec n : forall (s : S_ev n), S_ev (S n). Proof. intros s; exists (S (S (double_of _ s))). destruct s as [a P]. exact (even_rec _ _ P). Defined. Canonical Structure can_ev_rec. Record cmp (n : nat) (k : nat) := C {h : S_ev k; _ : double_of k h = n}. (* To be used in, e.g., Check (C _ _ _ eq_refl : cmp 6 _). Check (C _ _ _ eq_refl : cmp 7 _). *) coq-8.15.0/doc/plugin_tutorial/tuto3/theories/Loader.v000066400000000000000000000001031417001151100226330ustar00rootroot00000000000000From Tuto3 Require Export Data. Declare ML Module "tuto3_plugin". coq-8.15.0/doc/plugin_tutorial/tuto3/theories/test.v000066400000000000000000000013111417001151100224060ustar00rootroot00000000000000(* to be used e.g. in : coqtop -I src -R theories Tuto3 < theories/test.v *) Require Import Tuto3.Loader. (* This should print Type. *) Tuto3_1. (* This should print a term that contains an existential variable. *) (* And then print the same term, where the variable has been correctly instantiated. *) Tuto3_2. Lemma tutu x y (A : 0 < x) (B : 10 < y) : True. Proof. pack hypothesis A. (* Hypothesis A should have disappeared and a "packed_hyps" hypothesis should have appeared, with unreadable content. *) pack hypothesis B. (* Hypothesis B should have disappeared *) destruct packed_hyps as [unpacked_hyps]. (* Hypothesis unpacked_hyps should contain the previous contents of A and B. *) exact I. Qed. coq-8.15.0/doc/sphinx/000077500000000000000000000000001417001151100144345ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/README.rst000066400000000000000000000445701417001151100161350ustar00rootroot00000000000000============================= Documenting Coq with Sphinx ============================= .. README.rst is auto-generated from README.template.rst and the coqrst/*.py files (in particular coqdomain.py). Use ``doc/tools/coqrst/regen_readme.py`` to rebuild it. Coq's reference manual is written in `reStructuredText `_ (“reST”), and compiled with `Sphinx `_. See `this README <../README.md>`_ for compilation instructions. In addition to standard reST directives (a directive is similar to a LaTeX environment) and roles (a role is similar to a LaTeX command), the ``coqrst`` plugin loaded by the documentation uses a custom *Coq domain* — a set of Coq-specific directives that define *objects* like tactics, commands (vernacs), warnings, etc. —, some custom *directives*, and a few custom *roles*. Finally, this manual uses a small DSL to describe tactic invocations and commands. Coq objects =========== Our Coq domain define multiple `objects`_. Each object has a *signature* (think *type signature*), followed by an optional body (a description of that object). The following example defines two objects: a variant of the ``simpl`` tactic, and an error that it may raise:: .. tacv:: simpl @pattern at {+ @natural} :name: simpl_at This applies ``simpl`` only to the :n:`{+ @natural}` occurrences of the subterms matching :n:`@pattern` in the current goal. .. exn:: Too few occurrences :undocumented: Objects are automatically collected into indices, and can be linked to using the role version of the object's directive. For example, you could link to the tactic variant above using ``:tacv:`simpl_at```, and to its exception using ``:exn:`Too few occurrences```. Names (link targets) are auto-generated for most simple objects, though they can always be overwritten using a ``:name:`` option, as shown above. - Options, errors, warnings have their name set to their signature, with ``...`` replacing all notation bits. 
For example, the auto-generated name of ``.. exn:: @qualid is not a module`` is ``... is not a module``, and a link to it would take the form ``:exn:`... is not a module```. - Vernacs (commands) have their name set to the first word of their signature. For example, the auto-generated name of ``Axiom @ident : @term`` is ``Axiom``, and a link to it would take the form ``:cmd:`Axiom```. - Vernac variants, tactic notations, and tactic variants do not have a default name. Most objects should have a body (i.e. a block of indented text following the signature, called “contents” in Sphinx terms). Undocumented objects should have the ``:undocumented:`` flag instead, as shown above. When multiple objects have a single description, they can be grouped into a single object, like this (semicolons can be used to separate the names of the objects; names starting with ``_`` will be omitted from the indexes):: .. cmdv:: Lemma @ident {* @binder } : @type Remark @ident {* @binder } : @type Fact @ident {* @binder } : @type Corollary @ident {* @binder } : @type Proposition @ident {* @binder } : @type :name: Lemma; Remark; Fact; Corollary; Proposition These commands are all synonyms of :n:`Theorem @ident {* @binder } : type`. Notations --------- The signatures of most objects can be written using a succinct DSL for Coq notations (think regular expressions written with a Lispy syntax). A typical signature might look like ``Hint Extern @natural {? @pattern} => @tactic``, which means that the ``Hint Extern`` command takes a number (``natural``), followed by an optional pattern, and a mandatory tactic. The language has the following constructs (the full grammar is in `TacticNotations.g `_): ``@…`` A placeholder (``@ident``, ``@natural``, ``@tactic``\ …) ``{? …}`` an optional block ``{* …}``, ``{+ …}`` an optional (``*``) or mandatory (``+``) block that can be repeated, with repetitions separated by spaces ``{*, …}``, ``{+, …}`` an optional or mandatory repeatable block, with repetitions separated by commas ``{| … | … | … }`` an alternative, indicating than one of multiple constructs can be used ``%{``, ``%}``, ``%|`` an escaped character (rendered without the leading ``%``). In most cases, escaping is not necessary. In particular, the following expressions are all parsed as plain text, and do not need escaping: ``{ xyz }``, ``x |- y``. But the following escapes *are* needed: ``{| a b %| c | d }``, ``all: %{``. (We use ``%`` instead of the usual ``\`` because you'd have to type ``\`` twice in your reStructuredText file.) For more details and corner cases, see `Advanced uses of notations`_ below. .. FIXME document the new subscript support As an exercise, what do the following patterns mean? .. code:: pattern {+, @term {? at {+ @natural}}} generalize {+, @term at {+ @natural} as @ident} fix @ident @natural with {+ (@ident {+ @binder} {? {struct @ident'}} : @type)} Objects ------- Here is the list of all objects of the Coq domain (The symbol :black_nib: indicates an object whose signature can be written using the notations DSL): ``.. attr::`` :black_nib: An attribute. Example:: .. attr:: local ``.. cmd::`` :black_nib: A Coq command. Example:: .. cmd:: Infix @string := @one_term {? ( {+, @syntax_modifier } ) } {? : @ident } This command is equivalent to :n:`…`. ``.. cmdv::`` :black_nib: A variant of a Coq command. Example:: .. cmd:: Axiom @ident : @term. This command links :token:`term` to the name :token:`term` as its specification in the global environment. 
The fact asserted by :token:`term` is thus assumed as a postulate. .. cmdv:: Parameter @ident : @term. This is equivalent to :n:`Axiom @ident : @term`. ``.. exn::`` :black_nib: An error raised by a Coq command or tactic. This commonly appears nested in the ``.. tacn::`` that raises the exception. Example:: .. tacv:: assert @form by @tactic This tactic applies :n:`@tactic` to solve the subgoals generated by ``assert``. .. exn:: Proof is not complete Raised if :n:`@tactic` does not fully solve the goal. ``.. flag::`` :black_nib: A Coq flag (i.e. a boolean setting). Example:: .. flag:: Nonrecursive Elimination Schemes Controls whether types declared with the keywords :cmd:`Variant` and :cmd:`Record` get an automatic declaration of induction principles. ``.. opt::`` :black_nib: A Coq option (a setting with non-boolean value, e.g. a string or numeric value). Example:: .. opt:: Hyps Limit @natural :name Hyps Limit Controls the maximum number of hypotheses displayed in goals after application of a tactic. ``.. prodn::`` A grammar production. Use ``.. prodn`` to document grammar productions instead of Sphinx `production lists `_. prodn displays multiple productions together with alignment similar to ``.. productionlist``, however unlike ``.. productionlist``\ s, this directive accepts notation syntax. Example:: .. prodn:: occ_switch ::= { {? {| + | - } } {* @natural } } term += let: @pattern := @term in @term | second_production The first line defines "occ_switch", which must be unique in the document. The second references and expands the definition of "term", whose main definition is elsewhere in the document. The third form is for continuing the definition of a nonterminal when it has multiple productions. It leaves the first column in the output blank. ``.. table::`` :black_nib: A Coq table, i.e. a setting that is a set of values. Example:: .. table:: Search Blacklist @string :name: Search Blacklist Controls ... ``.. tacn::`` :black_nib: A tactic, or a tactic notation. Example:: .. tacn:: do @natural @expr :token:`expr` is evaluated to ``v`` which must be a tactic value. … ``.. tacv::`` :black_nib: A variant of a tactic. Example:: .. tacn:: fail This is the always-failing tactic: it does not solve any goal. It is useful for defining other tacticals since it can be caught by :tacn:`try`, :tacn:`repeat`, :tacn:`match goal`, or the branching tacticals. … .. tacv:: fail @natural The number is the failure level. If no level is specified, it defaults to 0. … ``.. thm::`` A theorem. Example:: .. thm:: Bound on the ceiling function Let :math:`p` be an integer and :math:`c` a rational constant. Then :math:`p \ge c \rightarrow p \ge \lceil{c}\rceil`. ``.. warn::`` :black_nib: An warning raised by a Coq command or tactic.. Do not mistake this for ``.. warning::``; this directive is for warning messages produced by Coq. Example:: .. warn:: Ambiguous path When the coercion :token:`qualid` is added to the inheritance graph, non valid coercion paths are ignored. Coq directives ============== In addition to the objects above, the ``coqrst`` Sphinx plugin defines the following directives: ``.. coqtop::`` A reST directive to describe interactions with Coqtop. Usage:: .. coqtop:: options… Coq code to send to coqtop Example:: .. coqtop:: in reset Print nat. Definition a := 1. The blank line after the directive is required. If you begin a proof, use the ``abort`` option to reset coqtop for the next example. 
Here is a list of permissible options: - Display options (choose exactly one) - ``all``: Display input and output - ``in``: Display only input - ``out``: Display only output - ``none``: Display neither (useful for setup commands) - Behavior options - ``reset``: Send a ``Reset Initial`` command before running this block - ``fail``: Don't die if a command fails, implies ``warn`` (so no need to put both) - ``warn``: Don't die if a command emits a warning - ``restart``: Send a ``Restart`` command before running this block (only works in proof mode) - ``abort``: Send an ``Abort All`` command after running this block (leaves all pending proofs if any) ``coqtop``\ 's state is preserved across consecutive ``.. coqtop::`` blocks of the same document (``coqrst`` creates a single ``coqtop`` process per reST source file). Use the ``reset`` option to reset Coq's state. ``.. coqdoc::`` A reST directive to display Coqtop-formatted source code. Usage:: .. coqdoc:: Coq code to highlight Example:: .. coqdoc:: Definition test := 1. ``.. example::`` A reST directive for examples. This behaves like a generic admonition; see http://docutils.sourceforge.net/docs/ref/rst/directives.html#generic-admonition for more details. Optionally, any text immediately following the ``.. example::`` header is used as the example's title. Example:: .. example:: Adding a hint to a database The following adds ``plus_comm`` to the ``plu`` database: .. coqdoc:: Hint Resolve plus_comm : plu. ``.. inference::`` A reST directive to format inference rules. This also serves as a small illustration of the way to create new Sphinx directives. Usage:: .. inference:: name newline-separated premises -------------------------- conclusion Example:: .. inference:: Prod-Pro \WTEG{T}{s} s \in \Sort \WTE{\Gamma::(x:T)}{U}{\Prop} ----------------------------- \WTEG{\forall~x:T,U}{\Prop} ``.. preamble::`` A reST directive to include a TeX file. Mostly useful to let MathJax know about `\def`\s and `\newcommand`\s. The contents of the TeX file are wrapped in a math environment, as MathJax doesn't process LaTeX definitions otherwise. Usage:: .. preamble:: preamble.tex Coq roles ========= In addition to the objects and directives above, the ``coqrst`` Sphinx plugin defines the following roles: ``:g:`` Coq code. Use this for Gallina and Ltac snippets:: :g:`apply plus_comm; reflexivity` :g:`Set Printing All.` :g:`forall (x: t), P(x)` ``:n:`` Any text using the notation syntax (``@id``, ``{+, …}``, etc.). Use this to explain tactic equivalences. For example, you might write this:: :n:`generalize @term as @ident` is just like :n:`generalize @term`, but it names the introduced hypothesis :token:`ident`. Note that this example also uses ``:token:``. That's because ``ident`` is defined in the Coq manual as a grammar production, and ``:token:`` creates a link to that. When referring to a placeholder that happens to be a grammar production, ``:token:`…``` is typically preferable to ``:n:`@…```. ``:production:`` A grammar production not included in a ``prodn`` directive. Useful to informally introduce a production, as part of running text. Example:: :production:`string` indicates a quoted string. You're not likely to use this role very commonly; instead, use a ``prodn`` directive and reference its tokens using ``:token:`…```. ``:gdef:`` Marks the definition of a glossary term inline in the text. Matching :term:`XXX` constructs will link to it. 
Use the form :gdef:`text ` to display "text" for the definition of "term", such as when "term" must be capitalized or plural for grammatical reasons. The term will also appear in the Glossary Index. Examples:: A :gdef:`prime` number is divisible only by itself and 1. :gdef:`Composite ` numbers are the non-prime numbers. Common mistakes =============== Improper nesting ---------------- DO .. code:: .. cmd:: Foo @bar Foo the first instance of :token:`bar`\ s. .. cmdv:: Foo All Foo all the :token:`bar`\ s in the current context DON'T .. code:: .. cmd:: Foo @bar Foo the first instance of :token:`bar`\ s. .. cmdv:: Foo All Foo all the :token:`bar`\ s in the current context You can set the ``report_undocumented_coq_objects`` setting in ``conf.py`` to ``"info"`` or ``"warning"`` to get a list of all Coq objects without a description. Overusing ``:token:`` --------------------- DO .. code:: This is equivalent to :n:`Axiom @ident : @term`. DON'T .. code:: This is equivalent to ``Axiom`` :token:`ident` : :token:`term`. .. DO .. code:: :n:`power_tac @term [@ltac]` allows :tacn:`ring` and :tacn:`ring_simplify` to recognize … DON'T .. code:: power_tac :n:`@term` [:n:`@ltac`] allows :tacn:`ring` and :tacn:`ring_simplify` to recognize … .. DO .. code:: :n:`name={*; attr}` DON'T .. code:: ``name=``:n:`{*; attr}` Omitting annotations -------------------- DO .. code:: .. tacv:: assert @form as @simple_intropattern DON'T .. code:: .. tacv:: assert form as simple_intropattern Using the ``.. coqtop::`` directive for syntax highlighting ----------------------------------------------------------- DO .. code:: A tactic of the form: .. coqdoc:: do [ t1 | … | tn ]. is equivalent to the standard Ltac expression: .. coqdoc:: first [ t1 | … | tn ]. DON'T .. code:: A tactic of the form: .. coqtop:: in do [ t1 | … | tn ]. is equivalent to the standard Ltac expression: .. coqtop:: in first [ t1 | … | tn ]. Overusing plain quotes ---------------------- DO .. code:: The :tacn:`refine` tactic can raise the :exn:`Invalid argument` exception. The term :g:`let a = 1 in a a` is ill-typed. DON'T .. code:: The ``refine`` tactic can raise the ``Invalid argument`` exception. The term ``let a = 1 in a a`` is ill-typed. Plain quotes produce plain text, without highlighting or cross-references. Overusing the ``example`` directive ----------------------------------- DO .. code:: Here is a useful axiom: .. coqdoc:: Axiom proof_irrelevance : forall (P : Prop) (x y : P), x=y. DO .. code:: .. example:: Using proof-irrelevance If you assume the axiom above, … DON'T .. code:: Here is a useful axiom: .. example:: .. coqdoc:: Axiom proof_irrelevance : forall (P : Prop) (x y : P), x=y. Tips and tricks =============== Nested lemmas ------------- The ``.. coqtop::`` directive does *not* reset Coq after running its contents. That is, the following will create two nested lemmas (which by default results in a failure):: .. coqtop:: all Lemma l1: 1 + 1 = 2. .. coqtop:: all Lemma l2: 2 + 2 <> 1. Add either ``abort`` to the first block or ``reset`` to the second block to avoid nesting lemmas. Abbreviations and macros ------------------------ Substitutions for specially-formatted names (like ``|Cic|``, ``|Ltac|`` and ``|Latex|``), along with some useful LaTeX macros, are defined in a `separate file `_. This file is automatically included in all manual pages. Emacs ----- The ``dev/tools/coqdev.el`` folder contains a convenient Emacs function to quickly insert Sphinx roles and quotes. 
It takes a single character (one of ``gntm:```), and inserts one of ``:g:``, ``:n:``, ``:t:``, or an arbitrary role, or double quotes. You can also select a region of text, and wrap it in single or double backticks using that function. Use the following snippet to bind it to `F12` in ``rst-mode``:: (with-eval-after-load 'rst (define-key rst-mode-map (kbd "") #'coqdev-sphinx-rst-coq-action)) Advanced uses of notations -------------------------- - Use `%` to escape grammar literal strings that are the same as metasyntax, such as ``{``, ``|``, ``}`` and ``{|``. (While this is optional for ``|`` and ``{ ... }`` outside of ``{| ... }``, always using the escape requires less thought.) - Literals such as ``|-`` and ``||`` don't need to be escaped. - The literal ``%`` shouldn't be escaped. - Don't use the escape for a ``|`` separator in ``{*`` and ``{+``. These should appear as ``{*|`` and ``{+|``. coq-8.15.0/doc/sphinx/README.template.rst000066400000000000000000000237441417001151100177470ustar00rootroot00000000000000============================= Documenting Coq with Sphinx ============================= .. README.rst is auto-generated from README.template.rst and the coqrst/*.py files (in particular coqdomain.py). Use ``doc/tools/coqrst/regen_readme.py`` to rebuild it. Coq's reference manual is written in `reStructuredText `_ (“reST”), and compiled with `Sphinx `_. See `this README <../README.md>`_ for compilation instructions. In addition to standard reST directives (a directive is similar to a LaTeX environment) and roles (a role is similar to a LaTeX command), the ``coqrst`` plugin loaded by the documentation uses a custom *Coq domain* — a set of Coq-specific directives that define *objects* like tactics, commands (vernacs), warnings, etc. —, some custom *directives*, and a few custom *roles*. Finally, this manual uses a small DSL to describe tactic invocations and commands. Coq objects =========== Our Coq domain define multiple `objects`_. Each object has a *signature* (think *type signature*), followed by an optional body (a description of that object). The following example defines two objects: a variant of the ``simpl`` tactic, and an error that it may raise:: .. tacv:: simpl @pattern at {+ @natural} :name: simpl_at This applies ``simpl`` only to the :n:`{+ @natural}` occurrences of the subterms matching :n:`@pattern` in the current goal. .. exn:: Too few occurrences :undocumented: Objects are automatically collected into indices, and can be linked to using the role version of the object's directive. For example, you could link to the tactic variant above using ``:tacv:`simpl_at```, and to its exception using ``:exn:`Too few occurrences```. Names (link targets) are auto-generated for most simple objects, though they can always be overwritten using a ``:name:`` option, as shown above. - Options, errors, warnings have their name set to their signature, with ``...`` replacing all notation bits. For example, the auto-generated name of ``.. exn:: @qualid is not a module`` is ``... is not a module``, and a link to it would take the form ``:exn:`... is not a module```. - Vernacs (commands) have their name set to the first word of their signature. For example, the auto-generated name of ``Axiom @ident : @term`` is ``Axiom``, and a link to it would take the form ``:cmd:`Axiom```. - Vernac variants, tactic notations, and tactic variants do not have a default name. Most objects should have a body (i.e. a block of indented text following the signature, called “contents” in Sphinx terms). 
Undocumented objects should have the ``:undocumented:`` flag instead, as shown above. When multiple objects have a single description, they can be grouped into a single object, like this (semicolons can be used to separate the names of the objects; names starting with ``_`` will be omitted from the indexes):: .. cmdv:: Lemma @ident {* @binder } : @type Remark @ident {* @binder } : @type Fact @ident {* @binder } : @type Corollary @ident {* @binder } : @type Proposition @ident {* @binder } : @type :name: Lemma; Remark; Fact; Corollary; Proposition These commands are all synonyms of :n:`Theorem @ident {* @binder } : type`. Notations --------- The signatures of most objects can be written using a succinct DSL for Coq notations (think regular expressions written with a Lispy syntax). A typical signature might look like ``Hint Extern @natural {? @pattern} => @tactic``, which means that the ``Hint Extern`` command takes a number (``natural``), followed by an optional pattern, and a mandatory tactic. The language has the following constructs (the full grammar is in `TacticNotations.g `_): ``@…`` A placeholder (``@ident``, ``@natural``, ``@tactic``\ …) ``{? …}`` an optional block ``{* …}``, ``{+ …}`` an optional (``*``) or mandatory (``+``) block that can be repeated, with repetitions separated by spaces ``{*, …}``, ``{+, …}`` an optional or mandatory repeatable block, with repetitions separated by commas ``{| … | … | … }`` an alternative, indicating than one of multiple constructs can be used ``%{``, ``%}``, ``%|`` an escaped character (rendered without the leading ``%``). In most cases, escaping is not necessary. In particular, the following expressions are all parsed as plain text, and do not need escaping: ``{ xyz }``, ``x |- y``. But the following escapes *are* needed: ``{| a b %| c | d }``, ``all: %{``. (We use ``%`` instead of the usual ``\`` because you'd have to type ``\`` twice in your reStructuredText file.) For more details and corner cases, see `Advanced uses of notations`_ below. .. FIXME document the new subscript support As an exercise, what do the following patterns mean? .. code:: pattern {+, @term {? at {+ @natural}}} generalize {+, @term at {+ @natural} as @ident} fix @ident @natural with {+ (@ident {+ @binder} {? {struct @ident'}} : @type)} Objects ------- Here is the list of all objects of the Coq domain (The symbol :black_nib: indicates an object whose signature can be written using the notations DSL): [OBJECTS] Coq directives ============== In addition to the objects above, the ``coqrst`` Sphinx plugin defines the following directives: [DIRECTIVES] Coq roles ========= In addition to the objects and directives above, the ``coqrst`` Sphinx plugin defines the following roles: [ROLES] Common mistakes =============== Improper nesting ---------------- DO .. code:: .. cmd:: Foo @bar Foo the first instance of :token:`bar`\ s. .. cmdv:: Foo All Foo all the :token:`bar`\ s in the current context DON'T .. code:: .. cmd:: Foo @bar Foo the first instance of :token:`bar`\ s. .. cmdv:: Foo All Foo all the :token:`bar`\ s in the current context You can set the ``report_undocumented_coq_objects`` setting in ``conf.py`` to ``"info"`` or ``"warning"`` to get a list of all Coq objects without a description. Overusing ``:token:`` --------------------- DO .. code:: This is equivalent to :n:`Axiom @ident : @term`. DON'T .. code:: This is equivalent to ``Axiom`` :token:`ident` : :token:`term`. .. DO .. 
code:: :n:`power_tac @term [@ltac]` allows :tacn:`ring` and :tacn:`ring_simplify` to recognize … DON'T .. code:: power_tac :n:`@term` [:n:`@ltac`] allows :tacn:`ring` and :tacn:`ring_simplify` to recognize … .. DO .. code:: :n:`name={*; attr}` DON'T .. code:: ``name=``:n:`{*; attr}` Omitting annotations -------------------- DO .. code:: .. tacv:: assert @form as @simple_intropattern DON'T .. code:: .. tacv:: assert form as simple_intropattern Using the ``.. coqtop::`` directive for syntax highlighting ----------------------------------------------------------- DO .. code:: A tactic of the form: .. coqdoc:: do [ t1 | … | tn ]. is equivalent to the standard Ltac expression: .. coqdoc:: first [ t1 | … | tn ]. DON'T .. code:: A tactic of the form: .. coqtop:: in do [ t1 | … | tn ]. is equivalent to the standard Ltac expression: .. coqtop:: in first [ t1 | … | tn ]. Overusing plain quotes ---------------------- DO .. code:: The :tacn:`refine` tactic can raise the :exn:`Invalid argument` exception. The term :g:`let a = 1 in a a` is ill-typed. DON'T .. code:: The ``refine`` tactic can raise the ``Invalid argument`` exception. The term ``let a = 1 in a a`` is ill-typed. Plain quotes produce plain text, without highlighting or cross-references. Overusing the ``example`` directive ----------------------------------- DO .. code:: Here is a useful axiom: .. coqdoc:: Axiom proof_irrelevance : forall (P : Prop) (x y : P), x=y. DO .. code:: .. example:: Using proof-irrelevance If you assume the axiom above, … DON'T .. code:: Here is a useful axiom: .. example:: .. coqdoc:: Axiom proof_irrelevance : forall (P : Prop) (x y : P), x=y. Tips and tricks =============== Nested lemmas ------------- The ``.. coqtop::`` directive does *not* reset Coq after running its contents. That is, the following will create two nested lemmas (which by default results in a failure):: .. coqtop:: all Lemma l1: 1 + 1 = 2. .. coqtop:: all Lemma l2: 2 + 2 <> 1. Add either ``abort`` to the first block or ``reset`` to the second block to avoid nesting lemmas. Abbreviations and macros ------------------------ Substitutions for specially-formatted names (like ``|Cic|``, ``|Ltac|`` and ``|Latex|``), along with some useful LaTeX macros, are defined in a `separate file `_. This file is automatically included in all manual pages. Emacs ----- The ``dev/tools/coqdev.el`` folder contains a convenient Emacs function to quickly insert Sphinx roles and quotes. It takes a single character (one of ``gntm:```), and inserts one of ``:g:``, ``:n:``, ``:t:``, or an arbitrary role, or double quotes. You can also select a region of text, and wrap it in single or double backticks using that function. Use the following snippet to bind it to `F12` in ``rst-mode``:: (with-eval-after-load 'rst (define-key rst-mode-map (kbd "") #'coqdev-sphinx-rst-coq-action)) Advanced uses of notations -------------------------- - Use `%` to escape grammar literal strings that are the same as metasyntax, such as ``{``, ``|``, ``}`` and ``{|``. (While this is optional for ``|`` and ``{ ... }`` outside of ``{| ... }``, always using the escape requires less thought.) - Literals such as ``|-`` and ``||`` don't need to be escaped. - The literal ``%`` shouldn't be escaped. - Don't use the escape for a ``|`` separator in ``{*`` and ``{+``. These should appear as ``{*|`` and ``{+|``. 
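As one combined illustration of these rules (the directive and tactic name below are made up; only the escaping is the point), a signature that needs a literal brace pair and a literal ``|`` inside an alternative, next to a ``|``-separated repetition, could be written as::

   .. tacn:: demo {| foo %| bar %{ baz %} | qux } {+| @term }

Here ``%|``, ``%{`` and ``%}`` render as plain ``|``, ``{`` and ``}``, while the ``|`` immediately after ``{+`` is the repetition separator and stays unescaped.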
coq-8.15.0/doc/sphinx/_static/000077500000000000000000000000001417001151100160625ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/_static/CoqNotations.ttf000066400000000000000000001121441417001151100212250ustar00rootroot00000000000000[binary TrueType font data omitted]
coq-8.15.0/doc/sphinx/_static/ansi-dark.css000066400000000000000000000042661417001151100204550ustar00rootroot00000000000000/************************************************************************/ /* * The Coq Proof Assistant / The Coq Development Team */ /* v * Copyright INRIA, CNRS and contributors */ [remaining content of this excerpt is unrecoverable binary data; omitted]
p8ABB̟?B bf⚐ؘXě^Frd~ }C8W˗ :H{ԩS;v,\r 6h/_^1S6֯__Ή>}e؞B!ɨ@$m۶j*bbb\/@=SbۉaժUl۶ > #'2_z'?WyfiG?g/sy0߲loK?c\|ygbݼqd&L}oX.`1ь1}Eaez?fΜ_OTTM4nC4֭[ǓO>v{1eP0%|) F䙻_cwg#SXq˫XIR<:WѬKy%,]t}:`#Hn#֮[mE;cTV\rV0}bKcoͅ_X>v~1ǜB!⏣NUUU'x(lْ3fzQUf͚n@UU #8ٻ(M6JЫtH(*XП +"A!4Q# A*қP ɖy`B ޏsLfgޙ}nM=NGщ|5kJKO1iYW.dej!fSDzHb1J_ZѝQ.%֚_F%/_`ii#x6' ѡf͢Ǵަ|iO fez, Z  TU|w`XPU{hM`Vlfϡ8 şj+l[ !z2s{ qɨhxxi]iZ A|勾}U\4Gjxq6 U+5xҭ^:Jj+9ICӬSᇕ  CWV#yjDEEȑ#s;w2k,t:]ξ7͊@ZoJ&IAFtpaR' .΋o;#pј&iz74h% XkT/)hPSϒXyוnE{%Ԯ]WWW\\\tʙ3gX`ۋ!7~0$8g0&Mߙ{ۤIёt*>Mj:A%K#gյbLM¬ csbh4dY`0p!9s&q5 ˶8F0p[96P7^Vo2_4iL_۬;a7[6te4 ۼڵIהNG[jNrv?  P"kԩS7t:r?#zBQfϞ$ItRUNG:ul,leor:Mћn.@F`nL5M 3>gt9û,[vv$tÍͺҴU}yzf]ޜ>J9qGez=ǏGt{( ק~Mper g Ԉ)=wjz魟ue?e䜍}ɯ +:}?Oy{KAAO6mڬY39y$M6%555Z-`9*BժUIKK#>>IX,X,f3&޽{RO/lٲPN|F^ԩSt:z}NmjEPvmj֬hs'I~n6Q+Vѣser8::AAYb4ӧdڵZzIZzuFjO!JQz=uA$4=7(kw,AAA(@v0Nʕ \zx|||bŊt: 4U  O|"7FdYjժ93dAv1"F@*  “ K8AAAU '   d+AAAD "  B]  @$444g^`edd䬪TyrܼbhAA`0q8SU5gyAAx$鎟@SN9YPp:w\IAA`4pwǞ <==: %xVAAwŬYEL~  %=(EAAAPEDAAB'AAA }fܹh4"vArgZ4hP;Dw@nh4ڶmKX9;&eSAacrܯﶻ! “ARՔCJ&rXAAvXQ}ܻ {~*Urbu'~`4 3gXUL5d3;~>3Mc-$"3;ue E7tF x1k -/x+y?1 ?}螯TRڽ]qhIppulYmݷu߿ rѯt݃{|F^ؕޞIIl,S\=z'"5bSjQjՆ_سl(EA—1"rnhKmEv7{'~ނ9YK:%3`L24^eYc xo{.wQ$Y rNDwKdKQD1F}F?g~ZԩA =Nџzq#ާU}lKSrYN' t*{}Oe=-ϭ\"lW%vJVQp"ZWbЦ 0cGk=mo 2vaAԱ e$?jsBP-$mGpNyvx}}r.0HZA)o_,hphˀHۺs\M&jg=y&@%>NJsYǾJ/+.XIٷAS׳|:a0Mo'ήg3XUkKaۤn~{ s%CFNvTt'O͹8|9΃؎T1%r1 E$9QRS&IGb۞l OkIs/ſ=w3߂ ^{;IH7>tl~K_mwb~4WoҌTk]Sz.ϟ|nlfk8G߮)9_ۮ5ߗAj.쪤T<\G%ov"Hr`h*s8**{`^ϫIS>qO5U)XȷnɊ{K-ʞAg9[b*GkxgʵFX{R8/A˥K$Zwg%@}Y/ݩ;뷗բ*Br($67YE >~!$L6[uD|I.讟csY/G-Wjc޳ߵs;.K\KU[Ětg8uM3kkk: )Gr*ۜ>ǐƠzHK&dW1fK7"\vUĆCcKii.׼NT쟟Ǚ.CӾbnFOV vY\:|y9Mi2~0r\6Oir9fk!'~d@1rec>b~l>C'cەpDEѕc! !q/ˋ}̍`^?:zdsa[z!b 6 %dRfCGLݳ* ]ocCM7PUH:F8 [|,5b3l~vKpegF؀k|  XC8Ѽ.eҫhz@AYC:MF.ez̮P ; ii1Z&e,4a]2`6Z9qBC&&jh=!qP5;4q~4oOF&=\ ]ڴQ?g}ނyi@GJx;H3PˑiiȦޟfQk^jk%4ޥ oLƁyxqԣUdfeG3_cj_8dIwKHZ$ĤctT-["P \F./sӦ4lN#q!/օ̖CGF: fbU'WI^3MfPRNħqP05J*qKȩV2-WG/á ڝfKMTc%be[TUT22Y;؄Z0ZvҢe'XP"kӭPAUBt[Iۺ-q PҳTR>پ:]n;ahjWgڴ[׼G1ؐ/DjCc"Ʌ-χvpLO0w:пq!ݱeO22ꊎЖ=q5q piaӹzd:Bъ%8~XJZI3n7]5|ܰKF`!6zM(%-c  E6wI 遡bx9$4Detw /zjoX._wma>χ,դ(/X,Zd;=e2Ϥt6Qfdܴ n冄yY:bMU̎R ԭotP-l_=ਡr&Y (SĎ,iqւ&PGz 5rZvq?K-t[ U*epv]^$Z]A)'Fn5 wU7OY'61ZN96n?k\vRj"n5웆➯Nj%@!9c2QT Y:wT>4}#;^FKʞdZAMlMI\x*緕jŢYq~z/ҡ=Vbqx1**&f+jl2a4)6v3sMl;I9:58_n 鹴$׭`M>ʲM ..POH йe"l%:f4JnUs2Ê?|ݿxjBy=TRW*cЀr We(e߉A/yo#MqXWoRg*/U#bS$dv̎yGqW]ƶrbʲmYk%?<.2htJ4)yhO~mY }|:Hl¼*n Z u3pux&_$Kvw4ZHT,9тoݬ I|e*rbAV =W_Z7f}R1ïm{xhtZztH?.kB P{wӀzOpb:}6p=ɊdgmS~CfHhh:tcD+P heCu˒%u:g'z+:f <4xo;uBAJb ѻCAABD l &LJ̅D̲y'_-%&  %ϏC3tuz 88~(?;BzД'OD b<5w>Z\W~hޞrϿ  Bq&]nʫo_3G"ZS.-6>0il}!Yu=UoT+<ȓOKzԯիAAB$av\]szK43)㪡} ?XWe  NTYYf0nI&b9g*Gj8 zJxf-f֣\UB鈧L$?>{3qYida槇J2rRًǩf\;Zދ2Uٜ9- {qfv_@؄ 1̾>9X-߳?ѷ '5C7ߑkBnʵ`s^\Zvt*p@i#mOAA/r̕kE[D[D@ֵ.8+v@t-nKL3@G@V A]BiPÃsW04('3[T+zc/IȆ@"j%]9< s!BsC%$H4+ $E2$vN:Yʉ+ q/nX?AA!7d2N]oԗakΐqP3I(y#lEI5$4Ȓ(${8PKYlʚT07i(ȡY8FA*~%4hn]FIUe qZ{={Dg<}ͣOAAȍ2^÷?8 V2Ȕp,Kk亠aqYPM ¹H^pϘxIH5zܝe⓰_Hi1^=~ I:aOAA>4pಉ\=™W`pg T2Ο ѣ27ŀj$S5P{ i{h8ay)woql]ƚW1`ͼ̡,v;?*{'8Q)vl:|Y,?  q{X2rowhhV&FӫGM9DlEPn? 
Xw)aա V{m}3?b z_ʗuG{ѾqM|ղ.!c [QF$'#ht'@PwqTbs%|+zew%\3v͂%3:4.5yndwAA᱑=iR$_}R` }-gZݗu&,s,+Ve3,kBn`ka/U1fqM6k .^H׮]ILLQQQmۖr%Ik 3Bر i0`@~ <8~| 6vY{S ]o_uA]U⯐8 ׮Y8/pAXP ߍbF0:e ł&/m!(F;qf?HJϳs Nљ]iw/Cm>SڇyyM}A۱B)j\dLG҉];׷U =|r[&.hpvTL`[zVSY*]PɟnB 5Mg2JbϬSM\=w1Z^[ٞOu})FOIf]s娎\Kx"wgIdaUr;g7u';ǹ7A(&JhDAFgIШM*82$/]ʢqv86jP*Вç9݌5,qMXDǁ g ݅p{2M]#Sp {Bܳ_jZvcyN)f8CZ}!twu1yf+bR:c2ml]|@G!|=gg X*# ]|0| .yҹ%%ZuUC8T4oIACX߲TwG6 pjo_?e`dRUB <H(ͷl*i{@:пq;Z.W%__fY) R % +e 7# ήHR@DO.f.ƫT,%+ c!# lݝfbyK0䡊LƷEj9Oue=+#ԃ޸hp5vS)(^A9̱g(&?>fE Axc1"2njV(a kk ]FEADtAA@D̨,T3j Jc2E/%X1隕_*RT xVVb'ֳ}|AbOЌȌg毜NvA"%s58~- KӬ0>Q^ O1iɧxHLc#6ep*ٹ*8J~ ӹw~}AJ+&2'?>\2Upv$?;E̒EcC-ۖˆ\/,&%:fUr!J=Ct.4MV0{/2JKа%=WJ_ٸ˃U.M4jE!w/rCfB~KUq}XD"Eai۶͉鹀C!pp4#2UhQ]ݲ=Z ;ޏfdZ|G*xJql~fƏжgSI^d8q`flBE[[KĐބJ9v8SKţ^= <k~>xwdbgM!xrV~׉dYKSya51tz5! ޤCŕX:՟}D_[x^ySV jw.OÐ_*;rn);F3x܃T_Fɠ%q7*TYص]j>d+KtC^it,)⥻{Cޫ1sPUsgVm oDրDn;<HqׁKSsڶx?GOpAXP ߍbF0kj$@}Y_~{)Z}RA*Ɠٟ#VCWCӍA c.(/G~AZ'u'cL?Nډ_Lh @±JmA!7~'t6Bh31:k:6dfF`lfW5}İpd gJLt ~X8ST{cCd>Tv[[WqϿ]ǒ1C^;{ylyYI:>} 3WQcqAbd Oi3ޱ{xT{T-k//\!@;.f FRR(ڨKY4>&HaѪF_ -I;|+Xˆה*֦8'MEyOG%G 8Yͮh{RQ=lv݃ײP*G8+ O 8Qk7gĕ_=1â]G?cHٷǁ5 * ^ηRaـ5oqrӶ+h׏< <2^Lsg/&3&ӦPjoҹ%%ZuUj7l̀TW\%bDC 7V.:?9Fݛd/ΙlqWw%_wb-:p ˯w/otr%:TNIo3щx)] 9?cC~j,/6+ف Sk\0P1]ЧkX>Q5q!w,gnu2,!A֨X:}so\bƫ/ǯquR E N^&DT}H@?|\5ʄQAL`4˃_'Ѡw,KX-\^WXia%$ r2-P|'+YXo5tf}V}.܇<_mdU gooPgo={oqodwTJ壧x<3m}7}*ċ{OP+P¶oƑ&̜׃0"ADnRo ;\&d[ *װ{~NT1VFl9ΑU8+&aTޝV<-drvFbBQ!n'u7I8fJLd) ہdpB/e@P1]@1c/tj4g-d5i@3{8]KZr8KȮPw~O=$(oX\?A%?>OttyX84G^n/eLCƣN'Y$hhQ2${}|YۆhMaDŽ|эEPaAҍejLY|+_'y"p~j9;o,cPuwmMzH*$uY󞣁[/]ˌcϬXڔÿ3rr,.` coSpО~ʽ {oװz#6'xb%yz6 ͢Np!^7kf?$/[9K]:8տ-<{^ySeS4*O 8OŮcCeS#b';&px)s5ᑿ/7v`4I0ޭU-#y';~y{J$O 'ϯ"t_-…_xYKDmd3l;{ uu-BdI}ѱe/kX[l)׉ F7sSAhT@^'6%;u'HJd{#xeу!}5W1ïm{xh*.OܩdYĈc6s,-x^CщG1"}GUߙI$^^*ۮ.϶k]uuu]ѵ* @T("BHH3s@)65Nkn#|c2AMho2 8;(2?{ rC2EuK+G[n}\J 9{ #"""QDd]'l#5늑 `Ÿi(XGbлAt:n73vo\Ʒ6|1/\E/ݗ2곀.:ZawOjK $-^D]ݛ/WAp""">fˎIJԧV^9߯Ğ#Φ6Y\8fgj<~.F/O^lIǗa}elNA2FDDDMV?H<{h,w/X a~ 5ae7lWDԸ}0ہpۋcxa qgWk4jf\*(P]zbdc87.6#FL 8~4yn6VILxė/f{cù솑bUuat͈o6}ѵ푟-aG\t +LKdToHa*or89Y8wb}hZy|???Sh#z߉#U ]23I39_ڷ;⟕ĦWay29v/Ba_{$d%\v_1_:Jv~5^tjG$~fFAwt!RAtoTl; s?5buf/3)k;uooYg"aķ5 #*.g4~f{qW$ O1Vj;H_˸/dZ`-#3M\> ~v9Ųb5SI)6z1n4bll}HܝQ넁W\4~8iS˙Hj%1;3_0[q:G`@_,aNŒ;t†9( sJA2 0G-cQe7gZAD;[l'p>|3ǁ1FLW\&^1{.?MztȳSr:wdrd`.~ D]ۘ_d M?'a/%aTw[9m>xA3/v5'6[7yDY/EcCB"|l~x4IM㛃0l ~t{M#@8mohn!)-NG=?{宁!Dxb|ؖSiQ'?U&=n[J&"`fcZ-"""R344+'t#8SԤ[ԶÌyAq41un(\a(6+8)#:-+8ɸ<>-װW~2{ԹtL ;A&0}pmaU4(,sSMk#d "%8In5;|.qVaݸþɍO ǡtl]~/.a'EAAjYѝ,d{ub`xȆL㬿gN>vÆqIDs.IJ|L f`(M!KqT+b_(4>#,fWKZ>473^?;S%8d.^œg=AHg09lYdUHKI^b.Nt}# _#4Yc98c[8b]m'oHzldŶ$:w4}ޛBFRT\6͍ =1ܰ&D)8=c6h+]bkDhK;a0fJ4}DMN݃آvs,L28yO9: zfF'\R`, |C1>w W7SJ2g,cV'|s !wHO滛0C `յ<\St~z:{:s13SvQDDD*QfHTEDDD#)S"""""""nDDDDDDDNq;%""""""vJDDDDDD픈)S"""""""nDDDDDDDNq;[LӬ/,`2"O """"ҹY="UMFDDDDDD*RVQC2ƻy;4˫#9V&q\Ρ2UK?7$5Z:ó2uk{$?-<Hl=kwca8V1~ܿܛ%6OF$""""R [q:Y]ذNp%+n3ȄU xu`KsoĖo-o)xK썴`=o,˖Tl@Ūr$(Z9[AH!wnHƋ?;yWPIG>l|G#6"/r4Oi9V| 1_> /q|ӏ^g'y9]1)+Gn7 ur Ɛ&?]o"LYv8},4˔hh`ߑk哔aZrǟxn%h<+nEDDDD?ŵCy11Q<ޚYH&e[Oodh־ir &7*D-?07xh(/G9y'/wn !<#fvDDDDDTђo4]ht'DOմ?=/ےOw`tH9{\?t!ޗ{ohڋMQ%a,wGO/ hv@؏ї_Gݪue;qs[.O5ro\ y׆wb0hc,Ǜ 8Eec(3 1V"B,oނ_(D0!ޠ 4)./7M+^sۘ_dd-'""""D'oNp❒^p %w&$Ӂ->@F\8JrEpJ&"dq i &N{\*`Ώ}?y6 _NN^O֓;LrlV(62{) e`gQ,O+`>'4)vkSQn ,~)K'4tJN oeⰧ@Fң*+ ~!Φ5܂1+شB1HiB>;|b' =.1Lrt-JBI->yd6ܿ\RDDDDjgЎUO'K 2 7ej nC+/,_f\2;JhW ]f\@!s&+X9s;>΂,*LX7"jy"""""1k,sذaԩSIMM=kٳg3n8:,rw*L<33fw_> ..wX^ڳCDDDDDaR"""""""n9"rN<.՟G[ϞK KeH&9{9_'ˁO)%F7 I~{K>vTxR񋈈H5ՏlI)c3cYV"/ s&Y+bѧ g*S_@ߗB΍F*~zDL_fZ"~zC'X+,>Cvv{OoI̜ы 83Y3k ͺOIh}򞦙ϳf镶2+u16OӬXL@S'jf$zRMeP`Socwbq)7?߬ϑίW݈wȏ" 9n~2?ܶs-sWz|;V|~)@|3?!%+X8aV&rmQ5.?Kz": 
?/WZgڞSvuw*-'7_]]MaYL2>py't+fطٲp.fh7SVQ/f"DF±Gk?ȧϽJqa;|kO':o'^76ϗsYv#\G2#8Ivim&йi8ٿko߯ub7es=/t˾eN&! WP,89l i/m3W"6b[0)oJUi;HŸӈeY3#qwFauAL(pEgвc0^'Mόֲ٫ΆfU5C_|Qr ~3}ЩUZTeZ60b[/Ow3mĴ~DKhtsKJrg?au V!d!|B{ &=n[Pmȯf2j!p|"xӨft0-?ЌV@ꪕ뫺 sO~Vtu GJjB֑ۚdmmepvG.֕7oϬ|gE<{q}NO>/""RaQXXݜyDaO#A.A|b{Rp c8_*<;-^Y!t]FR hulӋm˰AUS{ ӦW`  ]'2`ސUm&0#uHg0 w=}}[L,"yl]sҁ.Q}8-UIz[>ws5#'vmPrkT^$"FhS:.廭KʟrMwD:_J 53 s5hάݬ%GЙYc:ζ}z֐֗IeXǘCi7,.;/S[bkVa&>Ie|<s,Zw7/fgaLy+-7|ĥu IDATqCD"?OyfFP ^$wҸiW#}o o?;-W-~kO7~VŔx f@w >QUO2d| i{`<Ox3i<f$v}=)MglϿHǷ57r|@zoY,a:}0vǕ^bI6Ca؎Ч _ܙ7>~g&Nn~/I |B5𞌿 s&:ҵ&W ʯtX\}TeG~)Y[^#F 5Qx#k™Zw5yr2W}ÑǑ/50>7Zj[s6.G̍8W Ԡr} +jK=y-1niLMN0^5;"""0f͚e6 T8iHMM=kٳg3n8:,jb"R!G{_LenxQb'Vĵ\&`l3 |0BYzblk!flV"""""R{|ޮ#NX8e04g"*Y}MLhᝂcF̠T^~7X ;Ƽ8,+0yHsR͚A)8VVMǽ8RQz.~ """""VI]f;4OP3C=HGDDDDDDܮ%"y qEDDDD=5y[K-2XkՆ/[^0vȋwwd""""" RK$؏MA` =.t""""" F͇fyyOQI/Qqg^1C`RV.F@>pՓpt?zrAem|X"L zIf0a)\4ƌw>`(ݫY7rLW/""""rr /ρ瀱|`o{P'Y9xd-Z3 K/බ'7`س| \uv`Mk%ǢUP/0 -hՑo`31mBٙ0g FƁŀ0S,W7M DkB\REDDDD=nHD 9'>$p-bj/ܒY">,(ި{<ޠ0r xNRsJl`)a̼ Rj̻#8`zq"""""RHaPPӵ} fl_G [C/l^T~f6DApX:6`ٶ3F %(Y:w5õk澈jJGd-)GĻ֜ʷ9="Tzk,|]pǢ?xwx4 & -Zf//?0kp*_%\߷+?DD%""""v 4 aGXXXe $++C?xеU_Br! P;XSIJ~~>ׯ4M+wWӔxP񓌪vIH]hт*M(4MwֺH}`t""b֤ߗz#y17Ms<}4MNg?ϊhUϹohMxW(M6tN7f!w[U4M, Vx:??ypY쨕}}EjyO3,Д-"e?̪E 0۩IΎxxpLjipo&`⣇2j@?;ǎC>{>x3h@L;sO&B6& waL?}h/OìF ÑG^E0fʳ]v8+)ga̘㧜'FjCPf+x|OVy ^ߙRZ}Ez!dZ,[ግP/ -6n_Ic1$~4zDw/o4-i؛MI؈2+l|a*w{ƿ2<|g~ĺ 1}OnyaՉzWX Cz'f7)Yp?&w?Y `'?"R%ze28(d+`̅Sõrw,s7kՏNuj6sՂCXj'iF(mc,i]b~P{|N% y߅\7)ކ}VՌzWtwryq.ib:eNZˌX?}X.+R3Xt3v7 S_"2Rr[ ?~3+1efoO<̟{Ǟ| @!= _ SuwwW=#"""FK;»ߒ<=W2fx<#0GhJ<']gkd qLf/8ʻ_$ f>2{'3KZil~ KOT/~+o!w? "M<^NJnVrv-:Rӿ>[~Z曷KX0&]yǘUoT _ 7<̻Kij qUČ?Nb>bKQh޾+'9Y{1Bջ9Y_wQ/{g||ֆ^g/5"5t%+Wd5|nhxZ=Ճߞ Y^H'xpA k6oכ 50d?v䑖A e+(=rw.x?0-FCOrW̞G*+رc>c,!*5 e,߽mØ6>1gkV%6?%kwN-c1*_ޝ~GfȠȳﭖܯsDNnbbWY=3jB7,` KƆ}"7nL[EDn"Cym2`38(8-sqEx4S`/o4 }# ҏ7qj,ݳ7I~GU;L$NO埞}C0ڏ1⹅,]Ds[#ܾ2{1tje,c{ߎCI_g* 掔 .˺aiUQi=8 32ùY_wQ""""?&%ڑKFrּin|vB?1zg$y72Ly7aSfQxLnf-gQ`J\5d0cȑadM KηWpgeJ;y˓k|z"vZ1C9N=V2Q+5D,zQqV Ղa I2)O|Zc _y!}"9oy nc/-iKh\k}S`/WdI%;!;%QЬKNJYW60ic+lHgצ#whCIs [aJN6NcoWw``u>xz_\ja-pUKo)IaɲE&?/ ;e1[N%Li``TZO[!\>`:\| űs"DDDD,y[߱B[h{fEOr~"Ղ%ו|Ee,7n,2{Jq]nou9ĚyG+; !V*osե֝C%""""Yc\r:a8?ap1YgPPPP} !88Z sw'!C\%#"""MVҥ .BoFǎqcp8ٳ'k׮0 /c9tCne.HC4,e%""!Ү];oNZZZs9PڶmKxx8n} hѢ 6~OHHÆ Yfdee1khQ""""!aUm "88إiA&M2eJKp8'33rZJDDDD =/ЬrK}JDDDm=BQ\0yNj*ɀN|DvkUw^|ǹ>2MM6y: 9~t.]q %"u%x:i@/^\㕈[(qO " DDDPHzxDrr2Zz{:S~#"nvez|Ԉ>JDDD^#NkaćPf!5J]۔9Le}@q/ܾvS ρ0䔷7\y\ҝQ?*=h?Ldu,#Vl'<G@w=ۼDOFp z4t$e[0 FËN%V|hO=}.ya0zk W|G?6}@xb/eIZ  /*;/!d -zR+dBÍs k`0o }`_Q+=߅JGM?3JDDDmjd|Y B΢U#0}$] eX (iqY[; +/ݳi ]ECK\/;?l^ NLX8ܸrvW^k篒s[^|;a_!}3:Q+_[`zxL]:8n_. ҄ v551 Qcb`!Jl VXPP@[~.Ҷ<}XvΜ93;33g`0{pݰm8Ů θB pMJR$u wZtxm ? ^o`mDu~'=[i¤c!9JVJZ2 )|UۿD$Iw6tLè0 W`N*lhɝ0gUd~)ZyLէ_WephhV0! 
[… binary PNG data omitted …]

coq-8.15.0/doc/sphinx/_static/coqnotations.sty:
% The LaTeX generator wraps all custom spans in \DUrole{class}{contents}. That
% command then checks for another command called \DUroleclass.
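% For instance (an illustrative sketch; the exact markup emitted depends on the
% docutils/Sphinx version in use): a span carrying the class notation-sup
% typically reaches LaTeX as
%
%   \DUrole{notation-sup}{x}
%
% which in turn looks for a command named \csname DUrolenotation-sup\endcsname.
% The \newcssclass calls further down define exactly those commands, so this
% example would expand to \nsup{x}.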
% Most of our CSS class names have dashes, so we need ‘\csname … \endcsname’
%
% \def\newcssclass#1#2{\expandafter\def\csname DUrole#1\endcsname ##1{#2}}
%
\RequirePackage{adjustbox}
\RequirePackage{xcolor}
\RequirePackage{amsmath}

\definecolor{nbordercolor}{HTML}{AAAAAA}
\definecolor{nbgcolor}{HTML}{EAEAEA}
\definecolor{nholecolor}{HTML}{4E9A06}

\newlength{\nscriptsize}
\setlength{\nscriptsize}{0.8em}
\newlength{\nboxsep}
\setlength{\nboxsep}{2pt}

\newcommand*{\scriptsmallsquarebox}[1]{%
  % Force width
  \makebox[\nscriptsize]{%
    % Force height and center vertically
    \raisebox{\dimexpr .5\nscriptsize - .5\height \relax}[\nscriptsize][0pt]{%
      % Cancel depth
      \raisebox{\depth}{#1}}}}

\newcommand*{\nscriptdecoratedbox}[2][]{\adjustbox{cfbox=nbordercolor 0.5pt 0pt,bgcolor=nbgcolor}{#2}}
\newcommand*{\nscriptbox}[1]{\nscriptdecoratedbox{\scriptsmallsquarebox{\textbf{#1}}}}
\newcommand*{\nscript}[2]{\text{\hspace{-.5\nscriptsize}\raisebox{-#1\nscriptsize}{\nscriptbox{\small#2}}}}
\newcommand*{\nsup}[1]{^{\nscript{0.15}{#1}}}
\newcommand*{\nsub}[1]{_{\nscript{0.35}{#1}}}

\newcommand*{\nnotation}[1]{#1}
\newcommand*{\nbox}[1]{\adjustbox{cfbox=nbordercolor 0.5pt \nboxsep,bgcolor=nbgcolor}{#1}}
\newcommand*{\nrepeat}[1]{\text{\nbox{#1\hspace{.5\nscriptsize}}}}
\newcommand*{\nwrapper}[1]{\ensuremath{\displaystyle#1}}

% https://tex.stackexchange.com/questions/310877/
\newcommand*{\nhole}[1]{\textit{\color{nholecolor}#1}}
%
% Make it easier to define new commands matching CSS classes
\newcommand{\newcssclass}[2]{%
  \expandafter\def\csname DUrole#1\endcsname##1{#2}
}
%
% https://tex.stackexchange.com/questions/490262/
\def\naltsep{}
\newsavebox{\nsavedalt}
\newlength{\naltvruleht}
\newlength{\naltvruledp}
\def\naltvrule{\smash{\vrule height\naltvruleht depth\naltvruledp}}
\newcommand{\nalternative}[2]{%
  % First measure the contents of the box without the bar
  \bgroup%
  \def\naltsep{}%
  \savebox{\nsavedalt}{#1}%
  \setlength{\naltvruleht}{\ht\nsavedalt}%
  \setlength{\naltvruledp}{\dp\nsavedalt}%
  \addtolength{\naltvruleht}{#2}%
  \addtolength{\naltvruledp}{#2}%
  % Then redraw it with the bar
  \def\naltsep{\naltvrule}%
  #1\egroup}

\newcssclass{notation-sup}{\nsup{#1}}
\newcssclass{notation-sub}{\nsub{#1}}
\newcssclass{notation}{\nnotation{\textbf{#1}}}
\newcssclass{repeat}{\nrepeat{#1}}
\newcssclass{repeat-wrapper}{\nwrapper{#1}}
\newcssclass{repeat-wrapper-with-sub}{\nwrapper{#1}}
\newcssclass{hole}{\nhole{#1}}
\newcssclass{alternative}{\nalternative{\nbox{#1}}{0pt}}
\newcssclass{alternative-block}{#1}
\newcssclass{repeated-alternative}{\nalternative{#1}{\nboxsep}}
\newcssclass{alternative-separator}{\quad\naltsep{}\quad}
\newcssclass{prodn-table}{%
  \begin{savenotes}
  \sphinxattablestart
  \begin{tabulary}{\linewidth}[t]{lLLL}
  #1
  \end{tabulary}
  \par
  \sphinxattableend
  \end{savenotes}}
% latex puts targets 1 line below where they should be; prodn-target corrects for this
\newcssclass{prodn-target}{\raisebox{\dimexpr \nscriptsize \relax}{#1}}
\newcssclass{prodn-cell-nonterminal}{#1 &}
\newcssclass{prodn-cell-op}{#1 &}
\newcssclass{prodn-cell-production}{#1 &}
\newcssclass{prodn-cell-tag}{#1\\}

coq-8.15.0/doc/sphinx/_static/debugger.png: [binary PNG data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-coqide-compacted.png: [binary PNG data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-coqide-removed.png: [binary PNG data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-coqtop-compacted.png: [binary PNG data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-coqtop-multigoal.png: [binary PNG data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-coqtop-on.png: [binary PNG data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-coqtop-on3.png: [binary PNG data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-error-message.png: [binary PNG data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-show-proof.png: [binary PNG data omitted]

[CSS stylesheet; its tar header and opening rules were lost in the preceding garbled data. Recoverable tail:]
.prodn-table .notation > .repeat-wrapper {
    margin-top: 0.28em;
}

.prodn-table .notation > .repeat-wrapper-with-sub {
    margin-top: 0.28em;
    margin-bottom: 0.28em;
}

.term-defn {
    font-style: italic;
}

.std-term {
    color: #2980B9; /* override if :visited */
}

/* We can't display nested blocks otherwise */
code, .rst-content tt, .rst-content code {
    background: transparent !important;
    border: none !important;
    font-size: inherit !important;
}

code {
    padding: 0 !important; /* This padding doesn't make sense without a border */
}

dt > .property {
    margin-right: 0.25em;
}

.icon-home:visited {
    color: #FFFFFF;
}

/* Pygments for Coq is confused by ‘…’ */
code span.error {
    background: inherit !important;
    line-height: inherit !important;
    margin-bottom: 0 !important;
    padding: 0 !important;
}

/* Red is too aggressive */
.rst-content tt.literal, .rst-content tt.literal, .rst-content code.literal {
    color: inherit !important;
}

.coqdoc-comment {
    color: #808080 !important
}

/* make the error message index readable */
.indextable code {
    white-space: inherit; /* break long lines */
}

.indextable tr td + td {
    padding-left: 2em; /* indent 2nd & subsequent lines */
    text-indent: -2em;
}

coq-8.15.0/doc/sphinx/_static/notations.js:
/************************************************************************/
/* * The Coq Proof Assistant / The Coq Development Team */
/* v * Copyright INRIA, CNRS and contributors */
/* [… remainder of the header and the helper functions referenced below (annotateSup, annotateSub, translatePunctuation) were lost …] */
    $(".repeat-wrapper > sup")
        .attr("data-hint", function() { return annotateSup($(this).text()); })
        .addClass("hint--top hint--rounded");
    $(".repeat-wrapper > sub")
        .attr("data-hint", function() { return annotateSub($(this).text()); })
        .addClass("hint--bottom hint--rounded");
        //.text(function(i, text) { return translatePunctuation(text); });
}

$(annotateNotations);

coq-8.15.0/doc/sphinx/_static/pre-text.css:
/************************************************************************/
/* * The Coq Proof Assistant / The Coq Development Team */
/* v * Copyright INRIA, CNRS and contributors */
/* [… remainder of this file was lost …] */

[fragment of a later HTML template; its filename and markup were lost, leaving only these text nodes]
Other versions
v: {{ version }}
{% endif %}
-rss&"""""""""""2.|cɒ%KQ6uI'%V5﫯J Fh[X"m{ADDDDDDDDDDٔo* 'w54 Z7жRDByOG2U&"""""""""l*Q "WxQҡH<&2[6Uw ļ>~޳gϼcǎ/MDDDDDDDDDD)F CB@$TA9rd3cF} wbڞ}ȼP+zdIsZ뾴mY}-|XZېEo(ٹxKv 8i"V{g DƎwȑy9[?-=}}?mW>!  ʏ+HB~ u0 D d[ :m"v"r|R뮙3g/FsC济8PHF D޹пziW0)ʶQٗAFhÐPe6)t5Jo/Ph]Ɨ*X"V@RD 2l7HaH2B4H@$o DbsAK>b_'Ct*Ȏ 9xDJ:@# 4nZ|ͷ땷?Ǥ|@?b BDDDDDDDDKCK>b_'&" 7(ΜY%$|+P*@?X>h8"6D"u%&;_oN6Wxt@˜aG4:uw-ES|*bq^bf BDDDDDDDDDe_-QaH}_Kd rZ4pV1^AM2" J;"e߉:I@dgy>3f`ҥK}Q 5E;@b DY"CՉ(b RE3HǡH"׿>?^CG>hs==kW"3!"""""""".QDm'*ɠ#݆"I E⋼UO,2(쓙#6;v7s̼J y[fH D ]+"""""""""JM 8i(R&GEKh=yf۷oԩSۗ;ز1!C"Z$z R0-@RyS",F Et *4p!=PѣGABNJwM6 o7r}ȼ֭ Ml@nl48`b7L@dfRYޅQJ>o8T|U@P$T",x@dڴi)l/>/_>__ 7Be[k(5ߐ_:vʏa8Q@$-a> o0,!""""""""[~кm@0$wN}\ FhG-: !'Z>?"Ö`HZ#ȷuM'چa BDDDDDDDDDDUDaH?D&i BDDDDDDDDDDDL"nDBlْT&@(@dF0$d…Ker DQD<; ?~R""""""""""dTy;vۼy?~<LQ2*Ӂ1!""""""""""JF D(c BDDDDDDDDDDD"""""""""""Q@2>"DDDDDDDDDDD1!""""""""""e| D(c BDDDDDDDDDDD"""""""""""Q@2>"DDDDDDDDDDD1!""""""""""[Cr2 @d<" 1x D@c 2@dmX^xE-ȶl"^u|[~产N=+[Av}@` %P+? ^'@Av}@HHqo1ǧ?lؿE?)^ψ^w!Žx$P? D,@dɔgĂ ^NHz( "ٍe٢"۴E-Fa2sO|ZKӧoԦCG[ƧqX'ķ 美gϙ+eCu5q[eڷ봤.:m;]^љ74kZ\u ro2utm'ߧ2O~*獕UokבQcʾ}{ŷl鯓Hw}k;ŋķD` q'J;2!eBy{.gWww@$}O[0{:|G/a "_HQ DR:f RR2U1Ni"ىHjc :t|@$0a R}$WD}c/N1'z2} b;v߲!k֮ˮV.[&e={ۉ;!p`늁HlR2]uE~{5;ab7m/!S,W,O &m"sT9b? k޽{d2hSŷlzjB]{zM)D` +WGo˵I 6jb?={=&v;vBnǞԒ]]M^oݯoIl,^Dʊ/^NpEz?׏)ng?{aA^O>5T7m.TA^}5)umH{}={W>[6_yJ#}?ޟV㻇yTyם}?i+#\*ZM^:USRey},]F`߇4/žXA_}K>aHx'}>H(r1a u@$X_I.{:1c_"'}cȷ_!v|;O2)l{=D@$ܟK"=Ry/k%=c @$ {cH-k%}O Du,ܵk Z{Y)rTvl]VHu_d؈bjroDpxJfv.}bۭ>~WM4E5ZC<^ǃ_}U*rh|qG]wݎ-g.idR)L-~qM]oN3\8d˶dspMbZ}w}KTS܉xE|< ;aev?zmL)| ߎGb>,kq7mKR؁}X'  }]VZ)a'|f{"+Q{{$x>rŽIqSǻ~1S~.~F~Hqߏ}>{ZJxx$d }Q^Mۏ~ݠ >B릸 {c;~Hqp~/>" D} "vJIHec" DJr`j}ȉ_7zƺ3+ocH")}*x׏֖jo߷?7^ b@]2].UD:ј5~^7,T{w9;0K/߷۱~%Hhw]B}RՇ߲>vd1b_Dx_vBEz{?Ix{!)?vϷ$jSD586m'v?}>c=KbD hVfܹCl `ϯ-o17w}x$+'lE$}"_#*8˕"?Le?ac"a D"˥ "GzD+o;SN~-@$^0I." Dba3a @+}W@|J} ŽFI_x띷m/chofq?mNbNh_K#=޷^~ف}o%[J7].r* {@ľbng*@.`^9rc*bз?+^sg\xŠEsYbu rni}0u.n˥H'_o_v;Wt^7_dkYlKf5Y~O{)|ۅEze!HS'Bq<$YڿG"}?Q?e^ߑ^Z?}>c=bxn.k6g M[p[~3ψ Jx~ny<y}*`{S {}DOD}}RBQJLEWHٰ1~J)qw}"'_r"_d.'^H$siӾeO$Q~H'罬;u19eHb޿U {}0@>ۿ릃/?}uv,nۇE:qjޝ3[>_tS^ߺ!z*yB|W:2l(V*ױ;1;wn=zDn*zvճ&=_o+NzXyqq3^v`DK2p黽 R?}].UD:јWOd=Pqo"Hd~]`.f>,~?- D섂}>=5{~NDV^&.[_Dz}=Q"=dϥ+QT=Rm$=Jz|gCo.KH7˕)c}V}^.^$} Nfa?7HW{I'D>/H DRDIa @l똁HXdP6GN0)@$,Qەȶ_W"a \\yb Jz|@HHq?7a"7^wg@ĴIZ ׬pB$)+%_.$I?&}aYmӡ0@rr\?Coq4/O:7t2_z~qAn);m>Pl_؇U }kܴإ*uPqܵDseq^vby iѦz`.*"h4}"~R6n(#nuy={N.6@K2_V~gs}vKP%=pv:KccpK~ϩ}֏y{ݛH;H׃x]?σ]o}W\!v赙m'[S)+RIvIѬ7ߔ w|{$ֿmTѫOWn_rv⌋o̫"t@l똁/n1)\;$o\ D"%" DLH@"{?hv]b}h~YexǽĈ}Ha*N4Il}F{`SQA[G^oZ Uowfv6R# Dkpr??d5Ol;ۻ[0nׁvv[?ϥwaֱo*7UEc,w1ޟWOdهW}ۇ0I7m'CMRvn.e륚},vɞ?y{e':Tl0lxnd'Juol;^wz0ŽOLyPѺ}GC8]Gq۷Wlxt=R})kd`{CbXm+$xnNv ^}߲!?-_.eCysacA_&Q{'"i }_#*l@_a "ec" D c RtǺ3+׃a R0F;" D01>}e"R@?JD:`FzK@L}@,=df {@` ;%X2#H&nRd "&zC(/YTዾmλ\'!e$=Df {@` 1@@i_Ho.Td~ H^)@8C*~ H^)θl@d," 1x D@c 25ٰa=@Fa 2޷"F@d<" 1x D@c 2@d<" 1x D@c 2Ȟ=e2g={o믿.L>]^z%y矗'ʸqg?~̘1Cv. |s`}]=(u>0@ ;$m 2oDl=ێٸq[N{=A># /ۦ>!k){b9,FYpEYoŷN:ۿ~l;x|-mS{w[ķd?G?/k uJI7+"ӟxdݺ`$r`$>uOY"{:5/Oێ6RО%o޷ʚW؃W=)9_VuM+[.>xD:0zȔ'ķL{0GN~T+_~:H'[.u܃ķTvpv2S؀oJə2_ =lSYM{Brs69n79Nؖ'N#mO?TX7odlPovOķ͌olj׹?r^# }H*_.|R~|QYLE#^5:le:wсkeƲղߺ"? (9" D2 0"E0"HRڵS>cٽ{؉;v $ 6l=l9[o˖-fq_?&PlԠAYS-[y뭷dڵ⻍LylSMv+_0\45W-g}59lW:nuݧ[W˗u :( ~.쭶raZɑ-^k.gYkɚwӉUCrf< _/~JF6 l$md O6KJΫeݣK>Dc @H&b @$ D0)Hb+"L>ٶm%J#K,9r|Ro蹲f@.ui&Y`؇ۉ1d}h1$ӫ"5~IuOot%=N(L&+]$m}ef{rkrtn7f4/?|T>Y߯ꉘܜe`1XO/_ C wzc98aoƁ.=hE%pѷ:&o䭷ޔ^zI쒉{.8}"/_. 
.ay7$>ln-/{/"Gk+^#>-roǶVbj=ޒ=Ȫ._KeǸF>C K@|t()radߔ`|`l]N^)_=W I-dS-o[1a @$0a vB"` D` U7 @#vC Drss)!Ha;K 0w bNYtC2L4I4i"njQGʈåaÆ2glmOl1l2fh9j4LdM/ctC/@zrtf;Ym#}ȶ{|˪r|n7qчCA/?%_ W6<\rG{i󗇄5%+{qˏȁZ ķL_^j-_}[V_}:X ;lj|"Db @H 0)"E1*ȪU>>D}׮]bn'$::T6h~B\m̞=[>C=4x`iޢ3RG _6Tl0bw dOɠ'mo'#͚5NjTi'r&YvyA1rdf۰+/6əTWM'aFוU]n7>%Ώߖ=i=dϔvoj˖OאïCBCr_R9VC&Z~جrpRjW[odĉb l)S7/^x~D{J6־K{T6N.Yx-iM6k`[owh[۔Fmv7־CxZ|,ܻSCwNl*Y=NyPl}04vϟW'z’5{|92}~aY:ٿi |RV5؛PwTˏ].m"c @Hc @40a @x DJHR,^H$6iR{E7|zl9[ݞ굫N}GRV*޽{7i׮<ҫty J "6cYҠa`o!=)M6T>L;ͫeuʒ#e#ȫ-]Ыm֒;T[M]6iށŽ˄䌿GOo!F֖Um*MwaߪOe{M.^*ՒowM(_n&'@ĚrƲŲoRݗLrdF+ə쏐qwˡ:6R/OqƉ}X @,X vBcnـӦMC?C1cF-ܱ]pnRWvժ..K#֯%wȾwa__>9a,i\[lv;#{{v7\#9;v1$]:pZȟW=#G,;>B}ɝ0zrtVG1y.9sS*;ir}O[ȡc>*k_+;?!mf]fʚѷˡ!prյo}{)&0"酁H@ 0(KU}bW?~FڎRo*<,Zl9[oɶ٫w>@C]/Ciڿ 3IAt)ָͤ﬚OՔuO/b&9> f?,6p?v7쑪'ŷluJU#:1wʑV͒k " D0""10Uw}W]zdΝcl(75}B75uɳ d/^,ve˖]">GzJƍCұSG ؇8@ iߡԯ__F!N2DkErrxf'9Z˰Y -.afr;֐S~1}yGgj>&Sg\k;lm}l{䌻Gz(w;fiKno\Y\\m#}bn[tRygϖˬYd̙b}WEm9[϶cM`di&aC^lv|~{U)Tm.8~L}|>Yдcbdob۵۱^?M&1$E uUvadeˤA}JKQpLȊF}Lo-eFr|#oL 9B{';e}fr(7'̷l6 l)UL#{?|1nY׻db0a @$ 1)Hb1a @#D5b?"1 Dt@>dN|gb'%{R:J>XR殾GYY9kG[KlذARYv铕+W]*.e{L`}aҮC[i߱=*·Lm.eCU56/._ {ԓUmokI~y에Iu eEk*ˁ1.ɚ>]#_%[ 6ѯynm+,L6UyD {ȳʎeerpvm3XѦ|ѧ~UY6uAN"m0a $?" DD@$(6ٽ{;b'f̘!/BFt~zj2{}e2jV7q!ׯk׊].2ydٱcS2`oҠaiӮm&& 5}2ebDne}FrW}念_*{C&pٚV˚udɽɚu b˧*d܃m\)\,G!{(U&`9%n 7gTi}xo`y,l)ndad2aM6ۻwȂ dΜ9bh4s|zۮݎヨ7/R>oL6VI6R|s0XT$[5}W_g۱%W1$dMNJduJgRAzoM/n=K>}J%˛^-}OCF-7,߶^˾eߛ$wackҺo[" D%"%@("E1a R DNHb1*ȶm[e9bz7d~m{Ne52sQu箕!+V)SAf嗧KODg}ҨQ#i޼i:*m۵vIzd=Tm},kt}'c=k'W?XKGY`m9e` K`dߠ`rF)~Iw{},ZuU-+Ȯfêɾ~7,K]){$^#\ +\%}(T?:\'Z#gi8GC 7~+Q?+Q׿ZRuDbL_yK`cɒ%e)< rƶcuo~ѕA#Yw}ϷdM7ʦ aoA>SKv:ֳvvvݯ/gB`1cD{~[~UrƲX'eyNo"]*{@N`J34=XR2m @6S ` @$Ht0?"@"ׯ_l b'l`a>7~5Ezll/$ >엀6ґ}wɑɪ% yǮ~daeɝȮ7˾^> ,mxSY*jʑW:Y=dukdQUOFqqXU ?[o]ZqbzO~o`/_[ֳKEvvv})޸f7]um5ɺkV\s_ߣGW_acbvlv;?qQf=TS6W @ 3$x lYA7ٵ]|F?|z[A%G:tl/-Z6/V>KvmcǎgnݧE3]C&^ ^卮]rMroʁnOo9W|1_ Koi%,{\EL*PYXNr{XX὏gkɁ!UȋkeI"p*}+J` rǡ*Hze߾}E~''v{or^ton'k. n"kWU]%j]?G_ʖ esGmnnǂ-RQ|}f{94x,5{gKkdMr(7'̷l=D O Oy 94*Yӭdb0a @H)" D|"` @$&R|2o<G.TȡCرcb;1{nٱcG6H8p@9"-0[wrJKc*MwK^=MҬESMk,eBZn!=!m o^~cYX^He}_Q^,)D[+AQ>PN=|-eE;wӁpPbӛ6ڞWI;$'xCNj&&F a^'^/9.]-_$:}*K?6wk% rtrs9B[9;oɾˡGˋC_+??Tњ8qٳ~͝;Gm9vIՓWW7U z*~|voYuUEm9˶c۵۱۵Օdi{ (ю{vfdg[z{Ԑ5 .6g./o^/Kq1" D0a @0"` "؇ƾꫲfHN ٠"_}/b?I?.m۶ɪU,^Xx K=Ұsgh#uҬy3P7i٪4mvWF vRn]iԸ iޢtlR~dڿa,k|l{؉})-ȔvR܉ܽ]DΗɑʖɾu+wӁpWI_ A`A3ŷt}hu|֢r>nGΗʁvHn|/C]M.*K|,}k Gz {N?D?3!r[?QReq]Oȴid۶~oڵk_9sf}ݖҎ]]~&ھ; ˧W]Ken?-_)Wl-gvlv;v "{ ([Ѯ{.o!݃}[U8W=M|D;̐ew'Q prUŷM1?@" D0a ( DD$G Df̘!˖-%KHl`b'mv ,w-Z$6 2eSihժ4iD:u(͚7&ՠa}S,YXV\!Փ o!7m";u_l0e rhmbNhvSWˁ|)|Zג۫%k%mwwʡ>$r%Jn[GeK\"9ܷ EHqP5 ~B3u[~֭b۱~ E+nyJWNw,}آ_&azۮݎKWCF>lDG}ْXOWɁ߽ŷ.63=ŷQw ~v$ke-"m"c @" D0a ` Db @"&LH'R ^F-T&Mz^4h 75_%ln#v m&we-g6Y6xr(t7`:.oNj_$('>W5"9ۃ C:ZPLGn!`'? 
m#'k7 yy%cX](;\&FV-6SٷǽC ;^*mG91xV[Ixqh轨`m]2q׮]bKmgݺsω~_|!c_KCۮNǐ 9ɤkʴ?VUٿFs|l=NvogeMwSY#lP7DCWM.-ko[`E;e{K~JYY4(| D0a @H"@ďHQ D Jm R-b^K;򗿔.H*W,jՒ͛ݟG}T'O> 2AI߾}GyDz!ݻw]J.].]UZu=ꫯ :lѰQW~ZhU]֯϶kcϋ/N߶RduZr.1Q܉5V7!rɣ3-C̿tm# `m沢ْ۹jaZ_(++%5ێ6RɎ⚗˂;ɍgz?Vy[嶹PV+ZxEWw.ez7t]R]}ع;Pw/en붜g۱_/WWέ(3Ͽ9~suF1l9[`;}>T-+W/}Ne *Z=| |3m#̑U:ˡN5p|rYV&W=/M':mgs" D0a @" @ďHQ D lH:unzb_J.T؉MJNO< 8Pl02`ݻS܁Gǎ]vҦMiѢԪU[Siѝ5k`vZm'6m(6ڸq|&.UNm~xp=_Vwo"n?[9n)8[,YեˉX}zrERp¿@Ӱ|HeÀ@$񹲴¯dc%NH-29n9Em6澰Q  vrE=mWCɁf˧7Q|1TϗYfaddԩb&L(biӦ"ۮݎ{fw#_|l,qv2bY7Ƣ\z{ 2;n/v}),G9>xPmBC ޏ?m#B׼F>Ok#]|˛.[=WX:Wum=" DN[*c "0a @H)a rb D@r@}Rb'Vv >z!6v^z @u&:t Kr] ؉ܜf&0r%y♲_Wm#l~IYPa/$u;.mC-<7]XsH| ۰zu Zg+s˂>&{|>|ٲeb:t7_/=X̙3[֮]#~9bK7u[ֳ$kb~;Yn,~@o 27dacϨ$"OIl=δ}nn7" wncXR`U3e}7F6ط~,kRM_s,m\M_-u3=]u%@$0@H&c " D0I"a @"a_\})BJ2o<=Ґs@D $l0bgV+W :-o 'o;w=(2vb=( *'t{KeΐE.H~>_ _]G8E?A^},MӦMO>DfΜ)/ؠ!W~[|<^zI V}ݷN*/߽{ vI ɬAcdT2__Ā?_zrvlv;vv?C w *!]H>" D2@H4$RO D] N/Y8{ e. b'lPgnٵk Pm}^\zbe',X rK.C47[].[ =bH}ݖl;@'d4"a/nJ} P@d<" 1x D@c 2@d<" 1x D@c 2@d<" 1xD֭[VyqyqyqyqyqyqyqyqyqyqyqyqyqyqyqyqyqyqyqyFN=WY^;DV]!u@jp D"` @r DsL~FʄJɷol""?w@$uKw/ɞOJ+߶|-T."fΚ)u@ly| DH?/CgՕ}_G֜aJ|Bmˇ;c @pY?Yv؇WyX~9Tm$Sf߶N;0 4)3l?<@6|@Vx\OYVY&v>}߸\{MR=2tR@dEPϞr57J;' kWKRbeyOӠUS^:U!U$W 9ַ}7-!={e˖w0a @qyGDV^,ݟ$+v<"-{>#vV^)ָSlOȒ%mb76ԨYS>,]Xw,=$r"w[K.\ ~X- >] +,gƏUV Vlǂb۽{֓5kVI@y"E0dwޑ1SkKf{ɳsʺ=]e2wm{ytJ Ynۙ:}؉~tRz-q S}ܵʕŶ[o-w5׉{ *w 2xP}_v^ ĖA}KŖxb˙&;0"L;2f 2tF+;t{p,^1O|,9lk#Y3=%fȨ1cNۇ}|)< ~ޝg=+'}ڛ*}߸pv{F]Z˖3ώ/Ýw0a " Dĝw@䍏'Ii-d[j2閦doZ]fad{+A f45YΘ!;0"L;~ ާJ*KGkkɇȘwkȇknh}>|hn0Xr{ K,:Ja@ Dl;]S?؇ϛ<3~4kZ] ڝwIrcYtt{a׬SW֬Y%ox" D w?yIN(>S欪-o!sWבwWޝؠW[6Nޓz57$[<@@Ķx"rk;.UұK=])|_B51}*6ZvRz  a $#c"fҹbKkk櫒y`Mŷmy"dwޑql"7<1L_E^^6J2nH/2;؁YvYTAF}<0&Yrҋ;` @;2~ :i<p- ғ;` @;n 2;` 2;` 2;hժ0Ýw0ǝwnZ;;;;;;;;;;;J} rρ2{m2;` {m2;` {m2;` {m2;8s$@dw;9 $ \QVy D|M@ra "v@d," 5iD@d; @d<" 1/as(UE4QD#/im# FDDDDD);n(@w l͎\DDDDDDߌ" D|@iEDDDDD);n(@w l͎\DDDDDDߌ" D|@?Fƞ)e]I7} ?AԮ$^l+eNd%]xa=%u2ɔze[&=߯Lz^-x_ ?QO&o"e }ߕN:J7'A*64"@$> D@! Dˎ\""""ߌ" D|@` .%S+_#߲_NH|0/ F$Ӯ-'v߲!ٚ?2 """"g E48R@$%1I."a "a ]vŞ7fHȐ! t:'f<'c*}*cs9snLXDb'G!eit˹b;i[DQNj=lP4KF l+& DRҤ62'2f{ķ @$2> D@$Bj2a\ -+R׿drg˩*+W͛7K@(7';0e @$ D@$1c ` R+R DȎ|3h@K"GӀ >n<.lذaىmm"0I/@d ^7#VL#duy=Ļl!ՔqJ d mdoٌH?"8DNRԿY|˄dkvTn䬳Β ȟgׯj Dbώ|3h0qb @$6 DP2@$fH1!""""";n(@ib'\XJ|iɘ;5aw2j8lPpB8t;ցHje׆^TTFol3e2G1k`ԇŷn," D 96Ju`gd0[.Ϸ"]%cn$.[*#>f`ݐo= Zn^_c. {na? @wc?!'?%e uiynp#yŻo ϷGyQ9CuN`gR 5)uL+ww_&wS]sLJyz 6nA]l'0} t&ցH/VcVűxzfgdJ2~qM|gXH?ןH瞾O& p?zyᾪ2=xB]?QdR`Ud嗈ǽE|ۈEYS~GZ/[?RҎ;&ח_J|A&n{zOS/)?OT"gyԨQC!;n(@ĉH0_0a  b @H<$7;~p1@1;n(@ĩr^G91,X+#WLQ˟;o'# \Yc:ϖQ/M?? bz?jRp>ΨUsKXǪ؁H1僯l9q"}:Il="F_jt7e@ُae|!['&hOȕ0Ҩ؀bB|{]Ntu2v2zbdSeܛ]dTn?c6 ϵw};!=2vUo/8a&ցH/QdV_Y?~xlcx_c6xK`+wD=622x/:_{߲{ekvr{Vn]Z:tH>s9%%{19s|b3F"@(7'"a Db@Ha DsB8^ D0)9"͎\n D1!"""l̎|3h0qQo('pͯW]`^[ntľoHlQ+'\D3e7ȷ)JI ͝@]JpN~Q(}R}"^}(M􇪏؉;;.7vc25t};ܰ&~ .~8C턴t?`"SҁH_;)W֏Ab?fS?q Dx_K:?D|uC&]]?Ͽ D&%eJC0gwl͎\n3f _M6b7?.a}O.\(n<\r%RҞx ]D*Q""""l̎|3h!f bف)"` @0" D9!/" Df.7"DDDDDdqoF]t D,>{.Upbˏ5jT)rۡe@$M{#cYfxߎ.d߫e6J(EQD"\2+׏;Kb9?R1DEǀ}ߌ麰 n' }˞H oDv5PzB<;LL2qof.cc=&Q\I"@_Y?+nK+=xqO'yG?[?@d⨦~?Y}CO?/^,6@8z-[Ll9}}Ν[o/~ q{w媫b5ժUH1!""""=;n(qg]t[S"{@$1 "1a H'@$ }" DJHf.7"DDDDDdqoF3.-i D,[rG˘;qB^WNC˦;o'*#]ڥ8е3v2nz{rv)*uc'#].7vhG_%ý}۳/%u}0WmqRM Y%]?@$‡ [\ _ʩ*fHx/׿U~Ȕ)SľnKn6Աc};7f>TP"" Dg  D0){ DN?e E48ى1wϓQ vy`؉ZB)Q 3@Fz]s u!a[~̍ egdžo܁ ^!#w ZDS" DF+~ mq"RV;066ho>om zR{]? 
]$!vI!/n b9X4"?RK] /t w%]m%ɘob}LY?)oIk]D/T&.Ieˍ;D >ƫ$q{X]qDzog@.7a| -+R~" DR gH1)""""7F D| H~򓟈oى^dnQ:|-0ޏe.5E2ޱ2§bĻn܁HqFM.VIvsd uO}oێ;1F&c.X&v~Ջe']/V@@(ސX_?@> HԤW`bB"~ qݮ{>} 2vi/y2- CL|Lrܪ؇({Bֳv / D@^^^]uLx؉rw}׳s=vI ?D+h*]>篬-r'P0(PF:7z|Xmmo=~tt)-^?QDFo$Ӯ w.[6$[WW}:KN9KeҤIv[g?38C:w,+nƍD[n@dΜ9bÎct"@1;n(@ĉH0_H D~H\"I@$)"Ec BDDDD٘7fHKfw A^sWw 42~j;-iR;owla52ekv""""""7f` ;H D(S DHt񃋈(RvQD @ɌzEKy~z%lmJn܌aۋ]O\.cDdkv""""""7f Cw d@` 5;~pEʎ|3h0q(eLa>>2r06%6jS2[MƋ]/6Mf."""""HqoF "NT%@񃋈(RvQD (-ٚ?"e E48RҒ񃋈(RvQD%=;~pEʎ|3h0!""""g."""""HqoF "DDDDDEDDDDD);n(@?"e E4QҳQ7"""""Jzv""""""7f` BDDDDDIώ\DDDDDDߌ" D(񃋈(RvQD#"g+_GgV/u}(񃋈(RvQD%=;~pEʎ|3h@zj2w9||чXƏ/mFDDDDDώ\DDDDDDߌ"M4"DDDDDEDDDDD);n(vvmҤ}[0"""""J~v""""""7fHKfQg."""""HqoF?~L8 9r-?"e E4QҳQ7F D7o&7p|W[0"""""J~v""""""7f` BDDDDDIώ\DDDDDDߌ"i?yr饗ʁŷ%?;~pEʎ|3h0!""""g."""""HqoF|H,X ?dIFanvbBd"""J^v""""""7f` d'F Hv!""e."""""HqoF;jH֭ŷLanvb4ց-gh3δ=dgϋޮ<&0G[!d9yu[7-:~~?SWi0HlGWo)|!}6n\.WR= Nz"*;<)vzoO=凧"R'eёo*'KoDh;?>Yκ,^Hq\x[&(y񃋈(RvQDe "a DQe."""""HqoF|W]2ki[07;1@%V"\(=$ۍnwWmͽdŸ[]?V6_A;GWK(?!? bvιg=+ %n>X8T)V 2D&dcpkǫ뼮|W>P,=jo@䞁I'T"l/DDDj1B|CG}$eNY?'"J7';@$6 D$""ˎ\VYb}g}-s"X(Uߌ"i;yG}޽qe=)αg?j@?[vO"\3@5$6Ȱ~ab_o0)ľ>x;4-ցHq,5P_w_Ws_ڮ:[&QQ7';1@$6 D@$5QQ7FD]2kɒrKŷNanvb"G~!v,ێk̑abkȿRdn M}n}a! o?-g_y*gz@+CrCXe_i~i;Rs֣DDDDEDDDDD);n(@N2 0"鉈?"e E4~ ӧ\q[07;1H 7yk^k볾oa}ODD7f` d'VĆ@(}WU'XY}˜~"'"J7F Dn" 6˗ɱcGe%rHŷj"Ϭ/գoQGݎ-dށ2gb7@/ԍ=eۤ}G"|0L~pa?+uwD{tp=!-&eC!浧ܑybH؇7Rz.&w"qW+7!He=>wotɅ^(e?"e E48ىQ"a @40!""ʼEDDDDD);n(62iR\9(gyiF>,mf'F݁Hq>l`5~ іgJ:!I`J?~$إ?ێ;1^W5;]~`5]/V/<)M>)؇W6Dؿr @lHcߔ/쿾t YoDhZM?:Y~}ٯ뼮[7e^v""""""7f` d'FĆHb BDDy񃋈(RvQD#m"we,5,exQQ7'IR="H"""J^v""""""7f` ;IvPeEDDDˎ\DDDDDDߌ" D|'IQv (y񃋈(RvQD$) O+ķlaa%e&%/;~pEʎ|3h0q$E1AY ""e."""""HqoF "N%/;~pEʎ|3h0q$(y񃋈(RvQD$)Jegv""""""7f` ;H@̎\DDDDDDߌ" D|(=񃋈(RvQD 3;~pEʎ|3h0qhrR˓x2I鲻.co;{oy:,xOwRT}-Cٙ?"e E4*T Dl?! 0Nd'EDDDDD);n(as"l;! 
utM~#ŷuA $g?Dٙ?"e E48R:&l}g (;Q7FD?&'?q>!~e{9i[_7ig??2FJ.Uķ̉Yla3-}.@$5$3;~peZ5jԐN KGBDDDDDd E48RYc ĎH޿0I D(+b BDDDD7FDu*^{0I,"@} 29gJw:oE_H%ķL& @?nMt"DDDDDT|vQD , 1"@1a R DRJFvʖ<;n(ŋIrdʔHؐCC ~*v>Uw >8Xl[z^q*TY v"V?>Yκ,^ppc\qb'tO;4Ԯ9;+`?X7p7N#j/ҩS'-?ϥI&?IJ#;n(@w^b=!@H*` rb DHNJHQ~Svʖ<;n(vR.R7oL6U]1 9NىKG/pķdVy]?4| {v/W؉^z_Ro'~_#.wC-r G ]ŷXwK[ND|On{6x/9cϷm1 D(+[w ҺukX"_fƍvZ9}#@DDDDTqoF "NTb D"a D0) " D({W@qoF]Fw?>.9uZC9HO.m#Svt|=h"uɬ;f/[=%m]iaCrr\Rx%e x?`_RV<@m$ D"^2+.炙X߿UR+[w hZ~kN܁W_}%wC,ߌ" D|e'0a @$Q0a |%(;W@qoF'S/el Qwn]eCʍ)'D؇ZJx珊|資ķl"u؉ː}C粞rg;]/ *_ > ~L->N.Yb'}ˤtT낟/~ ;BN٩[6]I Dм{wN|$֯Jķ /lGIeϲB񃋈(RvQD$C6c "Hg*v){ozo-)'?<_X*$Ï6NhV\EN?t9?OL$myngߓoOxJnz_ʙ)֖Ҿmg۟'d9{]ķn:+2OgI-ߛ\7:S9Αӫ9| r?`cv""""""7fh֬Y^۶mXOJH}B" D0a B?"e E4瞼VZ@NO>Y~ɐ!ŷ[b'|Vib3zpZKZCjoTV'4O;=x^إ[}( |1@|&B n{BuN,vUJg z,!5z6bajf{m3e$QR'URҁ/zB<{.w̺Cl=t̎\DDDDDDߌ"v[M2 X1OYׇh1o'VeG@H<EΎ\DDDDDDߌ"Uo5I&3q9rX^y9Se„[0'%@8av2HvBN(or+9'}WO H\gX)UWolVZ??&[L}(L{u R}Fuq_eZ٫ľN񃋈(RvQDjӿq1)] Db@$>euB<]Ϗ矁HX= DR7;~pEʎ|3hԪU+u;qh\jԸ]|V<g-+rg?:Yl;U(m$eNhڇIwt.g2t~NT??Y:3߯6#/F^&ut̎\DDDDDDߌ"-[k۶-I0 dc @" 񃋈(RvQD#4 ɨHhb[0'02orNt߲!am97_ QfK~u{k2$[/??VV$n'񃋈(RvQDS` +") =NHb J@(uQ7F Dv)X֬Y-3gΔT4ym0'k]+UT2!v|;bktKvicI9Riym04Ŕ5ŷL&qW@eȾ!sYOϝF:J՟dK??M~d B9;~pEʎ|3h0q*|` R2 Dj'ēHX= D+?͎\DDDDDDߌ"i31G?_~ȑ#ŷ[FgT.eC #q|ܡd ];ysrOr5IqRok?i<0p@-l rp__k:MgcOG/ϟ?s9I'tJS]eSv""""""7f` T@d$Fg ?x"D?g>,+_e`nj"A"& HNJHN(Q$"Asjr"oYSu>ӵӵ_k/ه&97vmw ^WoVkvz-}|Gc$IA QeMrn(ڴkF"vA/ ne!/2&97vm!ZkmJڔHmϕj$I9r~(hsCmG& B"$IR?QeMrn(d!R]d$I`< I E,D $IG9?4ɹhHvA$(2&97vm).2H$s0P$ڎM"EIkNWoYjSҠ(2&97vm).RHjWm1j58վA QeMrn(d!R]Ԯڒc$.kp}OI PQBPH!]%H]$T%D9?4ɹhHvȗ?-Q협瓦T[rLN;v̔jkw̵aՎ=.AC@j;6Yj)!4-9d!6վ!?E0r~(hsCmG& B"D9E%H]$e!21}OMU-D9?4ɹhHvb"悜Ƣڒc$.ײվ&b~ PQi.Dvao~S4묳F_;uEX r_ݗ9_;gT;f"[<]ψnQ1S=?o{8[kz-9FR"y ն;=硿Fc&rD֧F[Q jғO?~s'EۨS_ۮwGQGGwDT{씺 (2&97vm).RL\k^Y,De!ҟۮg!/2&97vmw ]v9z_x ~՞cJ15Nw>=7s/˨V[ryS"ޅȦ1?oݣ?^qoto~Y]w/?=9m?e]՞cJ]AC@j;6Yj)&b.ȵkz/YyS"mhYC@j;6蠃jnjR"Ōh՟4L4컗~7z"ofm-%[FT^؛.^~ z/x-nQlNܛys4Do7FYT{l/ b7|¯\M輭΋j^] s/=wT;b}J~V4d_:&ӕFyk9gRT.D>㣶rQ^/D=ttEo[7Q۟]zÃ_ltE+|用^HDxT~~}^21q8.*k[~ QeMrn(d!R]\_~",D,D_ 6D9?4ɹhӸY\~eQ.DZh믋nhVZi\SW]^|sw}}X]e\&z_`ɟYaxoF{ݻWml;⃢sjK)]vž|LT^8?;Z ]pQCO)9^.D\;h~՞c4վ!?x5E㎈jc0r~(hsCmG& B"E?ܴ,DBdڏ{",D` I EBoB!**;ʏj)8k(_^-9v!O_8,*/룱ZY7==sQKǟs=_r!R2kэw>}Cc4վ!?x{F/hzou0r~(hsCmG& B"E?ܴ,DBdڏ{",D` I EB'^WD!QyAۿ[T~TH Gykzo壿sFcw~?*ߌ7<[kKEZ]1z˖9-M3=Qc%ǔ.D](M-VއՎ||3<'*:һcJYB$[xGi͏FZ{wڏ{~.jc0r~(hsCmG& B"E?ܴ,D i?YL~߳#2&97vm7 l5׈oިMտ/D瘺R"E?\ g4ߪEL% l4?E=߷.V8{vvy%/yID?Ѧ'l r_=Q^[gE޾ky[Em\T>~F#~}7*;wGE{oQy\z>z E?b􉯟ՎR;W94Z;D`iZ|f\l?E_9>WEgtM w[~TRW߿M;>~fGvt,G'hID:Wѥ^՞-^zv̔`P$ڎM"E~9 BBd$YL"ӓ4 I EB{VZih9^WGwq{T{+.R̈z\QzY_9kT>{/zof~Q^ Gm/-bl-ʿ?/ǯ\>\7Eoxըѕ_ݶmQ)_/љ;ՎE޳{1(=`4kT.DL(/}Qrp 㯋y(w̵Qyw 7cL.@qM ,*Po~GꤨY.DJ?lt)}aӢsԔj-;q~ 5BPQBPH1#rAn( d!b!2,DHo r~(hsCmGѦquE i;Fy 8ۋr1j58O]J32D9?4ɹhHvBYhR3U;uc5EeMrn(d!R]OY,DAC@j;6YjsBd|d! PQiW,DW"xBd|e! PQi 7B䬳Ό>G>[4sF.믋j1uY!zxK~mtĹG"s/=w4L3nѨ0(r~(hsCmG& ByBddYL_" I EBo{[o.hYf_~SW \+wpI;F[Q.Oui]K+ZlŢ1GeMrn(d!R "B]" I EBd7EM u]7=ԕ~nӟ-zfYv~7oGO}n艕.we[F7o('gn;NT>> |uF/eQrk׊qo-ъ?Z19̖D_Q]5ZtEWUl-FܶKT>~Y"2&97vmZy" e!26YB9?4ɹhfm5\ ox| sL]闗o/GTx]kχv[ Q.2~q7(/v|htIkG?mWn=n;DYӬu)MB0zowo1z'-Qf! PQ) "Bdli0(r~(hsCmGѦqYdE׾ѕW^'G/{ˢ 6 =ԕfQ.nQ^sE,)(]={QӻwzpF-;sT;fJe! 
Fcjv!ˤ]\myԏz4+gʏ6 `PP$ڎM" B]" I EB$o/Eq /p4GJ\(wmp}\Q|M~_[E{<{}]5Q"+/E3Q"'1F9߈m޾k41,DAC@j;6Y,D& vY"2&97vm7 n> M Z(zի^/+团?+g%Dyi!2:{7|SMF0yt2Qwq_ƺ^|QZXgwlT~|'L\|Q~mԏ,DAC@j;6Y,D& vY"2&97vm7 뿢MO:(T}Wj1u0}+DHlE}9*oE =-G]~W+Y_^\%uS׋yۢݶcTޒ+kZwZQwNG?GQyXBd޺[n)7C忾2*">˾7ߍvmh_}Ǘ9zjLB9?4ɹhH!/L[6  P$ڎMn!rEK/tW2zk^w՞cJ3_FyKvqߴM곳EY_([e[b]Q.TQ> U;=G|qWly7G4?Ey,QB$$=?WѬ5z2hvDl=|,/-7)Z߉jh'2&97vm)Ԗ#v|Ff!b!Џr~(hsCmGѦqyǣw:sѹbd}j5uڒc$.u!ŗ!pg#xݣs& I E,D %H]$YXPQi,Dhf>Fx{Gǎe\>3h'2&97vm)X e!2h'2&97vm k:*˅ƨxY)"?Xt˺_/\ݲы>ݲ&Q^Vڷ/O|=z G{hoF_~o.%^F~wǨ|G)d=_]t}1E~e˨|Xuߢ%;h'2&97vm)X e!2h'2&97vm7 |SwѺo׿>ksM])"O]cz]F}Q^NGϏh{h"õ_>OFǗ\qhO~6*?>Zjբ] :ۣڱ vr~(hsCmG& H,DNeMrn(4n"-BFoyD瘺R.DzuQ.>=?{}& 㿾(D|_.GD/ѯ/9z/B?Ez4lGy˲|$*VkQ[~M=/.6;=NeMrn(d!RBdtNeMrn(4"õF1SWB[DuG>rاWZ Qy|ã|1S7,~)-E:ۋh'2&97vm)XXLOC@j;6{|Iw~QBSOj1u^/Dv:h6Ԭ/{^'/~tQ)k%ѷUt9:KBdÏrooe,ʹhйjC4B/Rs8_Ƣj;6Y6HZr{D:^#뜗K|rn(d!t.燲# j.$yT/Η(ڎM"@r~( =ҠŠ\@*˯SK|qEzvm:CYmr^T/C9_z[E,DPVzA-ϋŕASˡje(Ko׳hйjC4yQ2H~y9Rmr |mzvm:CYmr^弜Y/-_ڎM"@r~( ='?}oFLi}-*վ-ϋomo?ƺ}/ õGs^弜!Dc{x[Ծ)Ko׳hйjCO?f!.vqeo?: r(Ys |mzvm:CYmǚ"/Xty3:3gq"mM!]~iꫢcƢa«y9r,D9_r|=k;6Y6c"2}*y]K.(*g Zzy>Pe>mGvj7->0|:g'h׈_xh|&ϑ=2u]\nM4BDz~SjχW^ qDFNFzzaOY=?Ï<" <1?yO'Ey9rڞcu^tPO'S:?D8_uΗޖgmG& s9?Ն~BBdJ W+]BRf!b!,D9_r|=k;6Y6co|["K|[2:3֒DB$/<.+ӹї64ʏu-|}tgsN?5mݣ|^=\0#wQG.rdz\ڶGr!rG9:(A?׿MbGя(>m_r(64ʏ׿Y,"-ϋŕ[nG ,XtyJ.$ Õ*zem),y\^-;3<{Ey{sQ^ y9m-D/?-C?(ʏᅢ#-6vXɚn4Ǘ_G.BMZsVZu(ަ[fEnկkSy^.. uםmM:@Ƣa3N~Y'//NwMir^弜˱>/Fzˬ8|mzH:CYm,D,Dnz_צ]\e.$,D,DH=PΗޖg;4 s9?Ն~lp  $g<.-ާ~jZo('|l|zhϟʿzE"E'|bZ#r!ϷEymB"oar!3_|"v:_" HwyGT{Lc]/D??ϣ<6 ~Xw\{:(4ϊ ?rrƙGy˫sΊ/|mzvm:CYm,D,DuO |-ϋŕ^ᄄ_. eXW^x=~WT>OEC6sAx>я3#p<#{ͧ;(/>5sC9/mH6Vᓏ~?ytNqtYQB\eץmy^.2W""r,D9_r|=k;6Y6HmSEϷ@tιgGsy^tӍnj~y9rڦܮjJȅGA2YQB\eGjB4|ˡf!R2YQB\eGvu(oU[i孫r!_j5o|P?j˼u)"$;Sc)P~|=k;6Y6H`)P~^Z2Ԡ/._ڎM"@r~( =RvYgFeh%_dS/Q{Cy^p꧜C yyAG{WN:ĨA2Ԡ/._ڎM"@r~( =Rf!2~y9Ԡ"r 5K׳hйjC4yq t~>r^T/C9_z[E,DPVzA-ϋŕASˡje(Ko׳hйjC4yqu t~>r^T/C9_z[E,DPVzA-ϋŕASˡje(Ko׳hйjC4yqWt~>r^T/C9_z[E,DPVzA-ϋŕASˡje(Ko׳hйjC4yqW t~>r^T/C9_z[E,DPVzA-ϋŕASˡje(Ko׳hйjC4yqUWt~>r^T/C9_z[E,DPVzA-ϋŕASˡje(Ko׳hйjC4yq啗t~>r^T/C9_z[E,DPVzA-ϋŕASˡje(Ko׳hйjC4yq t~>r^T/C9_z[E,DPVzA-ϋŕASˡje(Ko׳hйjC4t%\]~%\P{s^:/>e,ʹhйjC4B/Rs8_Ƣj;6Y6Hgwџ|^teL̯ΈjuK|qE97vm:CYm5\HRt^}XsCmG& s9?ՆiP :"gG{ >uQH:Rs8_Ƣj;6Y6H I.$r^:/>e,ʹhйjC$I$M)ڎM"@r~( =$I4j;6Y6Oq$I_PQB\eg$.rJRj?S$IRsCmG& s9?ՆT)ImL$IU E,DPVzFR"$3E$W97vm:C]䔤6~Hhй^V)ImL$IYmG& sS3S$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQiM6,DP$ڎM+Rd!t&2&97vm:C@j;6Y I E\sBLeMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?Q7~G]_Đ_!:҃IG۝]@rn(d!t.2,D,DK E,DPFȴ.M)z碱vDу}097vm:Cu"ӲL97vm:CYiCff)[FGnN=h褛NƊȴQ'}2Zգ9v#z_}Dm}OD{@7rn(d!t.燲 iY j;6Ycf'nr!rDw=~WͲ,)7Ȑk׹3n=#&'l'&=:)  g?-|ȹhйڲL"-ڎM"@r~(kk o:9Cj{Q*">pmTZu%gɨ3ODya ! 
_Ǟ~,*Sͽ+w~e+D?pTZ者}.'*m}-n{h_,ͶlQ.V;jh߯.+%_Tpι(aϿ|k?*G / j;6Y-    `PQB\emt!Fy/;8 WwE;DBdߋyΨ?x(oD=Q.VJyݏ^]~ѽ7ZGw!X9K޲;<i"b8w?qw~y~TE?pnx('z> j[U} 0PQB\emYXXX'ڎM"@r~(kk \4U.D%ۼ$[5=Q>>/K$jZotDMB$.">B\\ߧ~[}+:ã{Gcir7E۝]o'MN] E,DP֖ȴ,DB`|˹hйBdY* m]w~Qy[[3G)/t}Q/O?tFK|'}2*ŏB|S\85[fuMn~hr!2~E"DsCmG& s9?e!2- : %ڎM"@r~(kk ezǬ-?KF鑧y ʅH-h]Jy~Qi6ʅHyˬUT[f7uoǿzWG\r@TI'<Mc%T}(T}Sy`˹hйڲH]o!01PQiU>Y.DVI3o?s孩hswRB$oŔuY?+G93-=cQ)z.VJ Wwet_BnVyO=囹=*")_G>IgF8{4 ׎r!0{hң([e5-LJjhv.Y#?tI / j;6mᆑЙڲO Erb!t&燲FI'xB4Fs4G0 T.D|".xQ\困/+"*Bd?lhbWD+R&=z[ϓ ||H>~_,ͶlQ.JV?jh6Z.2޸^x.jog.prt+R.^˫ȹhйڲ<97vm:C*GpQWV<|K'mm{ʹhй,DB E,DPF:C/;8[s{ќ;]uUj;6Yd!@?ȹhйO{tgD{G?&j;6YsCmG& s9?5~13<]{L3tM0PQB\eM,Dj;6Yj}򓟌J0PQB\eM,Dj;6YFzի5VS?L<97vm:CYBH97vm:CYO^җF{oT:S%wG E,DPVR E,DPVZwu%\2j /D}k=3&j;6Yd!j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2oh+D]zP46?ihV E,DPFH`ɹhйʨEw_i7EϽ\4yջ:zFt#ڎM"@r~(BdZ")ڎM"@r~(+-{ȲL4w~[o>}oE_}xy(w"Mnzh `˹hйڲH]~[o97vm:CY[#],?KEy῭ ./79Oyk||(/;*]|Q>矎h"siOF|^o &cuˬTmQ.DoBrn(d!t.燲,De!Rg!0PQB\emt!2ַZ%g(=#Q>>oA5Rieֱ͵\Q)?=*mtFQ)o*uuˬY~MWKJ_9i<<|䛜i䛪u^/囪oyʖ5\Lt97vm:CY["Ӳ-D&j;6Y.DVI3o?s孩hswRB$oŔuY?+G93-=cQ)z.VJ Wwet_BnVyO=囹=*")_G>IgF8{4 ׎r!0{hң([e5-LJjhv.Y#?tI / j;6Y-    `PQB\emt!Nh}i?`t]D\dE\$:&xM)7a_EWwETʅ"7~NV:bMo{('8:~/GM |X"m٢\~цm\dq7FϽ\$ >?\"Vd\jWEyK+sCmG& s9?e!b!b!0xrn(d!t.2ƧU\%xсOYЅj;6YtD_vp:s9w3" 97vm:CB~sCmG& s9?џyh߭{#ʅ j;6JЙO"j;6mᆑЙPQB\eMrn(d!t.2&97vm:C@j;6Y I EV^yBLeMrn(:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(/9:C@j;6BLeMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vm:C@j;6Y I E,DP$ڎM"@r~(hsCmG& s9?4ɹhйPQB\eMrn(d!t.2&97vmBD$I$I$i(d!"I$I$IMEz$I$I$I,D$I$I$I҄BD$I$I$M,D$I$I$I҄BD$I$I$M,D$I$I$I҄BD$I$I$M,D$I$I$I҄BD$I$I$M,D$I$I$I҄o؅ '+I$I$I4*"$I$I$iU;]s_rIQ'RuםCH$Ih1I$IB! I$Ic]I$Im.Dr!?=cѳ>;˯3/(*_I$Im"$IRwY4d!"I$i$Ik؅H:(o!5Q˯u(_I$Im"$IRwY4d!"I$i$Ik؅H>( *$Iz9C$I. a$I4V3$Ij:$I^gΐ$IBeI$IΜ!I$uH&ʠV/~|+G7QI$meΘM IBeeP}?BD$Il3: I$B䮻&ʠ2/$IzD3k I$BeeP}?BD$Il5s$Id!Ҳ^ *g}V7G-pVOo-=h.hM6Rт$Zg SO;-*W_Egt"$Ie3r7ՎZkGzvxBD$Imih2  I$)a!2m"$IjSȝw1ЍvPɎ=h/mNQ.nmRÏ<"=hY[|#ꫢ[n%:u7(*o!"IAjs1ꦛnjNK.?џ;/;$IR,DZ6A%$IF;gXL$Id!Ҳ*sw'>}{뭣ڱmZw/E|Q4?c(?A뮍Վ_ey:r>孽D&\h rS>^$Iw}WǗ~~Q)mNWb>W;jkͿȢѡ 7ga٪k-bV|tGn(|?G⊨|7ʯgޅ<I$YlJžZԳ$I 69Bda!"ITw>ЍvP 3όjǎѯ=4*>T&g|SzoNة-F:"w[ p/sLA+"\{MWELwJ$Ie3GVʏk. {Hߏ_cigt?rJC=hE(?&FlQtEyK\矷H F(\X$IzHF;XXH$IMv,DBD$I[~mQcS^5ANE|r+ڱ(SU~uWGz(??򨣢]5W\yE_rI4cQ9hE9`#I$559#zS<(?~G([Nx=ϦwhydzO~sQϛ>*!_x \?G8oE$I& vP$I휑Y U^~<$IZ/Dr1Vj|E^zITweFvXt 7DqnJouGFb/*_\#*? 
2G[{Ϸ@){v!2N[}M[rhϭ,#I$559lÍ7淢/nm~̔wߏDdz^}1GQywiE1\G('7 ʹg?Ey$I& vPBD$Iv($I^Zng?'tMW\]sn(sojG/ܗ'TԵ]y~o^O~**VBԴסWoӯ~-Zyգ|Sc%Ie\|3;/C~x/^;Ql!?&ϜSʏVny?k$Ig!Ҳ* ȴYH$I"<,D$Ia In=hMm6zG;s-vm78o^4ʱ} E;kt%GQ"GD_qyCo~gDK~|٨iPhEǛ7S_K_N<(7<@qI'F<|yhv(?ޫ!?UX3 <Oʁ/-(?~uES$IRY^'&<].Ze5^^]~\~<ǗsF?_"éDXD ,XtE/hM#.(縳9;?|dy!X0r+^IBe;XXH$Ie;gd""$I \`믿>gDo^_ͣ64Ɩ[D뮷n>7[';ыz5uY_8_?ѧWX1{GE>;q[6mQӠ0/}|9h֏\#Q'恿0}rm}tOD~7|p_.r>FW]}UT'I$MWsF[ڱS߫87\lNQ8oQ?裣|ܝw|O}:/?U$IzHz5XXH$IYBBD$IM46Z|?Z{|e_^iY9  Ds{徣B+/ugG7XtF7X`zg#ϸ% __w}ІGGϿ0-Yx2cx[_zc޶o?=ʛV)QS /+3uw?djs~OW=sEsΤi!>(C}3o޻Q>nzs'}Q~sC|Ds{Be:X e!2crzw^\qe]yAG>?tѼtϳoweQyWDyx-:h囏dc`E?IѳϽ}'Ey~חG.D>4dJ)/\ۢ߭_\\#|H.Lλ(\CO)?{=Q)nρ)&3N=hf}-oyK뮻FBe:X e!2c7 - ' V;V^yq!r)'E\rQtT\raC^Ps~>-stݏGPDou] ]pQs&EǪ\ؤ/GGy \Xu |ž|LTr!/\Z'F'^ \L~D4m:zvOGDY|"`y=h|^{n ,Dq"3& Bdb}ǰ 3:};sNpbh/j˓,ou7GGukSn{].`MkL].r7C?~۫.;94n$˅ӆ?8'*t?F?TT~|Fo> M7_O|S?5n0 g9:{,+v!w=iF)~:z^>*" YԳYL,"c!@c؅=孳Z~uCE}(<}t㢼@Վ<2zſEkxFUV)Q.X"(+ykr|TM-NvBT3nj1uHt Fczم?)"-,=\O?|T.Dcx[twD~轓_)YQ-:{sTF7XTI;DxFjĐ_?ZtE{.[B! ",D,DF#BB?a"ӟ΍?}~g'tu=o޴n薻jnje]Dic㿳Q."|.@qM ,*Po~GꤨةB$/-?P9fS:;1l~{\}QrQB$O~OFy+: Q>W?=gktGZ(0ьRK-mѠַwGB! !"""cBdb/c؅H:f{]^`/jǨZbl}jT;VD3"3Y\ U^iBDm6Bdt,Dh?B$A{|sA)|ji/nzo.zjǪgG_@T;F `9c:g9/=W_.Do~3>=CwX Q"01^YXrz!R[ R:.Ҏn4ovԍV}KM ';s}.hxꩧmj}cViYBDyM㝅?B`("-סvVR7L49gW;,DZC"nhrzwX̠9N'V,Dɠ9NXwu+}hib`?՞c,3fS;6pBd *@3` kLꫢ^7zg{7:}SѤIF *#}-Zgu"2g)"-3 ϜcwBd֎xᇣ6$:񤓢c,3 Ϝcg]ǔ,DZfP 9Ի)Blw͡Q혱׃5\-sۿ[}/,D`x3wX׃9}Ǹ[l卣SO;-3jPywl=c_b ^ *"-ՠb!^U;~!rD?wtt]wFS?X6ܠ׿>Zve&~{4L3EzkT׿-RQ[|#-B*5-DxhM67Ds5W/~1EQ~{(L|5X#*=?(^mO?XnFwXlABBFj9rz! ʫiEKѕW]墢\cph"&MJyӛQ..ySNfei! _<>F뮻nr!{衇|q{o4묳FVZih%믿>ʅvmXnFwXlABBFj9r7 {;zg)oYX{seTMwEmQ7U[H?s/(:7tSTDyG|W\.h9R.D\p(߳Fo~stG?|,:쳣{`,jra!Ҳ^ *""z5gW蛅HA+]t(ߔ=yOV[EK/4ʅƓO>6x\b"߶<ʅb- 6 y督ַ5袋rs7G>:zk^X?BeT,D]x Q )"-zPdM|~o> 2+|=h"^xaY $Be]*"u"L$]0H}HˆT^G.l4?:묳|_QBsύ? Gkfi:QI+r|&[|sc9&Zo4 +FK/tt}E7xcoz_I;ps0zBe *""0R ;cnPB䮻Zj(y|SG-\4&M-ld֊"&mY7)z+_qWDW ?[կ^FniSOE03+"-nPnFw}!&*_kK^vE|[n%=X֯<%/8:gDP;,DZ֯_:gDP;Y6$eW\yEF}ku{]GO?tTZh*_h%~R;,DZ6A`8#3,De!?R;n!R{#)}dech pF:g hU;sGuD3]>):3<=M_ch pF;gv1ҫEHZwuUW]5j+eg{}oTʅȶnrhV,BeTvΰb!?R;n!o^;fF6Am#^Z*z뭣(o%u7G>:zk^r!>D_~R;,DZ6 @iz  W^)ꛅH է.j1MP՜ѴEH[fuCXs5" aՠz5gXXYu]o^+=X֫A zE1=V?hg~_F?xtWGKTZq^:o1z׻5iZ0>e0hz=gXLB`[o/Dn׃ D3N8hyc`tD?ZiWk_hM7zꩨd!?R;6xBd&ʠ2gXЏ}"뭷ߦTזDTaS;,DZfPz͜cw}!kmJ zmI0HT^3g)& k ; OEڒ`2fS;~, A5sr1"7|Sתv)=XfPz͜cwX̠9NhIcR.~ih͢+2?衇se0v}H *@3`ij79vLT^3g)"-3fS;,DZfPz͜cwX̠9NiA5sra!2 k ;Be0v}H *@3` In\}9Q.3fS;,DZfPz͜cwBΨvLT^3g)"-3fS;v!oT^3g)"-3fS;,DZfPz͜эVX!:蠃nhy^җF-X47L3M#< gz??~3^iA5sF7,Dڙޅ>~S;,DZfPz͜1.MozSsEm:цnwv1,D T^3g- ѱ ?徣B[o *@3kmH3<ёG ѱrJ׿>` T^3g- ѱ `!}G.D7UV_#ec_k3ΈFmo{[tFBdt,DS+"-3f"Bdt,DS+}3ъ~! cȠ9cl\wuQ^Ǣ,BQ>[,*=?(2{71Q~^2,5 ^} \|.(?\Ʌ>zh嗏J`jBecBBdzѲYw>4?lvj1T^3g_|1k38# eVӛOD3G*"ox+2E]4` ~?.Dze](]}H *@3Ɩ|#e!b!@)"7|Әorw]wFomR< *@3E]o{.DhV^WG}kM74_.DR^ :kE]PN|_k63^#ηb~wW"ѾH >[_>pG-\T;,DZfPz͜1,D,D)  O蛅HgPz͜эՁ4yg0> T^3gtBhB`b)"&惚A5s@9ꨣ\0`|+"-3f/"KiA5sra!2 k ; ϭM< A: k ;Be0v}G-DN9h׈_xS˯sQcA; <_E#va(.r:ȡ8:\/2r} *\# %2! rӓdMUueVgg/SYݕUٙLgIU8C$I@GT$I$U I$"?P-?l6}pŕ? ½9Pur"I$gH$IUwn@ *$Iq$IT_yo~sx炤f RM++; Dz@E$IR9hv"e "iTʊ"=V\zY8#B2Ɂ$IscpaOhΜ93via؝|a޼yn馰׿|g8餓?ܚ6R*; Dz@E$IR9hv"e 2XSxӲTWyGk^V \tQ}7#9&=FH$I:_ kV[dkaX'Ī>yg""d8y |ʁȽv{XoÍvv 7|K(޿nT$I$Uf|W*z'N;\s𖷼%̙3'_ u7Q'4=7o f CDk_P׿5q]zWx>0W,/rk[Wmo?mO"=r"I$g43e Ro>!> DV߰׿Ԗ>" c=? 
=?['*$IqF3CH굣:*\q{xûpYgkvXhQo-{V>o9*?y䑡xBs >3:wPUWU5;5ٷֶ?9P$ITu32O#^  D DTw<y4*$IqF3@[ay闪j <']}MozSC=^y"U{6x /V]ln?mO"=r"I$g43e ROu,ɦ7YQֿ⼣Ȳ)́$Is̊χn]s5au oxB>N1cF<'<.Oh[pa!/өP=Dtɛy" uC^Tuɤ&aNMd?URYy*QrlH08P$ITu32|uB<_eC1aNLLUieyGSI$IU8 ^W޼K^dmmo K, B~(>F:SC;ϰkۏVm?Uwȁ$Is D@dvB|X53욶 D[@DZYq@dٲ_p#f}jP̨MH(NT$I$Uf`= Ss o}[^'p:yBC y{kvqCy晰kdа?.KUWTcUjO"n ҙI$IU8rB|E"뻨m?f R/D!Ou?an. ed"Ta ַB2c}x''>/n([f!=P ÷|yb}82{0襇~}?lx|w7|!od=.~bz>3֠ͷv{u l'wwi({Q'rJ8C/τr6bȍ7{o6>#y~`U~ѡxmohe Rbn{gyyw?د +ra3wx`Ys}î .meu?qX_ D@*md L"۠A_d *` ҟDR> ??x8f侣!wsa_#>=OH\/l9s+ӟ ?; B.(lBq{;y@!SN_;9 xzm.P|R<#!?qǺ!Eۇtq׿>Ǟ!},\wȎۈ.[AtR@dПw˯u 7g? 8gnl# 6n_ƻ}(>NsP+A.?zz`r@AqYa6ۅM7s(~ς{B7,]4+܏Kޔ-ۋAs9ƻ9zl3Ͱ_4*Sٲc}V\_:w^//ɑqGmG}]ԵIsGݨ|tP +OΐrJ̞{`o~dO=̐TxP)?ow auEN }!{r;˯"Πۗn~%}Խ\֠כv"_y?ODe>篓_ː3l^ r='ozt1Sq}|)[v|?tdV~HwKf<uZۇ^uQv$zHe˗u+@Re?2-3ְ^M6ic RÜ;"""?P5)g D2i&A D D D&@dDc"\~7c{T ~P\S<_?Oty7$r7CCoFvFm+ y{Ur?/Ų;d}F@ ⥬R~b.?!9=wr`pAͨ=z}sCZx! pC\'u DyAM]zP={y'x<s!+#Qn_>WǺS~o 븡?,B>)7wc#q?<-ۋrf :Ultv!=;F^oUۇ^E]ۑlY㩲eX{/y驲eGm:}F负Iw eˎ5_0sU'Ng(;S7*(?t.Ol?oǮBw{ȝt)!=]gT>Oyɳ)9 9 #O(=v>ۋ_BY,/u{t?zyT2c%оsBy.uxޟqd(nݾuIǺS~oUu0gg~dEy<[/[]y{Uo&ʠA)JP\ΐW}ui}]TAFoveҰ.I}C2e@_g;UJUl DVe w4u0z`rM]kU7Lvm?@\q}x^E@&OsO'M,;;yp!]ORL|󼐿^K%凎㜹!wпN 嫒;heˌvɬN,Kfz+.Y!/!rU};A/#=6-3*7M7ˍ{Aw xO}zЯ 0wRڠ'-?5-;*H{r!Ol-ۋagNA?Fu}_刼=gЁHQGK^W}uiz}]T9&[NwP\9`.ޞ BqN>ŃB/뗁Ȁ;"+ 6Y}}&H D DsϣȪ DVe 0u DjSaK&{>_Xpa凕j~oƐ/zvspsB^+;k;? x-C~__\>1򜌚vl8~sn?_9e!SO?ˍw=ZL.M^t{b d~x^ҨuKZxs8]L~= :Eyɨ||!KyKSB\7o^|]%~OyA7m;bH}ۇ_ޝra3C^y5ϣ=F=@eWo D*a6@@dTUn_@@c L""ܿ@d탁D0Ϥˆm.'OW y.wtP|<^wO v.[r /1uat3O\矌~fϽBq#9&ĕ-:Qy7O|ǑGu| c~xZ^ a3O-;Vs}>uяmu__#o7Ȍowc}N}jЯo%qX%G7y=Uڠכv"_MrCaAȲeKC6c @x@^1hYf</ DӬIENDB`coq-8.15.0/doc/sphinx/_static/diffs-coqide-compacted.png000066400000000000000000000032731417001151100230670ustar00rootroot00000000000000PNG  IHDR[EdEsRGBgAMA a pHYsePIDATx^ܿjZq=N[p7(Y`_s toYB'O t)Nswcɖ%5p,ɒe*9Կ_@l`6Y/j>QHϟTڿ|} 8Yrl3TDZ}Rto20\Nicpb=YOʉ]p5J^jϗeеNcFjg,b `]Փgbdgl |2*#'ilM,c3:"2bɃ.غ:Ŕef%6i(O5:fAUe'\W{ sXDXvٟ䛽c{h?Ē @ԐMf:|^'[ߟZ~ʷϐȓ eĶ#@<PFl;"O*==ߞ(yԼO߫ޏ7QckԽ~F9&[mTc+Cxtco@acI}яml?Ϸm{u8S|[] ylZ},d Kb|Vg=2¶fk/Ʒ[{!c9ȓ ed R6ٟud871#o}V^?~\˕Aw 2k:&3u_Rp&f,@3L.'3$@αM'yRd>C"O*ێȓ eEl!0b  Oɻ b (-'jj5qCD*&*5nnm$b Gge'  ls{]egl׸~60y5}C<^߻kX'wy7DžOJAcM1?y]]}u.[?-+fZl}Y^;6pk^˫{Ϋu'X=aNXWtXW.? p}ƅz3y7_ 8qe[w.Ah|J_}?c W!B嗬DkVe;6an<6w8gcmdN.y:նhU421P]68/}+xMMqrg)0nwr8F2ZA3gi< "k Ҷ*7y7Bkq\9W1~GoS,G/xeWzP {+cS @,0Xa Ǥ'q߈0NV]Xݴuk~Ֆ ,izʡ'NJ*.zo'18IP TUܕ|X' KFxW\nPV)Rwa8=:lx${{c0jI c±m~}kSr@$~JQycn~Efi@[,GVŁ0X:^ _aRS,0Xa `:av>_(A2n?FX>q2{ȮH&q'J1d*cbqg|إha 0٤6Q1d`:a\6W<_(]&@,0Xa `1cazxM']><6W{4rg~mW~lu8k#e2O<^ggƹҭm*TUhͶM`U<>>Ц8HVUkghU<2|xҶa6Ya uw>eRkTƩ R<15a `14x34ƿd7^ᚴ{nF~kqyr{3B@zuw}W7v 6ޭ*߸!|8T(W2](_{m}{yʗ 0!<]x6<6o}!qL@!x7&tufMH7:p &Jn|>U7J )8~fA8 Q | [|Zìj\f'y^7?'R,@SPmx߿d5Lw aq|tTE:e}CzoՓP%{0cu\!KݗJpz =#房W6!l0\O\uo&}iw =oݛ ַe*Of=` J)SOp*a! #U7 C 00BX C 00 v 1V1Ɇ!TʰIa;Cc}l[+9{ns ע+~X6 !YƎ!\<~EnϷqX6 )wO|z2B;Gsba`s tr!4aӔM[FBxsJdsnBuQ(ҍ Cs;L˛}\*9 CB8gJ6 JX C 00("`D0 3uq\v(0uw~6fַ|"\WjոF//BiTU岦k__/|BIje6p m߱& 2!k7ǎANFwdh*pzV ήUXI3`RըBoMUp7@jGԒJ+Q _㒇kq3mJ090100r`b>a`"|Dȁ)B=3?b-lvfX~,jtyw8W?˽n'&c@`‘p396–+B8諍rouG};`^uDV ->.ĺ"I,Z޴}|T~ ʘ߶cgm۴{߾aP1Zڜ_ sN2u/>fiÆ_'޵ۧ_Vq}!\3>ڨr`b>oٮ~n`w.b~d?zSUW!W8MFfAZxnsVhu <}f_|_.OMTI>hpیuOLG®CNZUj%;q/OS\}*h}yW;~C„u5קd'kk=>j?901np#;oP?_0Bi:[~tZ[V0RJkJm}(iqo3x3\QɁHNGyw۩]P0NUNWϏ:L7̅귤ukuo%s~Ԇ;Ӿ!\>u:KPb=+k3p|?O?%/:MNGTi?AѠɁhqcNp?mϛA.밾gDXJi%hv R{rb3*nBFP>B2D4_j̮m틬y ֬t?mcne箖a:ͬ(@a95c8&LDp(ȁ Oyܱ |Dȁ 0`X00"BFDa! 
fƑIENDB`coq-8.15.0/doc/sphinx/_static/diffs-coqide-removed.png000066400000000000000000000101331417001151100225620ustar00rootroot00000000000000PNG  IHDRiL*sRGBgAMA a pHYseIDATx^OoH~9hQ;Kf,-p͈A@mhC"vo0yVjS:v]=|W$?d$$-ӓEvNv}k]1qjGglv%?O?6H;ӈWtf~낃9}yDtWʪS;MCޛ+rAi67]֝(E8T:*7>*5M8()cԠbr+Ϊlg_BDZ1]bBrVkź ۠=ASŠ;.eNaA//+#S4# u}Gg*S82L."4jQtQNjD֣G;LRUA޺To][6(ꎠA T&Ha$6x40I.GF a b}3;[w ZWtkB ftՖTeԭuE@"l44nz#{vwF EJH~loneS7Etz\<}璙Ƌ솚Vpw9VC۟`X|;~Z=tHeV̿Xoe|t6t3vU{Tt>&3޺lھO[6Agow~aG4`sF6Fi~ G 3^|.N6GPѥL4`sAW@; TttmLuѝHANstA9SpA1!c+.Rci*]Iw 񠳿~K\KݔumԿ̸ּgV]Rw;zan]K]ppUm{wSh ԹVp20`,ѥ?Z@:ڿ00L]mҼ?ul l.e~ Zզ-.ltm?^pNA'jO//V:dS(v6@:)tt@: E i iD]1: 녠+Id]ߚE6 ~D'OʹDHϻ;qZWеVs-ݟewq:7UWct] -OU:Ƀ(:ZmLq:ijي+$EGPV!z=iIЭ@bu!.W A 9J咭jkyQVA =j񂨏[?^:I#[Y iy٧"`x׿t(MF\V)t jС4!vrX : g_f"4&]lwk@5;l{StEvfײWX%?gA#ӆ|[<3JSݯ'j8o/Y}u;;=7^dTt~?WW;bn}?/&Pkj)Aw$ OM-3M~y ٖ]^ ̋ٽ'nR))CnsuӴ]̖ 7=e{mWnڬ2hh.@rʽ]Y/m}\\T?4}kkz>FvmvzbEfl<ϾӞguqN+O٫k<~ S,ߓhd0+cQgOy18~{;ߖY ev.j_?+zFp?]7|}Sm>+󣧦ooޓޯ_h ?{{ѳko` ?oȠ]rA'oM˧׃lʊnτPYJA{Mm݌_Č>cm+vU2\pʉmN 2U1m|`U[g.~yk􆁮rA'শ^:O;eEݾ)Iݾ|kՐ 2lA# 3?y~tPBtJ[[p5 Cm@7'w>_eQ|0+ {.2m:v`!aîׂҨ-ދ%oC/x?JS_J/UՖL(Iev]P!IE2QEӾ}?3?[m~cNn[i4+8>d(rfxpEձ^U퐊.а|h I@l_.TU 3[y~ti:t(Ma[?/vq 1?05n]#iGfJk`Vbry vuAw[UēWW0>PG # ms|,ߥX,The@lu~wIkˬEorr?JSnxPD Qb,k o=3><kz;d}m[ưִ.7,l_#ȡ}g 7B76|f&̤j ԰\=[7K7vI)A"};HrdjA~=lx5L|xXA ~Gwȃppo{>훜 .of("F-/}#g_ b߶hCO֘1UUR b'Rcjӡ4.%GR L 4t(MF\V)t jС4"?\}RA}RA}RA}RAK3eW_l6%:,zώ#Tr>|~ 6}ﳫSw>D;1"FOƵr|AƳ͚ݔrl؍tV~tT}v ;59UdNQ_JV}]':P+`|@L w-߭h~vz |!,f}m]׿~_yfr= or궴IyyIgyJؠrMkؿƠk9?h;I `\eй;;[w -b0QӔ@of>5_M[hܠN}b۩nXB zJàu_OMPcN>s~th;I `\E' uAch>NyR|Yo6zw ie5Gt-R4Tm}~h;I `\Z / 2u ]˷~ͅמWcikUtM`uEM܃.ޟmڎ?}RW֥~[;NH;p䃱llV&]nԄ`s6`Jny{]cT/^{PLv3wݺV ߴ1U[v:F1Tޗ5L{ŠD@Em(> 5d0_.jǭ+= /M06;t/6}^kwIx}dCӮ*W}#˗vֻc[Tcǰ{Ce⺎#uRWtH jpZ'%qti9\<}RA}RA}RWt$$$$$$$$$5oKIENDB`coq-8.15.0/doc/sphinx/_static/diffs-coqtop-compacted.png000066400000000000000000000066021417001151100231270ustar00rootroot00000000000000PNG  IHDREzsRGBgAMA a pHYse IDATx^=J`x jdbWdB$TM84 !qgKkG1Dt7`;*M}jLU=N5U!WQMsM9^®NY3t r]W 4}7juɠi/gSm޶}[YusvF֋SOm'b±2Ձδ)Fv!۶o! moOz':F#3e%!hV*Q<T2ڠ-mM=iYZfvolP h]NnO􅁦o 3.ٺ}-hΜqe64ڦO k>I  b{i/e /q Ppa2~զjPon\fsmzS,^U{j,L6㙖&e}MgW!{;;HDyE",wPi( A",uPG",8<,L7S^3T#+~ԦjڔQ`1L2|6(Kl aѱpiÐ؀jIݛѺQ`1Pֶ%Xhzvnk/ݘk+%who~ hV數w^ǟ*u/bJPIl//brE7XLC Q`D'X$<" (O4}lcۣ,Q`1HώlcۭPFn(OHDyE",Q`D'XLBD~~e/ί3?~^Ftc(?4{n!ƾ\[$>6:4b=X /u؁1ДbGfoҮKͯ?vO߰?_Kh( tҘ2ׯ#Oe]>C?6RsmwqujSbX)Pp^"͞Ob D; wᆴ#m@K?HUh C{@X87ZRL+YqhvzX6z˹vAt-Zvu''~`1vb"߻`~lgt7K:Q؇7ÝUj( ! h?32_gIyokf'~`1vbk:r!vf@kaߣ =ܱ,˯k]fK~lWo|l'&™ tz/~u} =5^/Cē'ůIesE[:zSl!v-|/}G]{)R]b{.>^Hʷh@qЁ ,RB,'1b:p"C,Q`D'X$<" (OHDyE",Q`D'X$<" (OHDy p*PRƥ1LS(i8 S5GȚ0N-{@oq/ `M@2u>kd6mEmN]wjY&lS SҶO!}KXS7Nq' ׯ}ikr.> 9xmS.w ~LZA!eT<2 ;nr nv28`ꖷۗ{C8\*:IzQu'hӻ?ܞ]н= 6/aV\[ضŃZvP069 ̹^ڿOy莯^%iHܴ.h@J%=qA{/I:l_$]{ ۓ}R- }=l åݖ\m0tgAgXG}8p^{\wr6B]e߽?KE_rc AֽR u M1kmS 6}H\ozzdo _.CŌ_`&OHDyE",Q`;cfߥ&ʊhK _;Н!ݺhoVHD mCȲ]L tWop? ]޶e} ih-7?.{cq?O=DN*QQGQemr: 4=cCZ @rr;/ 4=cmg?hΜqe64ڦO k>I  b{i/e$>~GE@맮 u:86X]qhzx3E2Q<צ|jڔ:WDi6 2?*S,% Kmm7yY=Q<ɓJXv ӆچ@ۖmŵ843{_ڣ bZNð3tL$?2UBCmفǰ{.Ѝ][/C{ +Ŵ q Tk/_O::WDi~PCQH k sEt=ZsH\],\h?Gb ~+E",Q`D'X$'`1 <:D,G@Ŵ*I2~LBJ{ޫb_[ٿ?ivPI}믧݅nI$h km DŴėOq_V?$`nݯϪu[7{>'`1mc[f-elZfhiz+2i <nzv?ih>'`1 <:D,G@E",Q`D'X$<" (OHDyE",Q`DO吖tIENDB`coq-8.15.0/doc/sphinx/_static/diffs-coqtop-multigoal.png000066400000000000000000000107711417001151100231670ustar00rootroot00000000000000PNG  IHDRQohssRGBgAMA a pHYseIDATx^ەaS ̛C C&n- *@-'lEt1NS!1}dc<74<S}vP@Pu]71t3 AFUI;WcX3,h. 
[... remainder of binary PNG image data from the preceding _static file omitted ...]
coq-8.15.0/doc/sphinx/_static/diffs-coqtop-on.png  [binary PNG image data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-coqtop-on3.png  [binary PNG image data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-error-message.png  [binary PNG image data omitted]
coq-8.15.0/doc/sphinx/_static/diffs-show-proof.png  [binary PNG image data omitted]
[fragments of a notation-rendering stylesheet (.prodn-table, .repeat-wrapper, hint classes); file header lost in this dump, content omitted]
coq-8.15.0/doc/sphinx/_static/notations.js  [JavaScript adding tooltips to notation markup; content garbled in this dump, omitted]
coq-8.15.0/doc/sphinx/_static/pre-text.css  [stylesheet; content garbled in this dump, omitted]
[residue of an HTML "Other versions" template fragment omitted]
coq-8.15.0/doc/sphinx/addendum/000077500000000000000000000000001417001151100162155ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/addendum/canonical-structures.rst000066400000000000000000000001561417001151100231210ustar00rootroot00000000000000:orphan: .. raw:: html coq-8.15.0/doc/sphinx/addendum/extended-pattern-matching.rst000066400000000000000000000001521417001151100240100ustar00rootroot00000000000000:orphan: .. raw:: html coq-8.15.0/doc/sphinx/addendum/extraction.rst000066400000000000000000000644531417001151100211430ustar00rootroot00000000000000.. _extraction: Program extraction ================== :Authors: Jean-Christophe Filliâtre and Pierre Letouzey We present here the Coq extraction commands, used to build certified and relatively efficient functional programs, extracting them from either Coq functions or Coq proofs of specifications. The functional languages available as output are currently OCaml, Haskell and Scheme. In the following, "ML" will be used (abusively) to refer to any of the three. Before using any of the commands or options described in this chapter, the extraction framework should first be loaded explicitly via ``Require Extraction``, or via the more robust ``From Coq Require Extraction``. Note that in earlier versions of Coq, these commands and options were directly available without any preliminary ``Require``. .. coqtop:: in Require Extraction. Generating ML Code ------------------- .. note:: In the following, a qualified identifier :token:`qualid` can be used to refer to any kind of Coq global "object" : :term:`constant`, inductive type, inductive constructor or module name. The next two commands are meant to be used for rapid preview of extraction. They both display extracted term(s) inside Coq. .. cmd:: Extraction @qualid Extraction of the mentioned object in the Coq toplevel. .. cmd:: Recursive Extraction {+ @qualid } Recursive extraction of all the mentioned objects and all their dependencies in the Coq toplevel. All the following commands produce real ML files. User can choose to produce one monolithic file or one file per Coq library. .. cmd:: Extraction @string {+ @qualid } Recursive extraction of all the mentioned objects and all their dependencies in one monolithic file :token:`string`. Global and local identifiers are renamed according to the chosen ML language to fulfill its syntactic conventions, keeping original names as much as possible. .. cmd:: Extraction Library @ident Extraction of the whole Coq library :n:`@ident.v` to an ML module :n:`@ident.ml`. In case of name clash, identifiers are here renamed using prefixes ``coq_`` or ``Coq_`` to ensure a session-independent renaming. .. cmd:: Recursive Extraction Library @ident Extraction of the Coq library :n:`@ident.v` and all other modules :n:`@ident.v` depends on. .. cmd:: Separate Extraction {+ @qualid } Recursive extraction of all the mentioned objects and all their dependencies, just as :n:`Extraction @string {+ @qualid }`, but instead of producing one monolithic file, this command splits the produced code in separate ML files, one per corresponding Coq ``.v`` file. This command is hence quite similar to :cmd:`Recursive Extraction Library`, except that only the needed parts of Coq libraries are extracted instead of the whole. The naming convention in case of name clash is the same one as :cmd:`Extraction Library`: identifiers are here renamed using prefixes ``coq_`` or ``Coq_``. 
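As a quick illustration of the commands above, one might first preview an extraction inside Coq and then materialize it in a file. This is only a minimal sketch: ``Nat.add`` is an arbitrary example of an extractable constant and ``"add"`` an arbitrary output name.

.. coqdoc::

   Require Extraction.

   (* Preview: display Nat.add and all its dependencies, extracted,
      inside the current Coq session. *)
   Recursive Extraction Nat.add.

   (* Produce one monolithic file (add.ml for the default OCaml target)
      containing Nat.add and everything it depends on. *)
   Extraction "add" Nat.add.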
The following command is meant to help automatic testing of the extraction, see for instance the ``test-suite`` directory in the Coq sources. .. cmd:: Extraction TestCompile {+ @qualid } All the mentioned objects and all their dependencies are extracted to a temporary OCaml file, just as in ``Extraction "file"``. Then this temporary file and its signature are compiled with the same OCaml compiler used to built Coq. This command succeeds only if the extraction and the OCaml compilation succeed. It fails if the current target language of the extraction is not OCaml. Extraction Options ------------------- Setting the target language ~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. cmd:: Extraction Language @language .. insertprodn language language .. prodn:: language ::= OCaml | Haskell | Scheme | JSON The ability to fix target language is the first and most important of the extraction options. Default is ``OCaml``. The JSON output is mostly for development or debugging: it contains the raw ML term produced as an intermediary target. Inlining and optimizations ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Since OCaml is a strict language, the extracted code has to be optimized in order to be efficient (for instance, when using induction principles we do not want to compute all the recursive calls but only the needed ones). So the extraction mechanism provides an automatic optimization routine that will be called each time the user wants to generate an OCaml program. The optimizations can be split in two groups: the type-preserving ones (essentially constant inlining and reductions) and the non-type-preserving ones (some function abstractions of dummy types are removed when it is deemed safe in order to have more elegant types). Therefore some :term:`constants ` may not appear in the resulting monolithic OCaml program. In the case of modular extraction, even if some inlining is done, the inlined constants are nevertheless printed, to ensure session-independent programs. Concerning Haskell, type-preserving optimizations are less useful because of laziness. We still make some optimizations, for example in order to produce more readable code. The type-preserving optimizations are controlled by the following Coq flags and commands: .. flag:: Extraction Optimize Default is on. This :term:`flag` controls all type-preserving optimizations made on the ML terms (mostly reduction of dummy beta/iota redexes, but also simplifications on Cases, etc). Turn this flag off if you want a ML term as close as possible to the Coq term. .. flag:: Extraction Conservative Types Default is off. This :term:`flag` controls the non-type-preserving optimizations made on ML terms (which try to avoid function abstraction of dummy types). Turn this flag on to make sure that ``e:t`` implies that ``e':t'`` where ``e'`` and ``t'`` are the extracted code of ``e`` and ``t`` respectively. .. flag:: Extraction KeepSingleton Default is off. Normally, when the extraction of an inductive type produces a singleton type (i.e. a type with only one constructor, and only one argument to this constructor), the inductive structure is removed and this type is seen as an alias to the inner type. The typical example is ``sig``. This :term:`flag` allows disabling this optimization when one wishes to preserve the inductive structure of types. .. flag:: Extraction AutoInline Default is on. The extraction mechanism inlines the :term:`bodies ` of some defined :term:`constants `, according to some heuristics like size of bodies, uselessness of some arguments, etc. 
Those heuristics are not always perfect; if you want to disable this feature, turn this :term:`flag` off. .. cmd:: Extraction Inline {+ @qualid } In addition to the automatic inline feature, the :term:`constants ` mentioned by this command will always be inlined during extraction. .. cmd:: Extraction NoInline {+ @qualid } Conversely, the constants mentioned by this command will never be inlined during extraction. .. cmd:: Print Extraction Inline Prints the current state of the table recording the custom inlinings declared by the two previous commands. .. cmd:: Reset Extraction Inline Empties the table recording the custom inlinings (see the previous commands). **Inlining and printing of a constant declaration:** The user can explicitly ask for a :term:`constant` to be extracted by two means: * by mentioning it on the extraction command line * by extracting the whole Coq module of this :term:`constant`. In both cases, the declaration of this :term:`constant` will be present in the produced file. But this same :term:`constant` may or may not be inlined in the following terms, depending on the automatic/custom inlining mechanism. For the :term:`constants ` non-explicitly required but needed for dependency reasons, there are two cases: * If an inlining decision is taken, whether automatically or not, all occurrences of this :term:`constant` are replaced by its extracted :term:`body`, and this :term:`constant` is not declared in the generated file. * If no inlining decision is taken, the :term:`constant` is normally declared in the produced file. Extra elimination of useless arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The following command provides some extra manual control on the code elimination performed during extraction, in a way which is independent but complementary to the main elimination principles of extraction (logical parts and types). .. cmd:: Extraction Implicit @qualid [ {* {| @ident | @integer } } ] Declares some arguments of :token:`qualid` as implicit, meaning that they are useless in extracted code. The extracted code will omit these arguments. Here :token:`qualid` can be any function or inductive constructor, and the :token:`ident`\s are the names of the useless arguments. Arguments can can also be identified positionally by :token:`integer`\s starting from 1. When an actual extraction takes place, an error is normally raised if the :cmd:`Extraction Implicit` declarations cannot be honored, that is if any of the implicit arguments still occurs in the final code. This behavior can be relaxed via the following flag: .. flag:: Extraction SafeImplicits Default is on. When this :term:`flag` is off, a warning is emitted instead of an error if some implicit arguments still occur in the final code of an extraction. This way, the extracted code may be obtained nonetheless and reviewed manually to locate the source of the issue (in the code, some comments mark the location of these remaining implicit arguments). Note that this extracted code might not compile or run properly, depending of the use of these remaining implicit arguments. Realizing axioms ~~~~~~~~~~~~~~~~ Extraction will fail if it encounters an informative axiom not realized. A warning will be issued if it encounters a logical axiom, to remind the user that inconsistent logical axioms may lead to incorrect or non-terminating extracted terms. It is possible to assume some axioms while developing a proof. 
Since these axioms can be any kind of proposition or object or type, they may perfectly well have some computational content. But a program must be a closed term, and of course the system cannot guess the program which realizes an axiom. Therefore, it is possible to tell the system what ML term corresponds to a given axiom. .. cmd:: Extract Constant @qualid {* @string__tv } => {| @ident | @string } Give an ML extraction for the given :term:`constant`. :n:`@string__tv` If the type scheme axiom is an arity (a sequence of products followed by a sort), then some type variables have to be given (as quoted strings). The number of type variables is checked by the system. For example: .. coqtop:: in Axiom Y : Set -> Set -> Set. Extract Constant Y "'a" "'b" => " 'a * 'b ". .. cmd:: Extract Inlined Constant @qualid => {| @ident | @string } Same as the previous one, except that the given ML terms will be inlined everywhere instead of being declared via a ``let``. .. note:: This command is sugar for an :cmd:`Extract Constant` followed by a :cmd:`Extraction Inline`. Hence a :cmd:`Reset Extraction Inline` will have an effect on the realized and inlined axiom. .. caution:: It is the responsibility of the user to ensure that the ML terms given to realize the axioms do have the expected types. In fact, the strings containing realizing code are just copied to the extracted files. The extraction recognizes whether the realized axiom should become a ML type constant or a ML object declaration. For example: .. coqtop:: in Axiom X:Set. Axiom x:X. Extract Constant X => "int". Extract Constant x => "0". Realizing an axiom via :cmd:`Extract Constant` is only useful in the case of an informative axiom (of sort ``Type`` or ``Set``). A logical axiom has no computational content and hence will not appear in extracted terms. But a warning is nonetheless issued if extraction encounters a logical axiom. This warning reminds user that inconsistent logical axioms may lead to incorrect or non-terminating extracted terms. If an informative axiom has not been realized before an extraction, a warning is also issued and the definition of the axiom is filled with an exception labeled ``AXIOM TO BE REALIZED``. The user must then search these exceptions inside the extracted file and replace them by real code. Realizing inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~ The system also provides a mechanism to specify ML terms for inductive types and constructors. For instance, the user may want to use the ML native boolean type instead of the Coq one. The syntax is the following: .. cmd:: Extract Inductive @qualid => {| @ident | @string } [ {* {| @ident | @string } } ] {? @string__match } Give an ML extraction for the given inductive type. You must specify extractions for the type itself (the initial :n:`{| @ident | @string }`) and all its constructors (the :n:`[ {* {| @ident | @string } } ]`). In this form, the ML extraction must be an ML inductive datatype, and the native pattern matching of the language will be used. When the initial :n:`{| @ident | @string }` matches the name of the type of characters or strings (``char`` and ``string`` for OCaml, ``Prelude.Char`` and ``Prelude.String`` for Haskell), extraction of literals is handled in a specialized way, so as to generate literals in the target language. This feature requires the type designated by :n:`@qualid` to be registered as the standard char or string type, using the :cmd:`Register` command. :n:`@string__match` Indicates how to perform pattern matching over this inductive type. 
In this form, the ML extraction could be an arbitrary type. For an inductive type with :math:`k` constructors, the function used to emulate the pattern matching should expect :math:`k+1` arguments, first the :math:`k` branches in functional form, and then the inductive element to destruct. For instance, the match branch ``| S n => foo`` gives the functional form ``(fun n -> foo)``. Note that a constructor with no arguments is considered to have one unit argument, in order to block early evaluation of the branch: ``| O => bar`` leads to the functional form ``(fun () -> bar)``. For instance, when extracting :g:`nat` into OCaml ``int``, the code to be provided has type: ``(unit->'a)->(int->'a)->int->'a``. .. caution:: As for :cmd:`Extract Constant`, this command should be used with care: * The ML code provided by the user is currently **not** checked at all by extraction, even for syntax errors. * Extracting an inductive type to a pre-existing ML inductive type is quite sound. But extracting to a general type (by providing an ad-hoc pattern matching) will often **not** be fully rigorously correct. For instance, when extracting ``nat`` to OCaml ``int``, it is theoretically possible to build ``nat`` values that are larger than OCaml ``max_int``. It is the user's responsibility to be sure that no overflow or other bad events occur in practice. * Translating an inductive type to an arbitrary ML type does **not** magically improve the asymptotic complexity of functions, even if the ML type is an efficient representation. For instance, when extracting ``nat`` to OCaml ``int``, the function ``Nat.mul`` stays quadratic. It might be interesting to associate this translation with some specific :cmd:`Extract Constant` when primitive counterparts exist. Typical examples are the following: .. coqtop:: in Extract Inductive unit => "unit" [ "()" ]. Extract Inductive bool => "bool" [ "true" "false" ]. Extract Inductive sumbool => "bool" [ "true" "false" ]. .. note:: When extracting to OCaml, if an inductive constructor or type has arity 2 and the corresponding string is enclosed by parentheses, and the string meets OCaml's lexical criteria for an infix symbol, then the rest of the string is used as an infix constructor or type. .. coqtop:: in Extract Inductive list => "list" [ "[]" "(::)" ]. Extract Inductive prod => "(*)" [ "(,)" ]. As an example of translation to a non-inductive datatype, let's turn ``nat`` into OCaml ``int`` (see caveat above): .. coqtop:: in Extract Inductive nat => int [ "0" "succ" ] "(fun fO fS n -> if n=0 then fO () else fS (n-1))". Avoiding conflicts with existing filenames ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When using :cmd:`Extraction Library`, the names of the extracted files directly depend on the names of the Coq files. It may happen that these filenames are in conflict with already existing files, either in the standard library of the target language or in other code that is meant to be linked with the extracted code. For instance the module ``List`` exists both in Coq and in OCaml. It is possible to instruct the extraction not to use particular filenames. .. cmd:: Extraction Blacklist {+ @ident } Instruct the extraction to avoid using these names as filenames for extracted code. .. cmd:: Print Extraction Blacklist Show the current list of filenames the extraction should avoid. .. cmd:: Reset Extraction Blacklist Allow the extraction to use any filename. For OCaml, a typical use of these commands is ``Extraction Blacklist String List``. 
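Expanding slightly on that typical use, the following sketch forbids the generation of files named ``String.ml`` and ``List.ml`` (which would clash with the OCaml standard library), inspects the resulting table, and finally clears it:

.. coqdoc::

   Require Extraction.

   (* Never generate files named String.ml or List.ml. *)
   Extraction Blacklist String List.

   (* Show the filenames currently being avoided. *)
   Print Extraction Blacklist.

   (* Drop these constraints again. *)
   Reset Extraction Blacklist.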
Additional settings ~~~~~~~~~~~~~~~~~~~ .. opt:: Extraction File Comment @string This :term:`option` provides a comment that is included at the beginning of the output files. .. opt:: Extraction Flag @natural This :term:`option` controls which optimizations are used during extraction, providing a finer-grained control than :flag:`Extraction Optimize`. The bits of :token:`natural` are used as a bit mask. Keeping an option off keeps the extracted ML more similar to the Coq term. Values are: +-----+-------+----------------------------------------------------------------+ | Bit | Value | Optimization (default is on unless noted otherwise) | +-----+-------+----------------------------------------------------------------+ | 0 | 1 | Remove local dummy variables | +-----+-------+----------------------------------------------------------------+ | 1 | 2 | Use special treatment for fixpoints | +-----+-------+----------------------------------------------------------------+ | 2 | 4 | Simplify case with iota-redux | +-----+-------+----------------------------------------------------------------+ | 3 | 8 | Factor case branches as functions | +-----+-------+----------------------------------------------------------------+ | 4 | 16 | (not available, default false) | +-----+-------+----------------------------------------------------------------+ | 5 | 32 | Simplify case as function of one argument | +-----+-------+----------------------------------------------------------------+ | 6 | 64 | Simplify case by swapping case and lambda | +-----+-------+----------------------------------------------------------------+ | 7 | 128 | Some case optimization | +-----+-------+----------------------------------------------------------------+ | 8 | 256 | Push arguments inside a letin | +-----+-------+----------------------------------------------------------------+ | 9 | 512 | Use linear let reduction (default false) | +-----+-------+----------------------------------------------------------------+ | 10 | 1024 | Use linear beta reduction (default false) | +-----+-------+----------------------------------------------------------------+ .. flag:: Extraction TypeExpand If this :term:`flag` is set, fully expand Coq types in ML. See the Coq source code to learn more. Differences between Coq and ML type systems ---------------------------------------------- Due to differences between Coq and ML type systems, some extracted programs are not directly typable in ML. We now solve this problem (at least in OCaml) by adding when needed some unsafe casting ``Obj.magic``, which give a generic type ``'a`` to any term. First, if some part of the program is *very* polymorphic, there may be no ML type for it. In that case the extraction to ML works alright but the generated code may be refused by the ML type checker. A very well known example is the ``distr-pair`` function: .. coqtop:: in Definition dp {A B:Type}(x:A)(y:B)(f:forall C:Type, C->C) := (f A x, f B y). In OCaml, for instance, the direct extracted term would be:: let dp x y f = Pair((f () x),(f () y)) and would have type:: dp : 'a -> 'a -> (unit -> 'a -> 'b) -> ('b,'b) prod which is not its original type, but a restriction. We now produce the following correct version:: let dp x y f = Pair ((Obj.magic f () x), (Obj.magic f () y)) Secondly, some Coq definitions may have no counterpart in ML. This happens when there is a quantification over types inside the type of a constructor; for example: .. coqtop:: in Inductive anything : Type := dummy : forall A:Set, A -> anything. 
which corresponds to the definition of an ML dynamic type. In OCaml, we must cast any argument of the constructor dummy (no GADT are produced yet by the extraction). Even with those unsafe castings, you should never get error like ``segmentation fault``. In fact even if your program may seem ill-typed to the OCaml type checker, it can't go wrong : it comes from a Coq well-typed terms, so for example inductive types will always have the correct number of arguments, etc. Of course, when launching manually some extracted function, you should apply it to arguments of the right shape (from the Coq point-of-view). More details about the correctness of the extracted programs can be found in :cite:`Let02`. We have to say, though, that in most "realistic" programs, these problems do not occur. For example all the programs of Coq library are accepted by the OCaml type checker without any ``Obj.magic`` (see examples below). Some examples ------------- We present here two examples of extraction, taken from the Coq Standard Library. We choose OCaml as the target language, but everything, with slight modifications, can also be done in the other languages supported by extraction. We then indicate where to find other examples and tests of extraction. A detailed example: Euclidean division ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The file ``Euclid`` contains the proof of Euclidean division. The natural numbers used here are unary, represented by the type ``nat``, which is defined by two constructors ``O`` and ``S``. This module contains a theorem ``eucl_dev``, whose type is:: forall b:nat, b > 0 -> forall a:nat, diveucl a b where ``diveucl`` is a type for the pair of the quotient and the modulo, plus some logical assertions that disappear during extraction. We can now extract this program to OCaml: .. coqtop:: reset all Require Extraction. Require Import Euclid Wf_nat. Extraction Inline gt_wf_rec lt_wf_rec induction_ltof2. Recursive Extraction eucl_dev. The inlining of ``gt_wf_rec`` and others is not mandatory. It only enhances readability of extracted code. You can then copy-paste the output to a file ``euclid.ml`` or let Coq do it for you with the following command:: Extraction "euclid" eucl_dev. Let us play the resulting program (in an OCaml toplevel):: #use "euclid.ml";; type nat = O | S of nat type sumbool = Left | Right val sub : nat -> nat -> nat = val le_lt_dec : nat -> nat -> sumbool = val le_gt_dec : nat -> nat -> sumbool = type diveucl = Divex of nat * nat val eucl_dev : nat -> nat -> diveucl = # eucl_dev (S (S O)) (S (S (S (S (S O)))));; - : diveucl = Divex (S (S O), S O) It is easier to test on OCaml integers:: # let rec nat_of_int = function 0 -> O | n -> S (nat_of_int (n-1));; val nat_of_int : int -> nat = # let rec int_of_nat = function O -> 0 | S p -> 1+(int_of_nat p);; val int_of_nat : nat -> int = # let div a b = let Divex (q,r) = eucl_dev (nat_of_int b) (nat_of_int a) in (int_of_nat q, int_of_nat r);; val div : int -> int -> int * int = # div 173 15;; - : int * int = (11, 8) Note that these ``nat_of_int`` and ``int_of_nat`` are now available via a mere ``Require Import ExtrOcamlIntConv`` and then adding these functions to the list of functions to extract. This file ``ExtrOcamlIntConv.v`` and some others in ``plugins/extraction/`` are meant to help building concrete program via extraction. Extraction's horror museum ~~~~~~~~~~~~~~~~~~~~~~~~~~ Some pathological examples of extraction are grouped in the file ``test-suite/success/extraction.v`` of the sources of Coq. 
Users' Contributions ~~~~~~~~~~~~~~~~~~~~ Several of the Coq Users' Contributions use extraction to produce certified programs. In particular the following ones have an automatic extraction test: * ``additions`` : https://github.com/coq-contribs/additions * ``bdds`` : https://github.com/coq-contribs/bdds * ``canon-bdds`` : https://github.com/coq-contribs/canon-bdds * ``chinese`` : https://github.com/coq-contribs/chinese * ``continuations`` : https://github.com/coq-contribs/continuations * ``coq-in-coq`` : https://github.com/coq-contribs/coq-in-coq * ``exceptions`` : https://github.com/coq-contribs/exceptions * ``firing-squad`` : https://github.com/coq-contribs/firing-squad * ``founify`` : https://github.com/coq-contribs/founify * ``graphs`` : https://github.com/coq-contribs/graphs * ``higman-cf`` : https://github.com/coq-contribs/higman-cf * ``higman-nw`` : https://github.com/coq-contribs/higman-nw * ``hardware`` : https://github.com/coq-contribs/hardware * ``multiplier`` : https://github.com/coq-contribs/multiplier * ``search-trees`` : https://github.com/coq-contribs/search-trees * ``stalmarck`` : https://github.com/coq-contribs/stalmarck Note that ``continuations`` and ``multiplier`` are a bit particular. They are examples of developments where ``Obj.magic`` is needed. This is probably due to a heavy use of impredicativity. After compilation, those two examples run nonetheless, thanks to the correction of the extraction :cite:`Let02`. coq-8.15.0/doc/sphinx/addendum/generalized-rewriting.rst000066400000000000000000001140311417001151100232500ustar00rootroot00000000000000.. _generalizedrewriting: Generalized rewriting ===================== :Author: Matthieu Sozeau This chapter presents the extension of several equality related tactics to work over user-defined structures (called setoids) that are equipped with ad-hoc equivalence relations meant to behave as equalities. Actually, the tactics have also been generalized to relations weaker than equivalences (e.g. rewriting systems). The toolbox also extends the automatic rewriting capabilities of the system, allowing the specification of custom strategies for rewriting. This documentation is adapted from the previous setoid documentation by Claudio Sacerdoti Coen (based on previous work by Clément Renard). The new implementation is a drop-in replacement for the old one [#tabareau]_, hence most of the documentation still applies. The work is a complete rewrite of the previous implementation, based on the typeclass infrastructure. It also improves on and generalizes the previous implementation in several ways: + User-extensible algorithm. The algorithm is separated into two parts: generation of the rewriting constraints (written in ML) and solving these constraints using typeclass resolution. As typeclass resolution is extensible using tactics, this allows users to define general ways to solve morphism constraints. + Subrelations. An example extension to the base algorithm is the ability to define one relation as a subrelation of another so that morphism declarations on one relation can be used automatically for the other. This is done purely using tactics and typeclass search. + Rewriting under binders. It is possible to rewrite under binders in the new implementation, if one provides the proper morphisms. Again, most of the work is handled in the tactics. + First-class morphisms and signatures. 
Signatures and morphisms are ordinary Coq terms, hence they can be manipulated inside Coq, put inside structures and lemmas about them can be proved inside the system. Higher-order morphisms are also allowed. + Performance. The implementation is based on a depth-first search for the first solution to a set of constraints which can be as fast as linear in the size of the term, and the size of the proof term is linear in the size of the original term. Besides, the extensibility allows the user to customize the proof search if necessary. .. [#tabareau] Nicolas Tabareau helped with the gluing. Introduction to generalized rewriting ------------------------------------- Relations and morphisms ~~~~~~~~~~~~~~~~~~~~~~~ A parametric *relation* ``R`` is any term of type ``forall (x1 : T1) ... (xn : Tn), relation A``. The expression ``A``, which depends on ``x1 ... xn`` , is called the *carrier* of the relation and ``R`` is said to be a relation over ``A``; the list ``x1,...,xn`` is the (possibly empty) list of parameters of the relation. .. example:: Parametric relation It is possible to implement finite sets of elements of type ``A`` as unordered lists of elements of type ``A``. The function ``set_eq: forall (A : Type), relation (list A)`` satisfied by two lists with the same elements is a parametric relation over ``(list A)`` with one parameter ``A``. The type of ``set_eq`` is convertible with ``forall (A : Type), list A -> list A -> Prop.`` An *instance* of a parametric relation ``R`` with n parameters is any term ``(R t1 ... tn)``. Let ``R`` be a relation over ``A`` with ``n`` parameters. A term is a parametric proof of reflexivity for ``R`` if it has type ``forall (x1 : T1) ... (xn : Tn), reflexive (R x1 ... xn)``. Similar definitions are given for parametric proofs of symmetry and transitivity. .. example:: Parametric relation (continued) The ``set_eq`` relation of the previous example can be proved to be reflexive, symmetric and transitive. A parametric unary function ``f`` of type ``forall (x1 : T1) ... (xn : Tn), A1 -> A2`` covariantly respects two parametric relation instances ``R1`` and ``R2`` if, whenever ``x``, ``y`` satisfy ``R1 x y``, their images (``f x``) and (``f y``) satisfy ``R2 (f x) (f y)``. An ``f`` that respects its input and output relations will be called a unary covariant *morphism*. We can also say that ``f`` is a monotone function with respect to ``R1`` and ``R2`` . The sequence ``x1 ... xn`` represents the parameters of the morphism. Let ``R1`` and ``R2`` be two parametric relations. The *signature* of a parametric morphism of type ``forall (x1 : T1) ... (xn : Tn), A1 -> A2`` that covariantly respects two instances :math:`I_{R_1}` and :math:`I_{R_2}` of ``R1`` and ``R2`` is written :math:`I_{R_1} ++> I_{R_2}`. Notice that the special arrow ++>, which reminds the reader of covariance, is placed between the two relation instances, not between the two carriers. The signature relation instances and morphism will be typed in a context introducing variables for the parameters. The previous definitions are extended straightforwardly to n-ary morphisms, that are required to be simultaneously monotone on every argument. Morphisms can also be contravariant in one or more of their arguments. A morphism is contravariant on an argument associated with the relation instance :math:`R` if it is covariant on the same argument when the inverse relation :math:`R^{−1}` (``inverse R`` in Coq) is considered. The special arrow ``-->`` is used in signatures for contravariant morphisms. 
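Concretely, these arrows are notations for relation combinators defined in ``Coq.Classes.Morphisms``, so a signature is an ordinary Coq term that can be checked and manipulated like any other. As a purely illustrative sketch (``le`` on natural numbers is just an arbitrary example of a nonsymmetric order), the following signature is covariant in its first argument and contravariant in its second one:

.. coqdoc::

   Require Import Relation_Definitions Morphisms.

   (* A signature covariant in its first argument and contravariant in
      its second one, seen as a first-class relation on nat -> nat -> nat. *)
   Check ((le ++> le --> le)%signature : relation (nat -> nat -> nat)).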
Functions having arguments related by symmetric relations instances are both covariant and contravariant in those arguments. The special arrow ``==>`` is used in signatures for morphisms that are both covariant and contravariant. An instance of a parametric morphism :math:`f` with :math:`n` parameters is any term :math:`f \, t_1 \ldots t_n`. .. example:: Morphisms Continuing the previous example, let ``union: forall (A : Type), list A -> list A -> list A`` perform the union of two sets by appending one list to the other. ``union`` is a binary morphism parametric over ``A`` that respects the relation instance ``(set_eq A)``. The latter condition is proved by showing: .. coqdoc:: forall (A: Type) (S1 S1' S2 S2': list A), set_eq A S1 S1' -> set_eq A S2 S2' -> set_eq A (union A S1 S2) (union A S1' S2'). The signature of the function ``union A`` is ``set_eq A ==> set_eq A ==> set_eq A`` for all ``A``. .. example:: Contravariant morphisms The division function ``Rdiv : R -> R -> R`` is a morphism of signature ``le ++> le --> le`` where ``le`` is the usual order relation over real numbers. Notice that division is covariant in its first argument and contravariant in its second argument. Leibniz equality is a relation and every function is a morphism that respects Leibniz equality. Unfortunately, Leibniz equality is not always the intended equality for a given structure. In the next section we will describe the commands to register terms as parametric relations and morphisms. Several tactics that deal with equality in Coq can also work with the registered relations. The exact list of tactics will be given :ref:`in this section `. For instance, the tactic reflexivity can be used to solve a goal ``R n n`` whenever ``R`` is an instance of a registered reflexive relation. However, the tactics that replace in a context ``C[]`` one term with another one related by ``R`` must verify that ``C[]`` is a morphism that respects the intended relation. Currently the verification consists of checking whether ``C[]`` is a syntactic composition of morphism instances that respects some obvious compatibility constraints. .. example:: Rewriting Continuing the previous examples, suppose that the user must prove ``set_eq int (union int (union int S1 S2) S2) (f S1 S2)`` under the hypothesis ``H : set_eq int S2 (@nil int)``. It is possible to use the ``rewrite`` tactic to replace the first two occurrences of ``S2`` with ``@nil int`` in the goal since the context ``set_eq int (union int (union int S1 nil) nil) (f S1 S2)``, being a composition of morphisms instances, is a morphism. However the tactic will fail replacing the third occurrence of ``S2`` unless ``f`` has also been declared as a morphism. Adding new relations and morphisms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. cmd:: Add Parametric Relation {* @binder } : @one_term__A @one_term__Aeq {? reflexivity proved by @one_term } {? symmetry proved by @one_term } {? transitivity proved by @one_term } as @ident Declares a parametric relation of :n:`@one_term__A`, which is a `Type`, say `T`, with :n:`@one_term__Aeq`, which is a relation on `T`, i.e. of type `(T -> T -> Prop)`. Thus, if :n:`@one_term__A` is :n:`A: forall α__1 … α__n, Type` then :n:`@one_term__Aeq` is :n:`Aeq: forall α__1 … α__n, (A α__1 … α__n) -> (A α__1 … α__n) -> Prop`, or equivalently, :n:`Aeq: forall α__1 … α__n, relation (A α__1 … α__n)`. :n:`@one_term__A` and :n:`@one_term__Aeq` must be typeable under the context :token:`binder`\s. 
In practice, the :token:`binder`\s usually correspond to the :n:`α`\s The final :token:`ident` gives a unique name to the morphism and it is used by the command to generate fresh names for automatically provided lemmas used internally. Notice that the carrier and relation parameters may refer to the context of variables introduced at the beginning of the declaration, but the instances need not be made only of variables. Also notice that ``A`` is *not* required to be a term having the same parameters as ``Aeq``, although that is often the case in practice (this departs from the previous implementation). To use this command, you need to first import the module ``Setoid`` using the command ``Require Import Setoid``. .. cmd:: Add Relation @one_term @one_term {? reflexivity proved by @one_term } {? symmetry proved by @one_term } {? transitivity proved by @one_term } as @ident If the carrier and relations are not parametric, use this command instead, whose syntax is the same except there is no local context. The proofs of reflexivity, symmetry and transitivity can be omitted if the relation is not an equivalence relation. The proofs must be instances of the corresponding relation definitions: e.g. the proof of reflexivity must have a type convertible to :g:`reflexive (A t1 … tn) (Aeq t′ 1 … t′ n)`. Each proof may refer to the introduced variables as well. .. example:: Parametric relation For Leibniz equality, we may declare: .. coqdoc:: Add Parametric Relation (A : Type) : A (@eq A) [reflexivity proved by @refl_equal A] ... Some tactics (:tacn:`reflexivity`, :tacn:`symmetry`, :tacn:`transitivity`) work only on relations that respect the expected properties. The remaining tactics (:tacn:`replace`, :tacn:`rewrite` and derived tactics such as :tacn:`autorewrite`) do not require any properties over the relation. However, they are able to replace terms with related ones only in contexts that are syntactic compositions of parametric morphism instances declared with the following command. .. cmd:: Add Parametric Morphism {* @binder } : @one_term with signature @term as @ident Declares a parametric morphism :n:`@one_term` of signature :n:`@term`. The final identifier :token:`ident` gives a unique name to the morphism and it is used as the base name of the typeclass instance definition and as the name of the lemma that proves the well-definedness of the morphism. The parameters of the morphism as well as the signature may refer to the context of variables. The command asks the user to prove interactively that the function denoted by the first :token:`ident` respects the relations identified from the signature. .. example:: We start the example by assuming a small theory over homogeneous sets and we declare set equality as a parametric equivalence relation and union of two sets as a parametric morphism. .. coqtop:: in Require Export Setoid. Require Export Relation_Definitions. Set Implicit Arguments. Parameter set : Type -> Type. Parameter empty : forall A, set A. Parameter eq_set : forall A, set A -> set A -> Prop. Parameter union : forall A, set A -> set A -> set A. Axiom eq_set_refl : forall A, reflexive _ (eq_set (A:=A)). Axiom eq_set_sym : forall A, symmetric _ (eq_set (A:=A)). Axiom eq_set_trans : forall A, transitive _ (eq_set (A:=A)). Axiom empty_neutral : forall A (S : set A), eq_set (union S (empty A)) S. Axiom union_compat : forall (A : Type), forall x x' : set A, eq_set x x' -> forall y y' : set A, eq_set y y' -> eq_set (union x y) (union x' y'). 
Add Parametric Relation A : (set A) (@eq_set A) reflexivity proved by (eq_set_refl (A:=A)) symmetry proved by (eq_set_sym (A:=A)) transitivity proved by (eq_set_trans (A:=A)) as eq_set_rel. Add Parametric Morphism A : (@union A) with signature (@eq_set A) ==> (@eq_set A) ==> (@eq_set A) as union_mor. Proof. exact (@union_compat A). Qed. It is possible to reduce the burden of specifying parameters using (maximally inserted) implicit arguments. If ``A`` is always set as maximally implicit in the previous example, one can write: .. coqdoc:: Add Parametric Relation A : (set A) eq_set reflexivity proved by eq_set_refl symmetry proved by eq_set_sym transitivity proved by eq_set_trans as eq_set_rel. Add Parametric Morphism A : (@union A) with signature eq_set ==> eq_set ==> eq_set as union_mor. Proof. exact (@union_compat A). Qed. We proceed now by proving a simple lemma performing a rewrite step and then applying reflexivity, as we would do working with Leibniz equality. Both tactic applications are accepted since the required properties over ``eq_set`` and ``union`` can be established from the two declarations above. .. coqtop:: in Goal forall (S : set nat), eq_set (union (union S (empty nat)) S) (union S S). .. coqtop:: in Proof. intros. rewrite empty_neutral. reflexivity. Qed. The tables of relations and morphisms are managed by the typeclass instance mechanism. The behavior on section close is to generalize the instances by the variables of the section (and possibly hypotheses used in the proofs of instance declarations) but not to export them in the rest of the development for proof search. One can use the cmd:`Existing Instance` command to do so outside the section, using the name of the declared morphism suffixed by ``_Morphism``, or use the ``Global`` modifier for the corresponding class instance declaration (see :ref:`First Class Setoids and Morphisms `) at definition time. When loading a compiled file or importing a module, all the declarations of this module will be loaded. Rewriting and nonreflexive relations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To replace only one argument of an n-ary morphism it is necessary to prove that all the other arguments are related to themselves by the respective relation instances. .. example:: To replace ``(union S empty)`` with ``S`` in ``(union (union S empty) S) (union S S)`` the rewrite tactic must exploit the monotony of ``union`` (axiom ``union_compat`` in the previous example). Applying ``union_compat`` by hand we are left with the goal ``eq_set (union S S) (union S S)``. When the relations associated with some arguments are not reflexive, the tactic cannot automatically prove the reflexivity goals, that are left to the user. Setoids whose relations are partial equivalence relations (PER) are useful for dealing with partial functions. Let ``R`` be a PER. We say that an element ``x`` is defined if ``R x x``. A partial function whose domain comprises all the defined elements is declared as a morphism that respects ``R``. Every time a rewriting step is performed the user must prove that the argument of the morphism is defined. .. example:: Let ``eqO`` be ``fun x y => x = y /\ x <> 0`` (the smallest PER over nonzero elements). Division can be declared as a morphism of signature ``eq ==> eq0 ==> eq``. Replacing ``x`` with ``y`` in ``div x n = div y n`` opens an additional goal ``eq0 n n`` which is equivalent to ``n = n /\ n <> 0``. 
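The following self-contained sketch shows what such a declaration could look like in practice. The relation ``eq0`` below (Leibniz equality restricted to nonzero naturals, mirroring the example above), the names ``eq0_rel`` and ``div_mor``, and the use of ``Nat.div`` as the partial function are all purely illustrative choices.

.. coqdoc::

   Require Import Setoid Morphisms Relation_Definitions.

   (* Leibniz equality restricted to nonzero naturals: a PER, not an
      equivalence, since 0 is not related to itself. *)
   Definition eq0 : relation nat := fun x y => x = y /\ x <> 0.

   Lemma eq0_sym : symmetric nat eq0.
   Proof. intros x y [Hxy Hx]; subst; split; [reflexivity | assumption]. Qed.

   Lemma eq0_trans : transitive nat eq0.
   Proof. intros x y z [Hxy Hx] [Hyz Hy]; subst; split; [reflexivity | assumption]. Qed.

   (* No reflexivity proof is provided: eq0 is only a PER. *)
   Add Relation nat eq0
     symmetry proved by eq0_sym
     transitivity proved by eq0_trans
     as eq0_rel.

   (* Division respects eq0 in its second argument: every rewrite with an
      eq0 hypothesis will ask the user to show that the argument is defined. *)
   Instance div_mor : Proper (eq ==> eq0 ==> eq) Nat.div.
   Proof. intros x x' Hx y y' [Hy _]; subst; reflexivity. Qed.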
Rewriting and nonsymmetric relations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When the user works up to relations that are not symmetric, it is no longer the case that any covariant morphism argument is also contravariant. As a result it is no longer possible to replace a term with a related one in every context, since the obtained goal implies the previous one if and only if the replacement has been performed in a contravariant position. In a similar way, replacement in an hypothesis can be performed only if the replaced term occurs in a covariant position. .. example:: Covariance and contravariance Suppose that division over real numbers has been defined as a morphism of signature ``Z.div : Z.lt ++> Z.lt --> Z.lt`` (i.e. ``Z.div`` is increasing in its first argument, but decreasing on the second one). Let ``<`` denote ``Z.lt``. Under the hypothesis ``H : x < y`` we have ``k < x / y -> k < x / x``, but not ``k < y / x -> k < x / x``. Dually, under the same hypothesis ``k < x / y -> k < y / y`` holds, but ``k < y / x -> k < y / y`` does not. Thus, if the current goal is ``k < x / x``, it is possible to replace only the second occurrence of ``x`` (in contravariant position) with ``y`` since the obtained goal must imply the current one. On the contrary, if ``k < x / x`` is an hypothesis, it is possible to replace only the first occurrence of ``x`` (in covariant position) with ``y`` since the current hypothesis must imply the obtained one. Contrary to the previous implementation, no specific error message will be raised when trying to replace a term that occurs in the wrong position. It will only fail because the rewriting constraints are not satisfiable. However it is possible to use the at modifier to specify which occurrences should be rewritten. As expected, composing morphisms together propagates the variance annotations by switching the variance every time a contravariant position is traversed. .. example:: Let us continue the previous example and let us consider the goal ``x / (x / x) < k``. The first and third occurrences of ``x`` are in a contravariant position, while the second one is in covariant position. More in detail, the second occurrence of ``x`` occurs covariantly in ``(x / x)`` (since division is covariant in its first argument), and thus contravariantly in ``x / (x / x)`` (since division is contravariant in its second argument), and finally covariantly in ``x / (x / x) < k`` (since ``<``, as every transitive relation, is contravariant in its first argument with respect to the relation itself). Rewriting in ambiguous setoid contexts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ One function can respect several different relations and thus it can be declared as a morphism having multiple signatures. .. example:: Union over homogeneous lists can be given all the following signatures: ``eq ==> eq ==> eq`` (``eq`` being the equality over ordered lists) ``set_eq ==> set_eq ==> set_eq`` (``set_eq`` being the equality over unordered lists up to duplicates), ``multiset_eq ==> multiset_eq ==> multiset_eq`` (``multiset_eq`` being the equality over unordered lists). To declare multiple signatures for a morphism, repeat the :cmd:`Add Morphism` command. When morphisms have multiple signatures it can be the case that a rewrite request is ambiguous, since it is unclear what relations should be used to perform the rewriting. 
Contrary to the previous implementation, the tactic will always choose the first possible solution to the set of constraints generated by a rewrite and will not try to find *all* the possible solutions to warn the user about them. Commands and tactics -------------------- .. _first-class-setoids-and-morphisms: First class setoids and morphisms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The implementation is based on a first-class representation of properties of relations and morphisms as typeclasses. That is, the various combinations of properties on relations and morphisms are represented as records and instances of these classes are put in a hint database. For example, the declaration: .. coqdoc:: Add Parametric Relation (x1 : T1) ... (xn : Tn) : (A t1 ... tn) (Aeq t′1 ... t′m) [reflexivity proved by refl] [symmetry proved by sym] [transitivity proved by trans] as id. is equivalent to an instance declaration: .. coqdoc:: Instance (x1 : T1) ... (xn : Tn) => id : @Equivalence (A t1 ... tn) (Aeq t′1 ... t′m) := [Equivalence_Reflexive := refl] [Equivalence_Symmetric := sym] [Equivalence_Transitive := trans]. The declaration itself amounts to the definition of an object of the record type ``Coq.Classes.RelationClasses.Equivalence`` and a hint added to the ``typeclass_instances`` hint database. Morphism declarations are also instances of a typeclass defined in ``Classes.Morphisms``. See the documentation on :ref:`typeclasses` and the theories files in Classes for further explanations. One can inform the rewrite tactic about morphisms and relations just by using the typeclass mechanism to declare them using the :cmd:`Instance` and :cmd:`Context` commands. Any object of type Proper (the type of morphism declarations) in the local context will also be automatically used by the rewriting tactic to solve constraints. Other representations of first class setoids and morphisms can also be handled by encoding them as records. In the following example, the projections of the setoid relation and of the morphism function can be registered as parametric relations and morphisms. .. example:: First class setoids .. coqtop:: in reset Require Import Relation_Definitions Setoid. Record Setoid : Type := { car: Type; eq: car -> car -> Prop; refl: reflexive _ eq; sym: symmetric _ eq; trans: transitive _ eq }. Add Parametric Relation (s : Setoid) : (@car s) (@eq s) reflexivity proved by (refl s) symmetry proved by (sym s) transitivity proved by (trans s) as eq_rel. Record Morphism (S1 S2 : Setoid) : Type := { f: car S1 -> car S2; compat: forall (x1 x2 : car S1), eq S1 x1 x2 -> eq S2 (f x1) (f x2) }. Add Parametric Morphism (S1 S2 : Setoid) (M : Morphism S1 S2) : (@f S1 S2 M) with signature (@eq S1 ==> @eq S2) as apply_mor. Proof. apply (compat S1 S2 M). Qed. Lemma test : forall (S1 S2 : Setoid) (m : Morphism S1 S2) (x y : car S1), eq S1 x y -> eq S2 (f _ _ m x) (f _ _ m y). Proof. intros. rewrite H. reflexivity. Qed. .. _tactics-enabled-on-user-provided-relations: Tactics enabled on user provided relations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The following tactics, all prefixed by ``setoid_``, deal with arbitrary registered relations and morphisms. Moreover, all the corresponding unprefixed tactics (i.e. :tacn:`reflexivity`, :tacn:`symmetry`, :tacn:`transitivity`, :tacn:`replace`, :tacn:`rewrite`) have been extended to fall back to their prefixed counterparts when the relation involved is not Leibniz equality. 
Notice, however, that using the prefixed tactics it is possible to pass additional arguments such as ``using relation``. .. tacn:: setoid_reflexivity setoid_symmetry {? in @ident } setoid_transitivity @one_term setoid_rewrite {? {| -> | <- } } @one_term {? with @bindings } {? at @rewrite_occs } {? in @ident } setoid_rewrite {? {| -> | <- } } @one_term {? with @bindings } in @ident at @rewrite_occs setoid_replace @one_term with @one_term {? using relation @one_term } {? in @ident } {? at {+ @int_or_var } } {? by @ltac_expr3 } :name: setoid_reflexivity; setoid_symmetry; setoid_transitivity; setoid_rewrite; _; setoid_replace .. todo: move rewrite_occs to rewrite chapter when that chapter is revised .. insertprodn rewrite_occs rewrite_occs .. prodn:: rewrite_occs ::= {+ @integer } | @ident The ``using relation`` arguments cannot be passed to the unprefixed form. The latter argument tells the tactic what parametric relation should be used to replace the first tactic argument with the second one. If omitted, it defaults to the ``DefaultRelation`` instance on the type of the objects. By default, it means the most recent ``Equivalence`` instance in the global environment, but it can be customized by declaring new ``DefaultRelation`` instances. As Leibniz equality is a declared equivalence, it will fall back to it if no other relation is declared on a given type. Every derived tactic that is based on the unprefixed forms of the tactics considered above will also work up to user defined relations. For instance, it is possible to register hints for :tacn:`autorewrite` that are not proofs of Leibniz equalities. In particular it is possible to exploit :tacn:`autorewrite` to simulate normalization in a term rewriting system up to user defined equalities. Printing relations and morphisms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Use the :cmd:`Print Instances` command with the class names ``Reflexive``, ``Symmetric`` or ``Transitive`` to print registered reflexive, symmetric or transitive relations and with the class name ``Proper`` to print morphisms. When rewriting tactics refuse to replace a term in a context because the latter is not a composition of morphisms, this command can be useful to understand what additional morphisms should be registered. .. _deprecated_syntax_for_generalized_rewriting: Deprecated syntax and backward incompatibilities ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. cmd:: Add Setoid @one_term__carrier @one_term__congruence @one_term__proofs as @ident This command for declaring setoids and morphisms is also accepted due to backward compatibility reasons. Here :n:`@one_term__congruence` is a congruence relation without parameters, :n:`@one_term__carrier` is its carrier and :n:`@one_term__proofs` is an object of type (:n:`Setoid_Theory @one_term__carrier @one_term__congruence`) (i.e. a record packing together the reflexivity, symmetry and transitivity lemmas). Notice that the syntax is not completely backward compatible since the identifier was not required. .. cmd:: Add Morphism @one_term : @ident Add Morphism @one_term with signature @term as @ident :name: Add Morphism; _ This command is restricted to the declaration of morphisms without parameters. It is not fully backward compatible since the property the user is asked to prove is slightly different: for n-ary morphisms the hypotheses of the property are permuted; moreover, when the morphism returns a proposition, the property is now stated using a bi-implication in place of a simple implication. 
In practice, porting an old development to the new semantics is usually quite simple. .. cmd:: Declare Morphism @one_term : @ident Declares a parameter in a module type that is a morphism. Notice that several limitations of the old implementation have been lifted. In particular, it is now possible to declare several relations with the same carrier and several signatures for the same morphism. Moreover, it is now also possible to declare several morphisms having the same signature. Finally, the :tacn:`replace` and :tacn:`rewrite` tactics can be used to replace terms in contexts that were refused by the old implementation. As discussed in the next section, the semantics of the new :tacn:`setoid_rewrite` tactic differs slightly from the old one and :tacn:`rewrite`. Extensions ---------- Rewriting under binders ~~~~~~~~~~~~~~~~~~~~~~~ .. warning:: Due to compatibility issues, this feature is enabled only when calling the :tacn:`setoid_rewrite` tactic directly and not :tacn:`rewrite`. To be able to rewrite under binding constructs, one must declare morphisms with respect to pointwise (setoid) equivalence of functions. Example of such morphisms are the standard ``all`` and ``ex`` combinators for universal and existential quantification respectively. They are declared as morphisms in the ``Classes.Morphisms_Prop`` module. For example, to declare that universal quantification is a morphism for logical equivalence: .. coqtop:: none Require Import Morphisms. Set Warnings "-deprecated-instance-without-locality". .. coqtop:: in Instance all_iff_morphism (A : Type) : Proper (pointwise_relation A iff ==> iff) (@all A). .. coqtop:: all abort Proof. simpl_relation. One then has to show that if two predicates are equivalent at every point, their universal quantifications are equivalent. Once we have declared such a morphism, it will be used by the setoid rewriting tactic each time we try to rewrite under an ``all`` application (products in ``Prop`` are implicitly translated to such applications). Indeed, when rewriting under a lambda, binding variable ``x``, say from ``P x`` to ``Q x`` using the relation iff, the tactic will generate a proof of ``pointwise_relation A iff (fun x => P x) (fun x => Q x)`` from the proof of ``iff (P x) (Q x)`` and a constraint of the form ``Proper (pointwise_relation A iff ==> ?) m`` will be generated for the surrounding morphism ``m``. Hence, one can add higher-order combinators as morphisms by providing signatures using pointwise extension for the relations on the functional arguments (or whatever subrelation of the pointwise extension). For example, one could declare the ``map`` combinator on lists as a morphism: .. coqdoc:: Instance map_morphism `{Equivalence A eqA, Equivalence B eqB} : Proper ((eqA ==> eqB) ==> list_equiv eqA ==> list_equiv eqB) (@map A B). where ``list_equiv`` implements an equivalence on lists parameterized by an equivalence on the elements. Note that when one does rewriting with a lemma under a binder using :tacn:`setoid_rewrite`, the application of the lemma may capture the bound variable, as the semantics are different from rewrite where the lemma is first matched on the whole term. With the new :tacn:`setoid_rewrite`, matching is done on each subterm separately and in its local context, and all matches are rewritten *simultaneously* by default. The semantics of the previous :tacn:`setoid_rewrite` implementation can almost be recovered using the ``at 1`` modifier. 
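For instance, once the ``all_iff_morphism`` instance provided by
``Coq.Classes.Morphisms_Prop`` is in scope, an ``iff`` lemma such as the
standard ``and_comm : A /\ B <-> B /\ A`` can be used below a universal
quantifier. The following is a minimal sketch (``P``, ``Q`` and ``H`` are
arbitrary placeholders):

.. coqdoc::

   Require Import Setoid Morphisms_Prop.

   Goal forall (P Q : nat -> Prop),
       (forall x, P x /\ Q x) -> forall x, Q x /\ P x.
   Proof.
     intros P Q H.
     (* and_comm is applied to the subterm Q x /\ P x, below the binder x *)
     setoid_rewrite and_comm.
     exact H.
   Qed.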
Subrelations ~~~~~~~~~~~~~ Subrelations can be used to specify that one relation is included in another, so that morphism signatures for one can be used for the other. If a signature mentions a relation ``R`` on the left of an arrow ``==>``, then the signature also applies for any relation ``S`` that is smaller than ``R``, and the inverse applies on the right of an arrow. One can then declare only a few morphisms instances that generate the complete set of signatures for a particular :term:`constant`. By default, the only declared subrelation is ``iff``, which is a subrelation of ``impl`` and ``inverse impl`` (the dual of implication). That’s why we can declare only two morphisms for conjunction: ``Proper (impl ==> impl ==> impl) and`` and ``Proper (iff ==> iff ==> iff) and``. This is sufficient to satisfy any rewriting constraints arising from a rewrite using ``iff``, ``impl`` or ``inverse impl`` through ``and``. Subrelations are implemented in ``Classes.Morphisms`` and are a prime example of a mostly user-space extension of the algorithm. Constant unfolding ~~~~~~~~~~~~~~~~~~ The resolution tactic is based on typeclasses and hence regards user-defined :term:`constants ` as transparent by default. This may slow down the resolution due to a lot of unifications (all the declared ``Proper`` instances are tried at each node of the search tree). To speed it up, declare your constant as rigid for proof search using the command :cmd:`Typeclasses Opaque`. .. _strategies4rewriting: Strategies for rewriting ------------------------ Usage ~~~~~ .. tacn:: rewrite_strat @rewstrategy {? in @ident } :name: rewrite_strat Rewrite using :n:`@rewstrategy` in the conclusion or in the hypothesis :n:`@ident`. .. exn:: Nothing to rewrite. The strategy didn't find any matches. .. exn:: No progress made. If the strategy succeeded but made no progress. .. exn:: Unable to satisfy the rewriting constraints. If the strategy succeeded and made progress but the corresponding rewriting constraints are not satisfied. :tacn:`setoid_rewrite` :n:`@one_term` is basically equivalent to :n:`rewrite_strat outermost @one_term`. Definitions ~~~~~~~~~~~ The generalized rewriting tactic is based on a set of strategies that can be combined to create custom rewriting procedures. Its set of strategies is based on the programmable rewriting strategies with generic traversals by Visser et al. :cite:`Luttik97specificationof` :cite:`Visser98`, which formed the core of the Stratego transformation language :cite:`Visser01`. Rewriting strategies are applied using the tactic :n:`rewrite_strat @rewstrategy`. .. insertprodn rewstrategy rewstrategy .. 
prodn:: rewstrategy ::= @one_term | <- @one_term | fail | id | refl | progress @rewstrategy | try @rewstrategy | @rewstrategy ; @rewstrategy | choice {+ @rewstrategy } | repeat @rewstrategy | any @rewstrategy | subterm @rewstrategy | subterms @rewstrategy | innermost @rewstrategy | outermost @rewstrategy | bottomup @rewstrategy | topdown @rewstrategy | hints @ident | terms {* @one_term } | eval @red_expr | fold @one_term | ( @rewstrategy ) | old_hints @ident :n:`@one_term` lemma, left to right :n:`<- @one_term` lemma, right to left :n:`fail` failure :n:`id` identity :n:`refl` reflexivity :n:`progress @rewstrategy` progress :n:`try @rewstrategy` try catch :n:`@rewstrategy ; @rewstrategy` composition :n:`choice {+ @rewstrategy }` first successful strategy :n:`repeat @rewstrategy` one or more :n:`any @rewstrategy` zero or more :n:`subterm @rewstrategy` one subterm :n:`subterms @rewstrategy` all subterms :n:`innermost @rewstrategy` Innermost first. When there are multiple nested matches in a subterm, the innermost subterm is rewritten. For :ref:`example `, rewriting :n:`(a + b) + c` with Nat.add_comm gives :n:`(b + a) + c`. :n:`outermost @rewstrategy` Outermost first. When there are multiple nested matches in a subterm, the outermost subterm is rewritten. For :ref:`example `, rewriting :n:`(a + b) + c` with Nat.add_comm gives :n:`c + (a + b)`. :n:`bottomup @rewstrategy` bottom-up :n:`topdown @rewstrategy` top-down :n:`hints @ident` apply hints from hint database :n:`terms {* @one_term }` any of the terms :n:`eval @red_expr` apply reduction :n:`fold @term` unify :n:`( @rewstrategy )` to be documented :n:`old_hints @ident` to be documented Conceptually, a few of these are defined in terms of the others using a primitive fixpoint operator `fix`, which the tactic doesn't currently support: - :n:`try @rewstrategy := choice @rewstrategy id` - :n:`any @rewstrategy := fix @ident. try (@rewstrategy ; @ident)` - :n:`repeat @rewstrategy := @rewstrategy; any @rewstrategy` - :n:`bottomup @rewstrategy := fix @ident. (choice (progress (subterms @ident)) @rewstrategy) ; try @ident` - :n:`topdown @rewstrategy := fix @ident. (choice @rewstrategy (progress (subterms @ident))) ; try @ident` - :n:`innermost @rewstrategy := fix @ident. (choice (subterm @ident) @rewstrategy)` - :n:`outermost @rewstrategy := fix @ident. (choice @rewstrategy (subterm @ident))` The basic control strategy semantics are straightforward: strategies are applied to subterms of the term to rewrite, starting from the root of the term. The lemma strategies unify the left-hand-side of the lemma with the current subterm and on success rewrite it to the right- hand-side. Composition can be used to continue rewriting on the current subterm. The ``fail`` strategy always fails while the identity strategy succeeds without making progress. The reflexivity strategy succeeds, making progress using a reflexivity proof of rewriting. ``progress`` tests progress of the argument :n:`@rewstrategy` and fails if no progress was made, while ``try`` always succeeds, catching failures. ``choice`` uses the first successful strategy in the list of @rewstrategy. One can iterate a strategy at least 1 time using ``repeat`` and at least 0 times using ``any``. The ``subterm`` and ``subterms`` strategies apply their argument :n:`@rewstrategy` to respectively one or all subterms of the current term under consideration, left-to-right. ``subterm`` stops at the first subterm for which :n:`@rewstrategy` made progress. 
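For instance, combining one of the traversal strategies described below with a
single lemma normalizes every matching redex of the goal in one call. The
following is a minimal sketch (it assumes the standard lemma
``Nat.add_0_r : forall n, n + 0 = n``):

.. coqdoc::

   Require Import Coq.Arith.Arith.

   Goal forall a b c : nat, (a + 0) + (b + 0) + (c + 0) = a + b + c.
   Proof.
     intros a b c.
     (* rewrite every subterm of the form _ + 0, traversing the goal top-down *)
     rewrite_strat topdown Nat.add_0_r.
     reflexivity.
   Qed.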
The composite strategies ``innermost`` and ``outermost`` perform a single innermost or outermost rewrite using their argument :n:`@rewstrategy`. Their counterparts ``bottomup`` and ``topdown`` perform as many rewritings as possible, starting from the bottom or the top of the term. Hint databases created for :tacn:`autorewrite` can also be used by :tacn:`rewrite_strat` using the ``hints`` strategy that applies any of the lemmas at the current subterm. The ``terms`` strategy takes the lemma names directly as arguments. The ``eval`` strategy expects a reduction expression (see :ref:`applyingconversionrules`) and succeeds if it reduces the subterm under consideration. The ``fold`` strategy takes a :token:`term` and tries to *unify* it to the current subterm, converting it to :token:`term` on success. It is stronger than the tactic ``fold``. .. _rewrite_strat_innermost_outermost: .. example:: :n:`innermost` and :n:`outermost` The type of `Nat.add_comm` is `forall n m : nat, n + m = m + n`. .. coqtop:: all Require Import Coq.Arith.Arith. Set Printing Parentheses. Goal forall a b c: nat, a + b + c = 0. rewrite_strat innermost Nat.add_comm. .. coqtop:: none Abort. Goal forall a b c: nat, a + b + c = 0. Using :n:`outermost` instead gives this result: .. coqtop:: all rewrite_strat outermost Nat.add_comm. .. coqtop:: none Abort. coq-8.15.0/doc/sphinx/addendum/implicit-coercions.rst000066400000000000000000000341211417001151100225440ustar00rootroot00000000000000.. _coercions: Implicit Coercions ==================== :Author: Amokrane Saïbi General Presentation --------------------- This section describes the inheritance mechanism of Coq. In Coq with inheritance, we are not interested in adding any expressive power to our theory, but only convenience. Given a term, possibly not typable, we are interested in the problem of determining if it can be well typed modulo insertion of appropriate coercions. We allow to write: * :g:`f a` where :g:`f:(forall x:A,B)` and :g:`a:A'` when ``A'`` can be seen in some sense as a subtype of ``A``. * :g:`x:A` when ``A`` is not a type, but can be seen in a certain sense as a type: set, group, category etc. * :g:`f a` when ``f`` is not a function, but can be seen in a certain sense as a function: bijection, functor, any structure morphism etc. Classes ------- A class with :math:`n` parameters is any defined name with a type :n:`forall (@ident__1 : @type__1)..(@ident__n:@type__n), @sort`. Thus a class with parameters is considered as a single class and not as a family of classes. An object of a class is any term of type :n:`@class @term__1 .. @term__n`. In addition to these user-defined classes, we have two built-in classes: * ``Sortclass``, the class of sorts; its objects are the terms whose type is a sort (e.g. :g:`Prop` or :g:`Type`). * ``Funclass``, the class of functions; its objects are all the terms with a functional type, i.e. of form :g:`forall x:A,B`. Formally, the syntax of classes is defined as: .. insertprodn class class .. prodn:: class ::= Funclass | Sortclass | @reference Coercions --------- A name ``f`` can be declared as a coercion between a source user-defined class ``C`` with :math:`n` parameters and a target class ``D`` if one of these conditions holds: * ``D`` is a user-defined class, then the type of ``f`` must have the form :g:`forall (x₁:A₁)..(xₙ:Aₙ)(y:C x₁..xₙ), D u₁..uₘ` where :math:`m` is the number of parameters of ``D``. * ``D`` is ``Funclass``, then the type of ``f`` must have the form :g:`forall (x₁:A₁)..(xₙ:Aₙ)(y:C x₁..xₙ)(x:A), B`. 
* ``D`` is ``Sortclass``, then the type of ``f`` must have the form :g:`forall (x₁:A₁)..(xₙ:Aₙ)(y:C x₁..xₙ), s` with ``s`` a sort. We then write :g:`f : C >-> D`. The restriction on the type of coercions is called *the uniform inheritance condition*. .. note:: The built-in class ``Sortclass`` can be used as a source class, but the built-in class ``Funclass`` cannot. To coerce an object :g:`t:C t₁..tₙ` of ``C`` towards ``D``, we have to apply the coercion ``f`` to it; the obtained term :g:`f t₁..tₙ t` is then an object of ``D``. Identity Coercions ------------------- Identity coercions are special cases of coercions used to go around the uniform inheritance condition. Let ``C`` and ``D`` be two classes with respectively `n` and `m` parameters and :g:`f:forall (x₁:T₁)..(xₖ:Tₖ)(y:C u₁..uₙ), D v₁..vₘ` a function which does not verify the uniform inheritance condition. To declare ``f`` as coercion, one has first to declare a subclass ``C'`` of ``C``: :g:`C' := fun (x₁:T₁)..(xₖ:Tₖ) => C u₁..uₙ` We then define an *identity coercion* between ``C'`` and ``C``: :g:`Id_C'_C := fun (x₁:T₁)..(xₖ:Tₖ)(y:C' x₁..xₖ) => (y:C u₁..uₙ)` We can now declare ``f`` as coercion from ``C'`` to ``D``, since we can "cast" its type as :g:`forall (x₁:T₁)..(xₖ:Tₖ)(y:C' x₁..xₖ),D v₁..vₘ`. The identity coercions have a special status: to coerce an object :g:`t:C' t₁..tₖ` of ``C'`` towards ``C``, we do not have to insert explicitly ``Id_C'_C`` since :g:`Id_C'_C t₁..tₖ t` is convertible with ``t``. However we "rewrite" the type of ``t`` to become an object of ``C``; in this case, it becomes :g:`C uₙ'..uₖ'` where each ``uᵢ'`` is the result of the substitution in ``uᵢ`` of the variables ``xⱼ`` by ``tⱼ``. Inheritance Graph ------------------ Coercions form an inheritance graph with classes as nodes. We call *coercion path* an ordered list of coercions between two nodes of the graph. A class ``C`` is said to be a subclass of ``D`` if there is a coercion path in the graph from ``C`` to ``D``; we also say that ``C`` inherits from ``D``. Our mechanism supports multiple inheritance since a class may inherit from several classes, contrary to simple inheritance where a class inherits from at most one class. However there must be at most one path between two classes. If this is not the case, only the *oldest* one is valid and the others are ignored. So the order of declaration of coercions is important. We extend notations for coercions to coercion paths. For instance :g:`[f₁;..;fₖ] : C >-> D` is the coercion path composed by the coercions ``f₁..fₖ``. The application of a coercion path to a term consists of the successive application of its coercions. Declaring Coercions ------------------------- .. cmd:: Coercion @reference : @class >-> @class Coercion @ident {? @univ_decl } @def_body :name: Coercion; _ The first form declares the construction denoted by :token:`reference` as a coercion between the two given classes. The second form defines :token:`ident` just like :cmd:`Definition` :n:`@ident {? @univ_decl } @def_body` and then declares :token:`ident` as a coercion between it source and its target. Both forms support the :attr:`local` attribute, which makes the coercion local to the current section. .. exn:: @qualid not declared. :undocumented: .. exn:: @qualid is already a coercion. :undocumented: .. exn:: Funclass cannot be a source class. :undocumented: .. exn:: @qualid is not a function. :undocumented: .. exn:: Cannot find the source class of @qualid. :undocumented: .. exn:: Cannot recognize @class as a source class of @qualid. 
:undocumented: .. warn:: @qualid does not respect the uniform inheritance condition. :undocumented: .. exn:: Found target class ... instead of ... :undocumented: .. warn:: New coercion path ... is ambiguous with existing ... When the coercion :token:`qualid` is added to the inheritance graph, new coercion paths which have the same classes as existing ones are ignored. The :cmd:`Coercion` command tries to check the convertibility of new ones and existing ones. The paths for which this check fails are displayed by a warning in the form :g:`[f₁;..;fₙ] : C >-> D`. The convertibility checking procedure for coercion paths is complete for paths consisting of coercions satisfying the uniform inheritance condition, but some coercion paths could be reported as ambiguous even if they are convertible with existing ones when they have coercions that don't satisfy the uniform inheritance condition. .. warn:: ... is not definitionally an identity function. If a coercion path has the same source and target class, that is said to be circular. When a new circular coercion path is not convertible with the identity function, it will be reported as ambiguous. Some objects can be declared as coercions when they are defined. This applies to :ref:`assumptions` and constructors of :ref:`inductive types and record fields`. Use :n:`:>` instead of :n:`:` before the :n:`@type` of the assumption to do so. See :n:`@of_type`. .. cmd:: Identity Coercion @ident : @class >-> @class If ``C`` is the source `class` and ``D`` the destination, we check that ``C`` is a :term:`constant` with a :term:`body` of the form :g:`fun (x₁:T₁)..(xₙ:Tₙ) => D t₁..tₘ` where `m` is the number of parameters of ``D``. Then we define an identity function with type :g:`forall (x₁:T₁)..(xₙ:Tₙ)(y:C x₁..xₙ),D t₁..tₘ`, and we declare it as an identity coercion between ``C`` and ``D``. This command supports the :attr:`local` attribute, which makes the coercion local to the current section. .. exn:: @class must be a transparent constant. :undocumented: .. cmd:: SubClass @ident_decl @def_body If :n:`@type` is a class :n:`@ident'` applied to some arguments then :n:`@ident` is defined and an identity coercion of name :n:`Id_@ident_@ident'` is declared. Otherwise said, this is an abbreviation for :n:`Definition @ident := @type.` :n:`Identity Coercion Id_@ident_@ident' : @ident >-> @ident'`. This command supports the :attr:`local` attribute, which makes the coercion local to the current section. Displaying Available Coercions ------------------------------- .. cmd:: Print Classes Print the list of declared classes in the current context. .. cmd:: Print Coercions Print the list of declared coercions in the current context. .. cmd:: Print Graph Print the list of valid coercion paths in the current context. .. cmd:: Print Coercion Paths @class @class Print the list of valid coercion paths between the two given classes. Activating the Printing of Coercions ------------------------------------- .. flag:: Printing Coercions When on, this :term:`flag` forces all the coercions to be printed. By default, coercions are not printed. .. table:: Printing Coercion @qualid This :term:`table` specifies a set of qualids for which coercions are always displayed. Use the :cmd:`Add` and :cmd:`Remove` commands to update the set of qualids. .. _coercions-classes-as-records: Classes as Records ------------------ .. index:: :> (coercion) *Structures with Inheritance* may be defined using the :cmd:`Record` command. 
Use `>` before the record name to declare the constructor name as a coercion from the class of the last field type to the record name (this may fail if the uniform inheritance condition is not satisfied). See :token:`record_definition`. Use `:>` in the field type to declare the field as a coercion from the record name to the class of the field type. See :token:`of_type`. Coercions and Sections ---------------------- The inheritance mechanism is compatible with the section mechanism. The global classes and coercions defined inside a section are redefined after its closing, using their new value and new type. The classes and coercions which are local to the section are simply forgotten. Coercions with a local source class or a local target class, and coercions which do not verify the uniform inheritance condition any longer are also forgotten. Coercions and Modules --------------------- The coercions present in a module are activated only when the module is explicitly imported. Examples -------- There are three situations: Coercion at function application ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :g:`f a` is ill-typed where :g:`f:forall x:A,B` and :g:`a:A'`. If there is a coercion path between ``A'`` and ``A``, then :g:`f a` is transformed into :g:`f a'` where ``a'`` is the result of the application of this coercion path to ``a``. We first give an example of coercion between atomic inductive types .. coqtop:: all Definition bool_in_nat (b:bool) := if b then 0 else 1. Coercion bool_in_nat : bool >-> nat. Check (0 = true). Set Printing Coercions. Check (0 = true). Unset Printing Coercions. .. warning:: Note that ``Check (true = O)`` would fail. This is "normal" behavior of coercions. To validate ``true=O``, the coercion is searched from ``nat`` to ``bool``. There is none. We give an example of coercion between classes with parameters. .. coqtop:: all Parameters (C : nat -> Set) (D : nat -> bool -> Set) (E : bool -> Set). Parameter f : forall n:nat, C n -> D (S n) true. Coercion f : C >-> D. Parameter g : forall (n:nat) (b:bool), D n b -> E b. Coercion g : D >-> E. Parameter c : C 0. Parameter T : E true -> nat. Check (T c). Set Printing Coercions. Check (T c). Unset Printing Coercions. We give now an example using identity coercions. .. coqtop:: all Definition D' (b:bool) := D 1 b. Identity Coercion IdD'D : D' >-> D. Print IdD'D. Parameter d' : D' true. Check (T d'). Set Printing Coercions. Check (T d'). Unset Printing Coercions. In the case of functional arguments, we use the monotonic rule of sub-typing. To coerce :g:`t : forall x : A, B` towards :g:`forall x : A', B'`, we have to coerce ``A'`` towards ``A`` and ``B`` towards ``B'``. An example is given below: .. coqtop:: all Parameters (A B : Set) (h : A -> B). Coercion h : A >-> B. Parameter U : (A -> E true) -> nat. Parameter t : B -> C 0. Check (U t). Set Printing Coercions. Check (U t). Unset Printing Coercions. Remark the changes in the result following the modification of the previous example. .. coqtop:: all Parameter U' : (C 0 -> B) -> nat. Parameter t' : E true -> A. Check (U' t'). Set Printing Coercions. Check (U' t'). Unset Printing Coercions. Coercion to a type ~~~~~~~~~~~~~~~~~~ An assumption ``x:A`` when ``A`` is not a type, is ill-typed. It is replaced by ``x:A'`` where ``A'`` is the result of the application to ``A`` of the coercion path between the class of ``A`` and ``Sortclass`` if it exists. 
This case occurs in the abstraction :g:`fun x:A => t`, universal quantification :g:`forall x:A,B`, global variables and parameters of (co)inductive definitions and functions. In :g:`forall x:A,B`, such a coercion path may also be applied to ``B`` if necessary. .. coqtop:: all Parameter Graph : Type. Parameter Node : Graph -> Type. Coercion Node : Graph >-> Sortclass. Parameter G : Graph. Parameter Arrows : G -> G -> Type. Check Arrows. Parameter fg : G -> G. Check fg. Set Printing Coercions. Check fg. Unset Printing Coercions. Coercion to a function ~~~~~~~~~~~~~~~~~~~~~~ ``f a`` is ill-typed because ``f:A`` is not a function. The term ``f`` is replaced by the term obtained by applying to ``f`` the coercion path between ``A`` and ``Funclass`` if it exists. .. coqtop:: all Parameter bij : Set -> Set -> Set. Parameter ap : forall A B:Set, bij A B -> A -> B. Coercion ap : bij >-> Funclass. Parameter b : bij nat nat. Check (b 0). Set Printing Coercions. Check (b 0). Unset Printing Coercions. Let us see the resulting graph after all these examples. .. coqtop:: all Print Graph. coq-8.15.0/doc/sphinx/addendum/micromega.rst000066400000000000000000000337501417001151100207220ustar00rootroot00000000000000.. _micromega: Micromega: solvers for arithmetic goals over ordered rings ================================================================== :Authors: Frédéric Besson and Evgeny Makarov Short description of the tactics -------------------------------- The Psatz module (``Require Import Psatz.``) gives access to several tactics for solving arithmetic goals over :math:`\mathbb{Q}`, :math:`\mathbb{R}`, and :math:`\mathbb{Z}` but also :g:`nat` and :g:`N`. It also possible to get the tactics for integers by a ``Require Import Lia``, rationals ``Require Import Lqa`` and reals ``Require Import Lra``. + :tacn:`lia` is a decision procedure for linear integer arithmetic; + :tacn:`nia` is an incomplete proof procedure for integer non-linear arithmetic; + :tacn:`lra` is a decision procedure for linear (real or rational) arithmetic; + :tacn:`nra` is an incomplete proof procedure for non-linear (real or rational) arithmetic; + :tacn:`psatz` ``D n`` where ``D`` is :math:`\mathbb{Z}` or :math:`\mathbb{Q}` or :math:`\mathbb{R}`, and ``n`` is an optional integer limiting the proof search depth, is an incomplete proof procedure for non-linear arithmetic. It is based on John Harrison’s HOL Light driver to the external prover `csdp` [#csdp]_. Note that the `csdp` driver generates a *proof cache* which makes it possible to rerun scripts even without `csdp`. .. flag:: Simplex .. deprecated:: 8.14 This :term:`flag` (set by default) instructs the decision procedures to use the Simplex method for solving linear goals instead of the deprecated Fourier elimination. .. opt:: Dump Arith This :term:`option` (unset by default) may be set to a file path where debug info will be written. .. cmd:: Show Lia Profile This command prints some statistics about the amount of pivoting operations needed by :tacn:`lia` and may be useful to detect inefficiencies (only meaningful if flag :flag:`Simplex` is set). .. flag:: Lia Cache This :term:`flag` (set by default) instructs :tacn:`lia` to cache its results in the file `.lia.cache` .. flag:: Nia Cache This :term:`flag` (set by default) instructs :tacn:`nia` to cache its results in the file `.nia.cache` .. 
flag:: Nra Cache This :term:`flag` (set by default) instructs :tacn:`nra` to cache its results in the file `.nra.cache` The tactics solve propositional formulas parameterized by atomic arithmetic expressions interpreted over a domain :math:`D \in \{\mathbb{Z},\mathbb{Q},\mathbb{R}\}`. The syntax for formulas over :math:`\mathbb{Z}` is: .. note the following is not an insertprodn .. prodn:: F ::= {| @A | P | True | False | @F /\\ @F | @F \\/ @F | @F <-> @F | @F -> @F | ~ @F | @F = @F } A ::= {| @p = @p | @p > @p | @p < @p | @p >= @p | @p <= @p } p ::= {| c | x | −@p | @p − @p | @p + @p | @p * @p | @p ^ n } where - :token:`F` is interpreted over either `Prop` or `bool` - :n:`P` is an arbitrary proposition - :n:`c` is a numeric constant of :math:`D` - :n:`x` :math:`\in D` is a numeric variable - :n:`−`, :n:`+` and :n:`*` are respectively subtraction, addition and product - :n:`p ^ n` is exponentiation by a constant :math:`n` When :math:`F` is interpreted over `bool`, the boolean operators are `&&`, `||`, `Bool.eqb`, `Bool.implb`, `Bool.negb` and the comparisons in :math:`A` are also interpreted over the booleans (e.g., for :math:`\mathbb{Z}`, we have `Z.eqb`, `Z.gtb`, `Z.ltb`, `Z.geb`, `Z.leb`). For :math:`\mathbb{Q}`, use the equality of rationals ``==`` rather than Leibniz equality ``=``. For :math:`\mathbb{Z}` (resp. :math:`\mathbb{Q}`), :math:`c` ranges over integer constants (resp. rational constants). For :math:`\mathbb{R}`, the tactic recognizes as real constants the following expressions: :: c ::= R0 | R1 | Rmul(c,c) | Rplus(c,c) | Rminus(c,c) | IZR z | IQR q | Rdiv(c,c) | Rinv c where :math:`z` is a constant in :math:`\mathbb{Z}` and :math:`q` is a constant in :math:`\mathbb{Q}`. This includes integer constants written using the decimal notation, *i.e.*, ``c%R``. *Positivstellensatz* refutations -------------------------------- The name `psatz` is an abbreviation for *positivstellensatz* – literally "positivity theorem" – which generalizes Hilbert’s *nullstellensatz*. It relies on the notion of Cone. Given a (finite) set of polynomials :math:`S`, :math:`\mathit{Cone}(S)` is inductively defined as the smallest set of polynomials closed under the following rules: :math:`\begin{array}{l} \dfrac{p \in S}{p \in \mathit{Cone}(S)} \quad \dfrac{}{p^2 \in \mathit{Cone}(S)} \quad \dfrac{p_1 \in \mathit{Cone}(S) \quad p_2 \in \mathit{Cone}(S) \quad \Join \in \{+,*\}} {p_1 \Join p_2 \in \mathit{Cone}(S)}\\ \end{array}` The following theorem provides a proof principle for checking that a set of polynomial inequalities does not have solutions [#fnpsatz]_. .. _psatz_thm: **Theorem (Psatz)**. Let :math:`S` be a set of polynomials. If :math:`-1` belongs to :math:`\mathit{Cone}(S)`, then the conjunction :math:`\bigwedge_{p \in S} p\ge 0` is unsatisfiable. A proof based on this theorem is called a *positivstellensatz* refutation. The tactics work as follows. Formulas are normalized into conjunctive normal form :math:`\bigwedge_i C_i` where :math:`C_i` has the general form :math:`(\bigwedge_{j\in S_i} p_j \Join 0) \to \mathit{False}` and :math:`\Join \in \{>,\ge,=\}` for :math:`D\in \{\mathbb{Q},\mathbb{R}\}` and :math:`\Join \in \{\ge, =\}` for :math:`\mathbb{Z}`. For each conjunct :math:`C_i`, the tactic calls an oracle which searches for :math:`-1` within the cone. Upon success, the oracle returns a *cone expression* that is normalized by the :tacn:`ring` tactic (see :ref:`theringandfieldtacticfamilies`) and checked to be :math:`-1`. 
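As a simple linear instance, the hypotheses below together with the negated
conclusion admit the refutation :math:`-1 \equiv (x-1) + y + (-(x+y))`, which
:tacn:`lia` (and, over :math:`\mathbb{R}` or :math:`\mathbb{Q}`, :tacn:`lra`)
finds automatically. The following is a minimal sketch:

.. coqdoc::

   Require Import ZArith Lia.
   Open Scope Z_scope.

   Goal forall x y : Z, x >= 1 -> y >= 0 -> x + y >= 1.
   Proof. intros; lia. Qed.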
`lra`: a decision procedure for linear real and rational arithmetic ------------------------------------------------------------------- .. tacn:: lra This tactic is searching for *linear* refutations. As a result, this tactic explores a subset of the *Cone* defined as :math:`\mathit{LinCone}(S) =\left\{ \left. \sum_{p \in S} \alpha_p \times p~\right|~\alpha_p \mbox{ are positive constants} \right\}` The deductive power of :tacn:`lra` overlaps with the one of :tacn:`field` tactic *e.g.*, :math:`x = 10 * x / 10` is solved by :tacn:`lra`. `lia`: a tactic for linear integer arithmetic --------------------------------------------- .. tacn:: lia This tactic solves linear goals over :g:`Z` by searching for *linear* refutations and cutting planes. :tacn:`lia` provides support for :g:`Z`, :g:`nat`, :g:`positive` and :g:`N` by pre-processing via the :tacn:`zify` tactic. High level view of `lia` ~~~~~~~~~~~~~~~~~~~~~~~~ Over :math:`\mathbb{R}`, *positivstellensatz* refutations are a complete proof principle [#mayfail]_. However, this is not the case over :math:`\mathbb{Z}`. Actually, *positivstellensatz* refutations are not even sufficient to decide linear *integer* arithmetic. The canonical example is :math:`2 * x = 1 \to \mathtt{False}` which is a theorem of :math:`\mathbb{Z}` but not a theorem of :math:`{\mathbb{R}}`. To remedy this weakness, the :tacn:`lia` tactic is using recursively a combination of: + linear *positivstellensatz* refutations; + cutting plane proofs; + case split. Cutting plane proofs ~~~~~~~~~~~~~~~~~~~~~~ are a way to take into account the discreteness of :math:`\mathbb{Z}` by rounding up (rational) constants up-to the closest integer. .. _ceil_thm: .. thm:: Bound on the ceiling function Let :math:`p` be an integer and :math:`c` a rational constant. Then :math:`p \ge c \rightarrow p \ge \lceil{c}\rceil`. For instance, from :math:`2 x = 1` we can deduce + :math:`x \ge 1/2` whose cut plane is :math:`x \ge \lceil{1/2}\rceil = 1`; + :math:`x \le 1/2` whose cut plane is :math:`x \le \lfloor{1/2}\rfloor = 0`. By combining these two facts (in normal form) :math:`x − 1 \ge 0` and :math:`-x \ge 0`, we conclude by exhibiting a *positivstellensatz* refutation: :math:`−1 \equiv x−1 + −x \in \mathit{Cone}({x−1,x})`. Cutting plane proofs and linear *positivstellensatz* refutations are a complete proof principle for integer linear arithmetic. Case split ~~~~~~~~~~~ enumerates over the possible values of an expression. .. _casesplit_thm: **Theorem**. Let :math:`p` be an integer and :math:`c_1` and :math:`c_2` integer constants. Then: :math:`c_1 \le p \le c_2 \Rightarrow \bigvee_{x \in [c_1,c_2]} p = x` Our current oracle tries to find an expression :math:`e` with a small range :math:`[c_1,c_2]`. We generate :math:`c_2 − c_1` subgoals which contexts are enriched with an equation :math:`e = i` for :math:`i \in [c_1,c_2]` and recursively search for a proof. `nra`: a proof procedure for non-linear arithmetic -------------------------------------------------- .. tacn:: nra This tactic is an *experimental* proof procedure for non-linear arithmetic. The tactic performs a limited amount of non-linear reasoning before running the linear prover of :tacn:`lra`. This pre-processing does the following: + If the context contains an arithmetic expression of the form :math:`e[x^2]` where :math:`x` is a monomial, the context is enriched with :math:`x^2 \ge 0`; + For all pairs of hypotheses :math:`e_1 \ge 0`, :math:`e_2 \ge 0`, the context is enriched with :math:`e_1 \times e_2 \ge 0`. 
After this pre-processing, the linear prover of :tacn:`lra` searches for a proof by abstracting monomials by variables. `nia`: a proof procedure for non-linear integer arithmetic ---------------------------------------------------------- .. tacn:: nia This tactic is a proof procedure for non-linear integer arithmetic. It performs a pre-processing similar to :tacn:`nra`. The obtained goal is solved using the linear integer prover :tacn:`lia`. `psatz`: a proof procedure for non-linear arithmetic ---------------------------------------------------- .. tacn:: psatz @one_term {? @nat_or_var } This tactic explores the *Cone* by increasing degrees – hence the depth parameter :token:`nat_or_var`. In theory, such a proof search is complete – if the goal is provable the search eventually stops. Unfortunately, the external oracle is using numeric (approximate) optimization techniques that might miss a refutation. To illustrate the working of the tactic, consider we wish to prove the following Coq goal: .. needs csdp .. coqdoc:: Require Import ZArith Psatz. Open Scope Z_scope. Goal forall x, -x^2 >= 0 -> x - 1 >= 0 -> False. intro x. psatz Z 2. As shown, such a goal is solved by ``intro x. psatz Z 2.``. The oracle returns the cone expression :math:`2 \times (x-1) + (\mathbf{x-1}) \times (\mathbf{x−1}) + -x^2` (polynomial hypotheses are printed in bold). By construction, this expression belongs to :math:`\mathit{Cone}({−x^2,x -1})`. Moreover, by running :tacn:`ring` we obtain :math:`-1`. By Theorem :ref:`Psatz `, the goal is valid. `zify`: pre-processing of arithmetic goals ------------------------------------------ .. tacn:: zify This tactic is internally called by :tacn:`lia` to support additional types, e.g., :g:`nat`, :g:`positive` and :g:`N`. Additional support is provided by the following modules: + For boolean operators (e.g., :g:`Nat.leb`), require the module :g:`ZifyBool`. + For comparison operators (e.g., :g:`Z.compare`), require the module :g:`ZifyComparison`. + For native unsigned 63 bit integers, require the module :g:`ZifyUint63`. + For native signed 63 bit integers, require the module :g:`ZifySint63`. + For operators :g:`Nat.div`, :g:`Nat.mod`, and :g:`Nat.pow`, require the module :g:`ZifyNat`. + For operators :g:`N.div`, :g:`N.mod`, and :g:`N.pow`, require the module :g:`ZifyN`. :tacn:`zify` can also be extended by rebinding the tactics `Zify.zify_pre_hook` and `Zify.zify_post_hook` that are respectively run in the first and the last steps of :tacn:`zify`. + To support :g:`Z.div` and :g:`Z.modulo`: ``Ltac Zify.zify_post_hook ::= Z.div_mod_to_equations``. + To support :g:`Z.quot` and :g:`Z.rem`: ``Ltac Zify.zify_post_hook ::= Z.quot_rem_to_equations``. + To support :g:`Z.div`, :g:`Z.modulo`, :g:`Z.quot` and :g:`Z.rem`: either ``Ltac Zify.zify_post_hook ::= Z.to_euclidean_division_equations`` or ``Ltac Zify.zify_convert_to_euclidean_division_equations_flag ::= constr:(true)``. The :tacn:`zify` tactic can be extended with new types and operators by declaring and registering new typeclass instances using the following commands. The typeclass declarations can be found in the module ``ZifyClasses`` and the default instances can be found in the module ``ZifyInst``. .. cmd:: Add Zify @add_zify @qualid .. insertprodn add_zify add_zify .. prodn:: add_zify ::= {| InjTyp | BinOp | UnOp | CstOp | BinRel | UnOpSpec | BinOpSpec } | {| PropOp | PropBinOp | PropUOp | Saturate } Registers an instance of the specified typeclass. The typeclass type (e.g. 
:g:`BinOp Z.mul` or :g:`BinRel (@eq Z)`) has the additional constraint that the non-implicit argument (here, :g:`Z.mul` or :g:`(@eq Z)`) is either a :n:`@reference` (here, :g:`Z.mul`) or the application of a :n:`@reference` (here, :g:`@eq`) to a sequence of :n:`@one_term`. .. cmd:: Show Zify @show_zify .. insertprodn show_zify show_zify .. prodn:: show_zify ::= {| InjTyp | BinOp | UnOp | CstOp | BinRel | UnOpSpec | BinOpSpec | Spec } Prints instances for the specified typeclass. For instance, :cmd:`Show Zify` ``InjTyp`` prints the list of types that supported by :tacn:`zify` i.e., :g:`Z`, :g:`nat`, :g:`positive` and :g:`N`. .. [#csdp] Sources and binaries can be found at https://projects.coin-or.org/Csdp .. [#fnpsatz] Variants deal with equalities and strict inequalities. .. [#mayfail] In practice, the oracle might fail to produce such a refutation. .. comment in original TeX: .. %% \paragraph{The {\tt sos} tactic} -- where {\tt sos} stands for \emph{sum of squares} -- tries to prove that a .. %% single polynomial $p$ is positive by expressing it as a sum of squares \emph{i.e.,} $\sum_{i\in S} p_i^2$. .. %% This amounts to searching for $p$ in the cone without generators \emph{i.e.}, $Cone(\{\})$. coq-8.15.0/doc/sphinx/addendum/miscellaneous-extensions.rst000066400000000000000000000034301417001151100240070ustar00rootroot00000000000000Program derivation ================== Coq comes with an extension called ``Derive``, which supports program derivation. Typically in the style of Bird and Meertens or derivations of program refinements. To use the Derive extension it must first be required with ``Require Coq.derive.Derive``. When the extension is loaded, it provides the following command: .. cmd:: Derive @ident__1 SuchThat @one_term As @ident__2 :n:`@ident__1` can appear in :n:`@one_term`. This command opens a new proof presenting the user with a goal for :n:`@one_term` in which the name :n:`@ident__1` is bound to an existential variable :g:`?x` (formally, there are other goals standing for the existential variables but they are shelved, as described in :tacn:`shelve`). When the proof ends two :term:`constants ` are defined: + The first one is named :n:`@ident__1` and is defined as the proof of the shelved goal (which is also the value of :g:`?x`). It is always transparent. + The second one is named :n:`@ident__2`. It has type :n:`@type`, and its :term:`body` is the proof of the initially visible goal. It is opaque if the proof ends with :cmd:`Qed`, and transparent if the proof ends with :cmd:`Defined`. .. example:: .. coqtop:: all Require Coq.derive.Derive. Require Import Coq.Numbers.Natural.Peano.NPeano. Section P. Variables (n m k:nat). Derive p SuchThat ((k*n)+(k*m) = p) As h. Proof. rewrite <- Nat.mul_add_distr_l. subst p. reflexivity. Qed. End P. Print p. Check h. Any property can be used as `term`, not only an equation. In particular, it could be an order relation specifying some form of program refinement or a non-executable property from which deriving a program is convenient. coq-8.15.0/doc/sphinx/addendum/nsatz.rst000066400000000000000000000104311417001151100201050ustar00rootroot00000000000000.. _nsatz_chapter: Nsatz: a solver for equalities in integral domains =========================================================== :Author: Loïc Pottier To use the tactics described in this section, load the ``Nsatz`` module with the command ``Require Import Nsatz``. 
Alternatively, if you prefer not to transitively depend on the files that declare the axioms used to define the real numbers, you can ``Require Import NsatzTactic`` instead; this will still allow :tacn:`nsatz` to solve goals defined about :math:`\mathbb{Z}`, :math:`\mathbb{Q}` and any user-registered rings. .. tacn:: nsatz {? with radicalmax := @one_term strategy := @one_term parameters := @one_term variables := @one_term } This tactic is for solving goals of the form :math:`\begin{array}{l} \forall X_1, \ldots, X_n \in A, \\ P_1(X_1, \ldots, X_n) = Q_1(X_1, \ldots, X_n), \ldots, P_s(X_1, \ldots, X_n) = Q_s(X_1, \ldots, X_n) \\ \vdash P(X_1, \ldots, X_n) = Q(X_1, \ldots, X_n) \\ \end{array}` where :math:`P, Q, P_1, Q_1, \ldots, P_s, Q_s` are polynomials and :math:`A` is an integral domain, i.e. a commutative ring with no zero divisors. For example, :math:`A` can be :math:`\mathbb{R}`, :math:`\mathbb{Z}`, or :math:`\mathbb{Q}`. Note that the equality :math:`=` used in these goals can be any setoid equality (see :ref:`tactics-enabled-on-user-provided-relations`) , not only Leibniz equality. It also proves formulas :math:`\begin{array}{l} \forall X_1, \ldots, X_n \in A, \\ P_1(X_1, \ldots, X_n) = Q_1(X_1, \ldots, X_n) \wedge \ldots \wedge P_s(X_1, \ldots, X_n) = Q_s(X_1, \ldots, X_n) \\ \rightarrow P(X_1, \ldots, X_n) = Q(X_1, \ldots, X_n) \\ \end{array}` doing automatic introductions. `radicalmax` bound when searching for r such that :math:`c (P−Q) r = \sum_{i=1..s} S_i (P i − Q i)`. This argument must be of type `N` (binary natural numbers). `strategy` gives the order on variables :math:`X_1,\ldots,X_n` and the strategy used in Buchberger algorithm (see :cite:`sugar` for details): * `strategy := 0%Z`: reverse lexicographic order and newest s-polynomial. * `strategy := 1%Z`: reverse lexicographic order and sugar strategy. * `strategy := 2%Z`: pure lexicographic order and newest s-polynomial. * `strategy := 3%Z`: pure lexicographic order and sugar strategy. `parameters` a list of parameters of type `R`, containing the variables :math:`X_{i_1},\ldots,X_{i_k}` among :math:`X_1,\ldots,X_n`. Computation will be performed with rational fractions in these parameters, i.e. polynomials have coefficients in :math:`R(X_{i_1},\ldots,X_{i_k})`. In this case, the coefficient :math:`c` can be a nonconstant polynomial in :math:`X_{i_1},\ldots,X_{i_k}`, and the tactic produces a goal which states that :math:`c` is not zero. `variables` a list of variables of type `R` in the decreasing order in which they will be used in the Buchberger algorithm. If the list is empty, then `lvar` is replaced by all the variables which are not in `parameters`. See the file `Nsatz.v `_ for examples, especially in geometry. More about `nsatz` --------------------- Hilbert’s Nullstellensatz theorem shows how to reduce proofs of equalities on polynomials on a commutative ring :math:`A` with no zero divisors to algebraic computations: it is easy to see that if a polynomial :math:`P` in :math:`A[X_1,\ldots,X_n]` verifies :math:`c P^r = \sum_{i=1}^{s} S_i P_i`, with :math:`c \in A`, :math:`c \not = 0`, :math:`r` a positive integer, and the :math:`S_i` s in :math:`A[X_1,\ldots,X_n ]`, then :math:`P` is zero whenever polynomials :math:`P_1,\ldots,P_s` are zero (the converse is also true when :math:`A` is an algebraically closed field: the method is complete). 
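For instance (a minimal sketch over :math:`\mathbb{Z}`), taking
:math:`P := x^2 y - x` and :math:`P_1 := x y - 1`, we have
:math:`P = x\,P_1`, i.e. :math:`c = 1`, :math:`r = 1` and :math:`S_1 = x`,
so :g:`x * x * y = x` follows from :g:`x * y = 1`. The :tacn:`nsatz` tactic
finds such certificates automatically:

.. coqdoc::

   Require Import ZArith NsatzTactic.
   Open Scope Z_scope.

   Goal forall x y : Z, x * y = 1 -> x * x * y = x.
   Proof. intros; nsatz. Qed.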
So, solving our initial problem reduces to finding :math:`S_1, \ldots, S_s`, :math:`c` and :math:`r` such that :math:`c (P-Q)^r = \sum_{i} S_i (P_i-Q_i)`, which will be proved by the tactic ring. This is achieved by the computation of a Gröbner basis of the ideal generated by :math:`P_1-Q_1,...,P_s-Q_s`, with an adapted version of the Buchberger algorithm. This computation is done after a step of *reification*, which is performed using :ref:`typeclasses`. coq-8.15.0/doc/sphinx/addendum/parallel-proof-processing.rst000066400000000000000000000254421417001151100240470ustar00rootroot00000000000000.. _asynchronousandparallelproofprocessing: Asynchronous and Parallel Proof Processing ========================================== :Author: Enrico Tassi This chapter explains how proofs can be asynchronously processed by Coq. This feature improves the reactivity of the system when used in interactive mode via CoqIDE. In addition, it allows Coq to take advantage of parallel hardware when used as a batch compiler by decoupling the checking of statements and definitions from the construction and checking of proofs objects. This feature is designed to help dealing with huge libraries of theorems characterized by long proofs. In the current state, it may not be beneficial on small sets of short files. This feature has some technical limitations that may make it unsuitable for some use cases. For example, in interactive mode, some errors coming from the kernel of Coq are signaled late. The type of errors belonging to this category are universe inconsistencies. At the time of writing, only opaque proofs (ending with ``Qed`` or ``Admitted``) can be processed asynchronously. Finally, asynchronous processing is disabled when running CoqIDE in Windows. The current implementation of the feature is not stable on Windows. It can be enabled, as described below at :ref:`interactive-mode`, though doing so is not recommended. Proof annotations ---------------------- To process a proof asynchronously Coq needs to know the precise statement of the theorem without looking at the proof. This requires some annotations if the theorem is proved inside a Section (see Section :ref:`section-mechanism`). When a section ends, Coq looks at the proof object to decide which section variables are actually used and hence have to be quantified in the statement of the theorem. To avoid making the construction of proofs mandatory when ending a section, one can start each proof with the ``Proof using`` command (Section :ref:`proof-editing-mode`) that declares which section variables the theorem uses. The presence of ``Proof`` using is needed to process proofs asynchronously in interactive mode. It is not strictly mandatory in batch mode if it is not the first time the file is compiled and if the file itself did not change. When the proof does not begin with Proof using, the system records in an auxiliary file, produced along with the ``.vo`` file, the list of section variables used. Automatic suggestion of proof annotations ````````````````````````````````````````` The :flag:`Suggest Proof Using` flag makes Coq suggest, when a ``Qed`` command is processed, a correct proof annotation. It is up to the user to modify the proof script accordingly. Proof blocks and error resilience -------------------------------------- Coq 8.6 introduced a mechanism for error resilience: in interactive mode Coq is able to completely check a document containing errors instead of bailing out at the first failure. 
Two kind of errors are supported: errors occurring in commands and errors occurring in proofs. To properly recover from a failing tactic, Coq needs to recognize the structure of the proof in order to confine the error to a sub proof. Proof block detection is performed by looking at the syntax of the proof script (i.e. also looking at indentation). Coq comes with four kind of proof blocks, and an ML API to add new ones. :curly: blocks are delimited by { and }, see Chapter :ref:`proofhandling` :par: blocks are atomic, i.e. just one tactic introduced by the `par:` goal selector :indent: blocks end with a tactic indented less than the previous one :bullet: blocks are delimited by two equal bullet signs at the same indentation level Caveats ```````` When a command fails the subsequent error messages may be bogus, i.e. caused by the first error. Error resilience for commands can be switched off by passing ``-async-proofs-command-error-resilience off`` to CoqIDE. An incorrect proof block detection can result into an incorrect error recovery and hence in bogus errors. Proof block detection cannot be precise for bullets or any other non-well parenthesized proof structure. Error resilience can be turned off or selectively activated for any set of block kind passing to CoqIDE one of the following options: - ``-async-proofs-tactic-error-resilience off`` - ``-async-proofs-tactic-error-resilience all`` - ``-async-proofs-tactic-error-resilience`` :n:`{*, blocktype}` Valid proof block types are: “curly”, “par”, “indent”, and “bullet”. .. _interactive-mode: Interactive mode --------------------- At the time of writing the only user interface supporting asynchronous proof processing is CoqIDE. When CoqIDE is started, two Coq processes are created. The master one follows the user, giving feedback as soon as possible by skipping proofs, which are delegated to the worker process. The worker process, whose state can be seen by clicking on the button in the lower right corner of the main CoqIDE window, asynchronously processes the proofs. If a proof contains an error, it is reported in red in the label of the very same button, that can also be used to see the list of errors and jump to the corresponding line. If a proof is processed asynchronously the corresponding Qed command is colored using a lighter color than usual. This signals that the proof has been delegated to a worker process (or will be processed lazily if the ``-async-proofs lazy`` option is used). Once finished, the worker process will provide the proof object, but this will not be automatically checked by the kernel of the main process. To force the kernel to check all the proof objects, one has to click the button with the gears (Fully check the document) on the top bar. Only then all the universe constraints are checked. Caveats ``````` The number of worker processes can be increased by passing CoqIDE the ``-async-proofs-j n`` flag. Note that the memory consumption increases too, since each worker requires the same amount of memory as the master process. Also note that increasing the number of workers may reduce the reactivity of the master process to user commands. To disable this feature, one can pass the ``-async-proofs off`` flag to CoqIDE. Conversely, on Windows, where the feature is disabled by default, pass the ``-async-proofs on`` flag to enable it. Proofs that are known to take little time to process are not delegated to a worker process. The threshold can be configured with ``-async-proofs-delegation-threshold``. 
Default is 0.03 seconds. Batch mode --------------- .. warning:: The ``-vio`` flag is subsumed, for most practical usage, by the the more recent ``-vos`` flag. See :ref:`compiled-interfaces`. .. warning:: When working with ``.vio`` files, do not use the ``-vos`` option at the same time, otherwise stale files might get loaded when executing a ``Require``. Indeed, the loading of a nonempty ``.vos`` file is assigned higher priority than the loading of a ``.vio`` file. When Coq is used as a batch compiler by running ``coqc``, it produces a ``.vo`` file for each ``.v`` file. A ``.vo`` file contains, among other things, theorem statements and proofs. Hence to produce a .vo Coq need to process all the proofs of the ``.v`` file. The asynchronous processing of proofs can decouple the generation of a compiled file (like the ``.vo`` one) that can be loaded by ``Require`` from the generation and checking of the proof objects. The ``-vio`` flag can be passed to ``coqc`` to produce, quickly, ``.vio`` files. Alternatively, when using a Makefile produced by ``coq_makefile``, the ``vio`` target can be used to compile all files using the ``-vio`` flag. A ``.vio`` file can be loaded using ``Require`` exactly as a ``.vo`` file but proofs will not be available (the Print command produces an error). Moreover, some universe constraints might be missing, so universes inconsistencies might go unnoticed. A ``.vio`` file does not contain proof objects, but proof tasks, i.e. what a worker process can transform into a proof object. Compiling a set of files with the ``-vio`` flag allows one to work, interactively, on any file without waiting for all the proofs to be checked. When working interactively, one can fully check all the ``.v`` files by running ``coqc`` as usual. Alternatively one can turn each ``.vio`` into the corresponding ``.vo``. All .vio files can be processed in parallel, hence this alternative might be faster. The command ``coqc -schedule-vio2vo 2 a b c`` can be used to obtain a good scheduling for two workers to produce ``a.vo``, ``b.vo``, and ``c.vo``. When using a Makefile produced by ``coq_makefile``, the ``vio2vo`` target can be used for that purpose. Variable ``J`` should be set to the number of workers, e.g. ``make vio2vo J=2``. The only caveat is that, while the .vo files obtained from ``.vio`` files are complete (they contain all proof terms and universe constraints), the satisfiability of all universe constraints has not been checked globally (they are checked to be consistent for every single proof). Constraints will be checked when these ``.vo`` files are (recursively) loaded with ``Require``. There is an extra, possibly even faster, alternative: just check the proof tasks stored in ``.vio`` files without producing the ``.vo`` files. This is possibly faster because all the proof tasks are independent, hence one can further partition the job to be done between workers. The ``coqc -schedule-vio-checking 6 a b c`` command can be used to obtain a good scheduling for 6 workers to check all the proof tasks of ``a.vio``, ``b.vio``, and ``c.vio``. Auxiliary files are used to predict how long a proof task will take, assuming it will take the same amount of time it took last time. When using a Makefile produced by coq_makefile, the ``checkproofs`` target can be used to check all ``.vio`` files. Variable ``J`` should be set to the number of workers, e.g. ``make checkproofs J=6``. As when converting ``.vio`` files to ``.vo`` files, universe constraints are not checked to be globally consistent. 
Hence this compilation mode is only useful for quick regression testing and on developments not making heavy use of the ``Type`` hierarchy. Limiting the number of parallel workers -------------------------------------------- Many Coq processes may run on the same computer, and each of them may start many additional worker processes. The ``coqworkmgr`` utility lets one limit the number of workers, globally. The utility accepts the ``-j`` argument to specify the maximum number of workers (defaults to 2). ``coqworkmgr`` automatically starts in the background and prints an environment variable assignment like ``COQWORKMGR_SOCK=localhost:45634``. The user must set this variable in all the shells from which Coq processes will be started. If one uses just one terminal running the bash shell, then ``export ‘coqworkmgr -j 4‘`` will do the job. After that, all Coq processes, e.g. ``coqide`` and ``coqc``, will respect the limit, globally. coq-8.15.0/doc/sphinx/addendum/program.rst000066400000000000000000000306641417001151100204270ustar00rootroot00000000000000.. this should be just "_program", but refs to it don't work .. _programs: Program ======== :Author: Matthieu Sozeau We present here the |Program| tactic commands, used to build certified Coq programs, elaborating them from their algorithmic skeleton and a rich specification :cite:`sozeau06`. It can be thought of as a dual of :ref:`Extraction `. The goal of |Program| is to program as in a regular functional programming language whilst using as rich a specification as desired and proving that the code meets the specification using the whole Coq proof apparatus. This is done using a technique originating from the “Predicate subtyping” mechanism of PVS :cite:`Rushby98`, which generates type checking conditions while typing a term constrained to a particular type. Here we insert existential variables in the term, which must be filled with proofs to get a complete Coq term. |Program| replaces the |Program| tactic by Catherine Parent :cite:`Parent95b` which had a similar goal but is no longer maintained. The languages available as input are currently restricted to Coq’s term language, but may be extended to OCaml, Haskell and others in the future. We use the same syntax as Coq and permit to use implicit arguments and the existing coercion mechanism. Input terms and types are typed in an extended system (Russell) and interpreted into Coq terms. The interpretation process may produce some proof obligations which need to be resolved to create the final term. .. _elaborating-programs: Elaborating programs -------------------- The main difference from Coq is that an object in a type :g:`T : Set` can be considered as an object of type :g:`{x : T | P}` for any well-formed :g:`P : Prop`. If we go from :g:`T` to the subset of :g:`T` verifying property :g:`P`, we must prove that the object under consideration verifies it. Russell will generate an obligation for every such coercion. In the other direction, Russell will automatically insert a projection. Another distinction is the treatment of pattern matching. Apart from the following differences, it is equivalent to the standard match operation (see :ref:`extendedpatternmatching`). + Generation of equalities. A match expression is always generalized by the corresponding equality. As an example, the expression: :: match x with | 0 => t | S n => u end. will be first rewritten to: :: (match x as y return (x = y -> _) with | 0 => fun H : x = 0 -> t | S n => fun H : x = S n -> u end) (eq_refl x). 
This permits to get the proper equalities in the context of proof obligations inside clauses, without which reasoning is very limited. + Generation of disequalities. If a pattern intersects with a previous one, a disequality is added in the context of the second branch. See for example the definition of div2 below, where the second branch is typed in a context where :g:`∀ p, _ <> S (S p)`. + Coercion. If the object being matched is coercible to an inductive type, the corresponding coercion will be automatically inserted. This also works with the previous mechanism. There are flags to control the generation of equalities and coercions. .. flag:: Program Cases This :term:`flag` controls the special treatment of pattern matching generating equalities and disequalities when using |Program| (it is on by default). All pattern-matches and let-patterns are handled using the standard algorithm of Coq (see :ref:`extendedpatternmatching`) when this flag is deactivated. .. flag:: Program Generalized Coercion This :term:`flag` controls the coercion of general inductive types when using |Program| (the flag is on by default). Coercion of subset types and pairs is still active in this case. .. flag:: Program Mode This :term:`flag` enables the program mode, in which 1) typechecking allows subset coercions and 2) the elaboration of pattern matching of :cmd:`Fixpoint` and :cmd:`Definition` acts as if the :attr:`program` attribute has been used, generating obligations if there are unresolved holes after typechecking. .. attr:: program{? = {| yes | no } } :name: program; Program This :term:`boolean attribute` allows using or disabling the Program mode on a specific definition. An alternative and commonly used syntax is to use the legacy ``Program`` prefix (cf. :n:`@legacy_attr`) as it is elsewhere in this chapter. .. _syntactic_control: Syntactic control over equalities ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To give more control over the generation of equalities, the type checker will fall back directly to Coq’s usual typing of dependent pattern matching if a ``return`` or ``in`` clause is specified. Likewise, the if construct is not treated specially by |Program| so boolean tests in the code are not automatically reflected in the obligations. One can use the :g:`dec` combinator to get the correct hypotheses as in: .. coqtop:: in Require Import Program Arith. .. coqtop:: all Program Definition id (n : nat) : { x : nat | x = n } := if dec (leb n 0) then 0 else S (pred n). The :g:`let` tupling construct :g:`let (x1, ..., xn) := t in b` does not produce an equality, contrary to the let pattern construct :g:`let '(x1,..., xn) := t in b`. The next two commands are similar to their standard counterparts :cmd:`Definition` and :cmd:`Fixpoint` in that they define :term:`constants `. However, they may require the user to prove some goals to construct the final definitions. .. _program_definition: Program Definition ~~~~~~~~~~~~~~~~~~ A :cmd:`Definition` command with the :attr:`program` attribute types the value term in Russell and generates proof obligations. Once solved using the commands shown below, it binds the final Coq term to the name :n:`@ident` in the global environment. :n:`Program Definition @ident : @type := @term` Interprets the type :n:`@type`, potentially generating proof obligations to be resolved. Once done with them, we have a Coq type :n:`@type__0`. 
It then elaborates the preterm :n:`@term` into a Coq term :n:`@term__0`, checking that the type of :n:`@term__0` is coercible to :n:`@type__0`, and registers :n:`@ident` as being of type :n:`@type__0` once the set of obligations generated during the interpretation of :n:`@term__0` and the aforementioned coercion derivation are solved. .. seealso:: Sections :ref:`controlling-the-reduction-strategies`, :tacn:`unfold` .. _program_fixpoint: Program Fixpoint ~~~~~~~~~~~~~~~~ A :cmd:`Fixpoint` command with the :attr:`program` attribute may also generate obligations. It works with mutually recursive definitions too. For example: .. coqtop:: reset in Require Import Program Arith. .. coqtop:: all Program Fixpoint div2 (n : nat) : { x : nat | n = 2 * x \/ n = 2 * x + 1 } := match n with | S (S p) => S (div2 p) | _ => O end. The :cmd:`Fixpoint` command may include an optional :n:`@fixannot` annotation, which can be: + :g:`measure f R` where :g:`f` is a value of type :g:`X` computed on any subset of the arguments and the optional term :g:`R` is a relation on :g:`X`. :g:`X` defaults to :g:`nat` and :g:`R` to :g:`lt`. + :g:`wf R x` which is equivalent to :g:`measure x R`. .. todo see https://github.com/coq/coq/pull/12936#discussion_r492747830 Here we have one obligation for each branch (branches for :g:`0` and ``(S 0)`` are automatically generated by the pattern matching compilation algorithm). .. coqtop:: all Obligation 1. .. coqtop:: reset none Require Import Program Arith. One can use a well-founded order or a measure as termination orders using the syntax: .. coqtop:: in Program Fixpoint div2 (n : nat) {measure n} : { x : nat | n = 2 * x \/ n = 2 * x + 1 } := match n with | S (S p) => S (div2 p) | _ => O end. .. caution:: When defining structurally recursive functions, the generated obligations should have the prototype of the currently defined functional in their context. In this case, the obligations should be transparent (e.g. defined using :g:`Defined`) so that the guardedness condition on recursive calls can be checked by the kernel’s type- checker. There is an optimization in the generation of obligations which gets rid of the hypothesis corresponding to the functional when it is not necessary, so that the obligation can be declared opaque (e.g. using :g:`Qed`). However, as soon as it appears in the context, the proof of the obligation is *required* to be declared transparent. No such problems arise when using measures or well-founded recursion. .. _program_lemma: Program Lemma ~~~~~~~~~~~~~ A :cmd:`Lemma` command with the :attr:`program` attribute uses the Russell language to type statements of logical properties. It generates obligations, tries to solve them automatically and fails if some unsolved obligations remain. In this case, one can first define the lemma’s statement using :cmd:`Definition` and use it as the goal afterwards. Otherwise the proof will be started with the elaborated version as a goal. The :attr:`program` attribute can similarly be used with :cmd:`Variable`, :cmd:`Hypothesis`, :cmd:`Axiom` etc. .. _solving_obligations: Solving obligations ------------------- The following commands are available to manipulate obligations. The optional identifier is used when multiple functions have unsolved obligations (e.g. when defining mutually recursive blocks). The optional tactic is replaced by the default one if not specified. .. 
.. cmd:: Obligation Tactic := @ltac_expr

   Sets the default tactic used to solve obligations, both when trying to solve them automatically and when starting an interactive proof of one, e.g. with :cmd:`Next Obligation`. This command supports the :attr:`local` and :attr:`global` attributes. :attr:`local` makes the setting last only for the current module. :attr:`local` is the default inside sections, while :attr:`global` is the default otherwise.

.. cmd:: Show Obligation Tactic

   Displays the current default tactic.

.. cmd:: Obligations {? of @ident }

   Displays all remaining obligations.

.. cmd:: Obligation @natural {? of @ident } {? : @type {? with @ltac_expr } }

   Starts the proof of obligation :token:`natural`.

.. cmd:: Next Obligation {? of @ident } {? with @ltac_expr }

   Starts the proof of the next unsolved obligation.

.. cmd:: Solve Obligations {? of @ident } {? with @ltac_expr }

   Tries to solve each obligation of :token:`ident` using the given :token:`ltac_expr` or the default one.

.. cmd:: Solve All Obligations {? with @ltac_expr }

   Tries to solve each obligation of every program using the given tactic or the default one (useful for mutually recursive definitions).

.. cmd:: Admit Obligations {? of @ident }

   Admits all obligations (of :token:`ident`).

   .. note:: Does not work with structurally recursive programs.

.. cmd:: Preterm {? of @ident }

   Shows the term that will be fed to the kernel once the obligations are solved. Useful for debugging.

.. flag:: Transparent Obligations

   This :term:`flag` controls whether all obligations should be declared as transparent (the default), or if the system should infer which obligations can be declared opaque.

The module :g:`Coq.Program.Tactics` defines the default tactic for solving obligations, called :g:`program_simpl`. Importing :g:`Coq.Program.Program` also adds some useful notations, as documented in the file itself.

.. _program-faq:

Frequently Asked Questions
--------------------------

.. exn:: Ill-formed recursive definition.

   This error can happen when one tries to define a function by structural recursion on a subset object, which means the Coq function looks like: ::

      Program Fixpoint f (x : A | P) := match x with A b => f b end.

   Supposing ``b : A``, the argument at the recursive call to ``f`` is not a direct subterm of ``x``, as ``b`` is wrapped inside an ``exist`` constructor to build an object of type ``{x : A | P}``. Hence the definition is rejected by the guardedness condition checker. However, one can use well-founded recursion on subset objects like this: ::

      Program Fixpoint f (x : A | P) { measure (size x) } :=
        match x with A b => f b end.

   One will then just have to prove that the measure decreases at each recursive call. There are three drawbacks though:

   #. A measure function has to be defined;
   #. The reduction is a little more involved, although it works well using lazy evaluation;
   #. Mutual recursion on the underlying inductive type isn’t possible anymore, but nested mutual recursion is always possible.

.. |bdi| replace:: βδι
.. |ra| replace:: :math:`\rightarrow_{\beta\delta\iota}`
.. |la| replace:: :math:`\leftarrow_{\beta\delta\iota}`
.. |eq| replace:: `=`:sub:`(by the main correctness theorem)`
.. |re| replace:: ``(PEeval`` `v` `ap`\ ``)``
.. |le| replace:: ``(Pphi_dev`` `v` ``(norm`` `ap`\ ``))``
.. |N| replace:: ``N``
.. |nat| replace:: ``nat``
.. |Z| replace:: ``Z``
.. _theringandfieldtacticfamilies:

ring and field: solvers for polynomial and rational equations
==============================================================

:Author: Bruno Barras, Benjamin Grégoire, Assia Mahboubi, Laurent Théry [#f1]_

This chapter presents the tactics dedicated to dealing with ring and field equations.

What does this tactic do?
-------------------------

``ring`` does associative-commutative rewriting in ring and semiring structures. Assume you have two binary functions :math:`\oplus` and :math:`\otimes` that are associative and commutative, with :math:`\otimes` distributive over :math:`\oplus`, and two constants 0 and 1 that are unities for :math:`\oplus` and :math:`\otimes`. A polynomial is an expression built on variables :math:`V_0`, :math:`V_1`, :math:`\dots` and constants by application of :math:`\oplus` and :math:`\otimes`.

Let an ordered product be a product of variables :math:`V_{i_1} \otimes \dots \otimes V_{i_n}` verifying :math:`i_1 ≤ i_2 ≤ \dots ≤ i_n`. Let a monomial be the product of a constant and an ordered product. We can order the monomials by the lexicographic order on products of variables. Let a canonical sum be an ordered sum of monomials that are all different, i.e. each monomial in the sum is strictly less than the following monomial according to the lexicographic order. It is an easy theorem to show that every polynomial is equivalent (modulo the ring properties) to exactly one canonical sum. This canonical sum is called the normal form of the polynomial. In fact, the actual representation shares monomials with the same prefixes. So what does the ``ring`` tactic do? It normalizes polynomials over any ring or semiring structure. The basic use of ``ring`` is to simplify ring expressions, so that the user does not have to deal manually with the theorems of associativity and commutativity.

.. example:: In the ring of integers, the normal form of :math:`x (3 + yx + 25(1 − z)) + zx` is :math:`28x + (−24)xz + xxy`.

``ring`` is also able to compute a normal form modulo monomial equalities. For example, under the hypothesis that :math:`2x^2 = yz+1`, the normal form of :math:`2(x + 1)x − x − zy` is :math:`x+1`.

The variables map
-----------------

It is frequent to have an expression built with :math:`+` and :math:`\times`, but rarely on variables only. Let us associate a number to each subterm of a ring expression in the Gallina language. For example, consider this expression in the semiring ``nat``: ::

   (plus (mult (plus (f (5)) x) x) (mult (if b then (4) else (f (3))) (2)))

As a ring expression, it has 3 subterms. Give each subterm a number in an arbitrary order:

===== =============== ===========================
0     :math:`\mapsto` if b then (4) else (f (3))
1     :math:`\mapsto` (f (5))
2     :math:`\mapsto` x
===== =============== ===========================

Then normalize the “abstract” polynomial :math:`((V_1 \oplus V_2 ) \otimes V_2) \oplus (V_0 \otimes 2)`. In our example the normal form is :math:`(2 \otimes V_0 ) \oplus (V_1 \otimes V_2) \oplus (V_2 \otimes V_2 )`. Then substitute the variables by their values in the variables map to get the concrete normal polynomial: ::

   (plus (mult (2) (if b then (4) else (f (3)))) (plus (mult (f (5)) x) (mult x x)))

Is it automatic?
----------------

Yes, building the variables map and doing the substitution after normalizing is automatically done by the tactic. So you can just forget this paragraph and use the tactic according to your intuition.

Concrete usage in Coq
---------------------

.. tacn:: ring {? [ {+ @one_term } ] }
   Solves polynomial equations of a ring (or semiring) structure. It proceeds by normalizing both sides of the equation (w.r.t. associativity, commutativity and distributivity, constant propagation, rewriting of monomials) and syntactically comparing the results.

   :n:`[ {+ @one_term } ]`
     If specified, the tactic decides the equality of two terms modulo ring operations and the equalities defined by the :token:`one_term`\s. Each :token:`one_term` has to be a proof of some equality :g:`m = p`, where :g:`m` is a monomial (after “abstraction”), :g:`p` a polynomial and :g:`=` is the corresponding equality of the ring structure.

.. tacn:: ring_simplify {? [ {+ @one_term } ] } {+ @one_term } {? in @ident }

   Applies the normalization procedure described above to the given :token:`one_term`\s. The tactic then replaces all occurrences of the :token:`one_term`\s given in the conclusion of the goal by their normal forms. If no :token:`one_term` is given, then the conclusion should be an equation and both sides are normalized. The tactic can also be applied in a hypothesis.

   :n:`in @ident`
     If specified, the tactic performs the simplification in the hypothesis named :token:`ident`.

   .. note:: :n:`ring_simplify @one_term__1; ring_simplify @one_term__2` is not equivalent to :n:`ring_simplify @one_term__1 @one_term__2`. In the latter case the variables map is shared between the two :token:`one_term`\s, and a common subterm :g:`t` of :n:`@one_term__1` and :n:`@one_term__2` will have the same associated variable number. So the first alternative should be avoided for :token:`one_term`\s belonging to the same ring theory.

The tactic must be loaded by ``Require Import Ring``. The ring structures must be declared with the ``Add Ring`` command (see below). The ring of booleans is predefined; if one wants to use the tactic on |nat|, one must first require the module ``ArithRing`` (exported by ``Arith``); for |Z|, do ``Require Import ZArithRing`` or simply ``Require Import ZArith``; for |N|, do ``Require Import NArithRing`` or ``Require Import NArith``.

All declared ring structures can be printed with the :cmd:`Print Rings` command.

.. cmd:: Print Rings
   :undocumented:

.. example::

   .. coqtop:: all

      Require Import ZArith.
      Open Scope Z_scope.
      Goal forall a b c:Z, (a + b + c) ^ 2 = a * a + b ^ 2 + c * c + 2 * a * b + 2 * a * c + 2 * b * c.
      intros; ring.
      Abort.
      Goal forall a b:Z, 2 * a * b = 30 -> (a + b) ^ 2 = a ^ 2 + b ^ 2 + 30.
      intros a b H; ring [H].
      Abort.

Error messages:

.. exn:: Not a valid ring equation.

   The conclusion of the goal is not provable in the corresponding ring theory.

.. exn:: Arguments of ring_simplify do not have all the same type.

   :tacn:`ring_simplify` cannot simplify terms of several rings at the same time. Invoke the tactic once per ring structure.

.. exn:: Cannot find a declared ring structure over @term.

   No ring has been declared for the type of the terms to be simplified. Use :cmd:`Add Ring` first.

.. exn:: Cannot find a declared ring structure for equality @term.

   Same as above in the case of the :tacn:`ring` tactic.

Adding a ring structure
-----------------------

Declaring a new ring consists in proving that a ring signature (a carrier set, an equality, and ring operations: ``Ring_theory.ring_theory`` and ``Ring_theory.semi_ring_theory``) satisfies the ring axioms. Semirings (rings without + inverse) are also supported. The equality can be either Leibniz equality, or any relation declared as a setoid (see :ref:`tactics-enabled-on-user-provided-relations`).
The definitions of ring and semiring (see module ``Ring_theory``) are: .. coqdoc:: Record ring_theory : Prop := mk_rt { Radd_0_l : forall x, 0 + x == x; Radd_sym : forall x y, x + y == y + x; Radd_assoc : forall x y z, x + (y + z) == (x + y) + z; Rmul_1_l : forall x, 1 * x == x; Rmul_sym : forall x y, x * y == y * x; Rmul_assoc : forall x y z, x * (y * z) == (x * y) * z; Rdistr_l : forall x y z, (x + y) * z == (x * z) + (y * z); Rsub_def : forall x y, x - y == x + -y; Ropp_def : forall x, x + (- x) == 0 }. Record semi_ring_theory : Prop := mk_srt { SRadd_0_l : forall n, 0 + n == n; SRadd_sym : forall n m, n + m == m + n ; SRadd_assoc : forall n m p, n + (m + p) == (n + m) + p; SRmul_1_l : forall n, 1*n == n; SRmul_0_l : forall n, 0*n == 0; SRmul_sym : forall n m, n*m == m*n; SRmul_assoc : forall n m p, n*(m*p) == (n*m)*p; SRdistr_l : forall n m p, (n + m)*p == n*p + m*p }. This implementation of ``ring`` also features a notion of constant that can be parameterized. This can be used to improve the handling of closed expressions when operations are effective. It consists in introducing a type of *coefficients* and an implementation of the ring operations, and a morphism from the coefficient type to the ring carrier type. The morphism needs not be injective, nor surjective. As an example, one can consider the real numbers. The set of coefficients could be the rational numbers, upon which the ring operations can be implemented. The fact that there exists a morphism is defined by the following properties: .. coqdoc:: Record ring_morph : Prop := mkmorph { morph0 : [cO] == 0; morph1 : [cI] == 1; morph_add : forall x y, [x +! y] == [x]+[y]; morph_sub : forall x y, [x -! y] == [x]-[y]; morph_mul : forall x y, [x *! y] == [x]*[y]; morph_opp : forall x, [-!x] == -[x]; morph_eq : forall x y, x?=!y = true -> [x] == [y] }. Record semi_morph : Prop := mkRmorph { Smorph0 : [cO] == 0; Smorph1 : [cI] == 1; Smorph_add : forall x y, [x +! y] == [x]+[y]; Smorph_mul : forall x y, [x *! y] == [x]*[y]; Smorph_eq : forall x y, x?=!y = true -> [x] == [y] }. where ``c0`` and ``cI`` denote the 0 and 1 of the coefficient set, ``+!``, ``*!``, ``-!`` are the implementations of the ring operations, ``==`` is the equality of the coefficients, ``?+!`` is an implementation of this equality, and ``[x]`` is a notation for the image of ``x`` by the ring morphism. Since |Z| is an initial ring (and |N| is an initial semiring), it can always be considered as a set of coefficients. There are basically three kinds of (semi-)rings: abstract rings to be used when operations are not effective. The set of coefficients is |Z| (or |N| for semirings). computational rings to be used when operations are effective. The set of coefficients is the ring itself. The user only has to provide an implementation for the equality. customized ring for other cases. The user has to provide the coefficient set and the morphism. This implementation of ring can also recognize simple power expressions as ring expressions. A power function is specified by the following property: .. coqtop:: in Require Import Reals. Section POWER. Variable Cpow : Set. Variable Cp_phi : N -> Cpow. Variable rpow : R -> Cpow -> R. Record power_theory : Prop := mkpow_th { rpow_pow_N : forall r n, rpow r (Cp_phi n) = pow_N 1%R Rmult r n }. End POWER. The syntax for adding a new ring is .. cmd:: Add Ring @ident : @one_term {? ( {+, @ring_mod } ) } .. insertprodn ring_mod ring_mod .. 
prodn:: ring_mod ::= decidable @one_term | abstract | morphism @one_term | constants [ @ltac_expr ] | preprocess [ @ltac_expr ] | postprocess [ @ltac_expr ] | setoid @one_term @one_term | sign @one_term | power @one_term [ {+ @qualid } ] | power_tac @one_term [ @ltac_expr ] | div @one_term | closed [ {+ @qualid } ] The :n:`@ident` is used only for error messages. The :n:`@one_term` is a proof that the ring signature satisfies the (semi-)ring axioms. The optional list of modifiers is used to tailor the behavior of the tactic. Here are their effects: :n:`abstract` declares the ring as abstract. This is the default. :n:`decidable @one_term` declares the ring as computational. The expression :n:`@one_term` is the correctness proof of an equality test ``?=!`` (which should be evaluable). Its type should be of the form ``forall x y, x ?=! y = true → x == y``. :n:`morphism @one_term` declares the ring as a customized one. The expression :n:`@one_term` is a proof that there exists a morphism between a set of coefficient and the ring carrier (see ``Ring_theory.ring_morph`` and ``Ring_theory.semi_morph``). :n:`setoid @one_term @one_term` forces the use of given setoid. The first :n:`@one_term` is a proof that the equality is indeed a setoid (see ``Setoid.Setoid_Theory``), and the second a proof that the ring operations are morphisms (see ``Ring_theory.ring_eq_ext`` and ``Ring_theory.sring_eq_ext``). This modifier needs not be used if the setoid and morphisms have been declared. :n:`constants [ @ltac_expr ]` specifies a tactic expression :n:`@ltac_expr` that, given a term, returns either an object of the coefficient set that is mapped to the expression via the morphism, or returns ``InitialRing.NotConstant``. The default behavior is to map only 0 and 1 to their counterpart in the coefficient set. This is generally not desirable for nontrivial computational rings. :n:`preprocess [ @ltac_expr ]` specifies a tactic :n:`@ltac_expr` that is applied as a preliminary step for :tacn:`ring` and :tacn:`ring_simplify`. It can be used to transform a goal so that it is better recognized. For instance, ``S n`` can be changed to ``plus 1 n``. :n:`postprocess [ @ltac_expr ]` specifies a tactic :n:`@ltac_expr` that is applied as a final step for :tacn:`ring_simplify`. For instance, it can be used to undo modifications of the preprocessor. :n:`power @one_term [ {+ @qualid } ]` to be documented :n:`power_tac @one_term @ltac_expr ]` allows :tacn:`ring` and :tacn:`ring_simplify` to recognize power expressions with a constant positive integer exponent (example: :math:`x^2` ). The term :n:`@one_term` is a proof that a given power function satisfies the specification of a power function (term has to be a proof of ``Ring_theory.power_theory``) and :n:`@tactic` specifies a tactic expression that, given a term, “abstracts” it into an object of type |N| whose interpretation via ``Cp_phi`` (the evaluation function of power coefficient) is the original term, or returns ``InitialRing.NotConstant`` if not a constant coefficient (i.e. |Ltac| is the inverse function of ``Cp_phi``). See files ``plugins/ring/ZArithRing.v`` and ``plugins/ring/RealField.v`` for examples. By default the tactic does not recognize power expressions as ring expressions. :n:`sign @one_term` allows :tacn:`ring_simplify` to use a minus operation when outputting its normal form, i.e writing ``x − y`` instead of ``x + (− y)``. 
The term :token:`term` is a proof that a given sign function indicates expressions that are signed (:token:`term` has to be a proof of ``Ring_theory.get_sign``). See ``plugins/ring/InitialRing.v`` for examples of sign function. :n:`div @one_term` allows :tacn:`ring` and :tacn:`ring_simplify` to use monomials with coefficients other than 1 in the rewriting. The term :n:`@one_term` is a proof that a given division function satisfies the specification of an euclidean division function (:n:`@one_term` has to be a proof of ``Ring_theory.div_theory``). For example, this function is called when trying to rewrite :math:`7x` by :math:`2x = z` to tell that :math:`7 = 3 \times 2 + 1`. See ``plugins/ring/InitialRing.v`` for examples of div function. :n:`closed [ {+ @qualid } ]` to be documented Error messages: .. exn:: Bad ring structure. The proof of the ring structure provided is not of the expected type. .. exn:: Bad lemma for decidability of equality. The equality function provided in the case of a computational ring has not the expected type. .. exn:: Ring operation should be declared as a morphism. A setoid associated with the carrier of the ring structure has been found, but the ring operation should be declared as morphism. See :ref:`tactics-enabled-on-user-provided-relations`. How does it work? ---------------------- The code of ``ring`` is a good example of a tactic written using *reflection*. What is reflection? Basically, using it means that a part of a tactic is written in Gallina, Coq's language of terms, rather than |Ltac| or OCaml. From the philosophical point of view, reflection is using the ability of the Calculus of Constructions to speak and reason about itself. For the ``ring`` tactic we used Coq as a programming language and also as a proof environment to build a tactic and to prove its correctness. The interested reader is strongly advised to have a look at the file ``Ring_polynom.v``. Here a type for polynomials is defined: .. coqdoc:: Inductive PExpr : Type := | PEc : C -> PExpr | PEX : positive -> PExpr | PEadd : PExpr -> PExpr -> PExpr | PEsub : PExpr -> PExpr -> PExpr | PEmul : PExpr -> PExpr -> PExpr | PEopp : PExpr -> PExpr | PEpow : PExpr -> N -> PExpr. Polynomials in normal form are defined as: .. coqdoc:: Inductive Pol : Type := | Pc : C -> Pol | Pinj : positive -> Pol -> Pol | PX : Pol -> positive -> Pol -> Pol. where ``Pinj n P`` denotes ``P`` in which :math:`V_i` is replaced by :math:`V_{i+n}` , and ``PX P n Q`` denotes :math:`P \otimes V_1^n \oplus Q'`, `Q'` being `Q` where :math:`V_i` is replaced by :math:`V_{i+1}`. Variable maps are represented by lists of ring elements, and two interpretation functions, one that maps a variables map and a polynomial to an element of the concrete ring, and the second one that does the same for normal forms: .. coqdoc:: Definition PEeval : list R -> PExpr -> R := [...]. Definition Pphi_dev : list R -> Pol -> R := [...]. A function to normalize polynomials is defined, and the big theorem is its correctness w.r.t interpretation, that is: .. coqdoc:: Definition norm : PExpr -> Pol := [...]. Lemma Pphi_dev_ok : forall l pe npe, norm pe = npe -> PEeval l pe == Pphi_dev l npe. So now, what is the scheme for a normalization proof? Let p be the polynomial expression that the user wants to normalize. First a little piece of ML code guesses the type of `p`, the ring theory `T` to use, an abstract polynomial `ap` and a variables map `v` such that `p` is |bdi|- equivalent to `(PEeval v ap)`. 
Then we replace it by `(Pphi_dev v (norm ap))`, using the main correctness theorem and we reduce it to a concrete expression `p’`, which is the concrete normal form of `p`. This is summarized in this diagram: ========= ====== ==== `p` |ra| |re| \ |eq| \ `p’` |la| |le| ========= ====== ==== The user does not see the right part of the diagram. From outside, the tactic behaves like a |bdi| simplification extended with rewriting rules for associativity and commutativity. Basically, the proof is only the application of the main correctness theorem to well-chosen arguments. Dealing with fields ------------------------ .. tacn:: field {? [ {+ @one_term } ] } An extension of the :tacn:`ring` tactic that deals with rational expressions. Given a rational expression :math:`F = 0`. It first reduces the expression `F` to a common denominator :math:`N/D = 0` where `N` and `D` are two ring expressions. For example, if we take :math:`F = (1 − 1/x) x − x + 1`, this gives :math:`N = (x − 1) x − x^2 + x` and :math:`D = x`. It then calls ring to solve :math:`N = 0`. :n:`[ {+ @one_term } ]` If specified, the tactic decides the equality of two terms modulo field operations and the equalities defined by the :token:`one_term`\s. Each :token:`one_term` has to be a proof of some equality :g:`m = p`, where :g:`m` is a monomial (after “abstraction”), :g:`p` a polynomial and :g:`=` the corresponding equality of the field structure. .. note:: Rewriting works with the equality :g:`m = p` only if :g:`p` is a polynomial since rewriting is handled by the underlying ring tactic. Note that :n:`field` also generates nonzero conditions for all the denominators it encounters in the reduction. In our example, it generates the condition :math:`x \neq 0`. These conditions appear as one subgoal which is a conjunction if there are several denominators. Nonzero conditions are always polynomial expressions. For example when reducing the expression :math:`1/(1 + 1/x)`, two side conditions are generated: :math:`x \neq 0` and :math:`x + 1 \neq 0`. Factorized expressions are broken since a field is an integral domain, and when the equality test on coefficients is complete w.r.t. the equality of the target field, constants can be proven different from zero automatically. The tactic must be loaded by ``Require Import Field``. New field structures can be declared to the system with the ``Add Field`` command (see below). The field of real numbers is defined in module ``RealField`` (in ``plugins/ring``). It is exported by module ``Rbase``, so that requiring ``Rbase`` or ``Reals`` is enough to use the field tactics on real numbers. Rational numbers in canonical form are also declared as a field in the module ``Qcanon``. .. example:: .. coqtop:: all Require Import Reals. Open Scope R_scope. Goal forall x, x <> 0 -> (1 - 1 / x) * x - x + 1 = 0. intros; field; auto. Abort. Goal forall x y, y <> 0 -> y = x -> x / y = 1. intros x y H H1; field [H1]; auto. Abort. .. example:: :tacn:`field` that generates side goals .. coqtop:: reset all Require Import Reals. Goal forall x y:R, (x * y > 0)%R -> (x * (1 / x + x / (x + y)))%R = ((- 1 / y) * y * (- x * (x / (x + y)) - 1))%R. intros; field. .. tacn:: field_simplify {? [ {+ @one_term__eq } ] } {+ @one_term } {? in @ident } Performs the simplification in the conclusion of the goal, :math:`F_1 = F_2` becomes :math:`N_1 / D_1 = N_2 / D_2`. A normalization step (the same as the one for rings) is then applied to :math:`N_1`, :math:`D_1`, :math:`N_2` and :math:`D_2`. 
This way, polynomials remain in factorized form during fraction simplification. This yields smaller expressions when reducing to the same denominator since common factors can be canceled. :n:`[ {+ @one_term__eq } ]` Do simplification in the conclusion of the goal using the equalities defined by these :token:`one_term`\s. :n:`{+ @one_term }` Terms to simplify in the conclusion. :n:`in @ident` If specified, substitute in the hypothesis :n:`@ident` instead of the conclusion. .. tacn:: field_simplify_eq {? [ {+ @one_term } ] } {? in @ident } Performs the simplification in the conclusion of the goal, removing the denominator. :math:`F_1 = F_2` becomes :math:`N_1 D_2 = N_2 D_1`. :n:`[ {+ @one_term } ]` Do simplification in the conclusion of the goal using the equalities defined by these :token:`one_term`\s. :n:`in @ident` If specified, simplify in the hypothesis :n:`@ident` instead of the conclusion. Adding a new field structure --------------------------------- Declaring a new field consists in proving that a field signature (a carrier set, an equality, and field operations: ``Field_theory.field_theory`` and ``Field_theory.semi_field_theory``) satisfies the field axioms. Semi-fields (fields without + inverse) are also supported. The equality can be either Leibniz equality, or any relation declared as a setoid (see :ref:`tactics-enabled-on-user-provided-relations`). The definition of fields and semifields is: .. coqdoc:: Record field_theory : Prop := mk_field { F_R : ring_theory rO rI radd rmul rsub ropp req; F_1_neq_0 : ~ 1 == 0; Fdiv_def : forall p q, p / q == p * / q; Finv_l : forall p, ~ p == 0 -> / p * p == 1 }. Record semi_field_theory : Prop := mk_sfield { SF_SR : semi_ring_theory rO rI radd rmul req; SF_1_neq_0 : ~ 1 == 0; SFdiv_def : forall p q, p / q == p * / q; SFinv_l : forall p, ~ p == 0 -> / p * p == 1 }. The result of the normalization process is a fraction represented by the following type: .. coqdoc:: Record linear : Type := mk_linear { num : PExpr C; denum : PExpr C; condition : list (PExpr C) }. where ``num`` and ``denum`` are the numerator and denominator; ``condition`` is a list of expressions that have appeared as a denominator during the normalization process. These expressions must be proven different from zero for the correctness of the algorithm. The syntax for adding a new field is .. cmd:: Add Field @ident : @one_term {? ( {+, @field_mod } ) } .. insertprodn field_mod field_mod .. prodn:: field_mod ::= @ring_mod | completeness @one_term The :n:`@ident` is used only for error messages. :n:`@one_term` is a proof that the field signature satisfies the (semi-)field axioms. The optional list of modifiers is used to tailor the behavior of the tactic. Since field tactics are built upon ``ring`` tactics, all modifiers of :cmd:`Add Ring` apply. There is only one specific modifier: completeness :n:`@one_term` allows the field tactic to prove automatically that the image of nonzero coefficients are mapped to nonzero elements of the field. :n:`@one_term` is a proof of :g:`forall x y, [x] == [y] -> x ?=! y = true`, which is the completeness of equality on coefficients w.r.t. the field equality. History of ring -------------------- First Samuel Boutin designed the tactic ``ACDSimpl``. This tactic did lot of rewriting. But the proofs terms generated by rewriting were too big for Coq’s type checker. Let us see why: .. coqtop:: reset all Require Import ZArith. Open Scope Z_scope. Goal forall x y z : Z, x + 3 + y + y * z = x + 3 + y + z * y. 
      intros; rewrite (Zmult_comm y z); reflexivity.
      Save foo.
      Print foo.

At each step of rewriting, the whole context is duplicated in the proof term. Then, a tactic that does hundreds of rewriting steps generates huge proof terms. Since ``ACDSimpl`` was too slow, Samuel Boutin rewrote it using reflection (see :cite:`Bou97`). Later, it was rewritten by Patrick Loiseleur: the new tactic no longer requires ``ACDSimpl`` to compile, and it makes use of |bdi|-reduction not only to replace the rewriting steps, but also to achieve the interleaving of computation and reasoning (see :ref:`discussion_reflection`). He also wrote some ML code for the ``Add Ring`` command that allows registering new rings dynamically. Proof terms generated by ``ring`` are quite small: they are linear in the number of :math:`\oplus` and :math:`\otimes` operations in the normalized terms. Type checking those terms requires some time because it makes a large use of the conversion rule, but memory requirements are much smaller.

.. _discussion_reflection:

Discussion
----------

Efficiency is not the only motivation to use reflection here. ``ring`` also deals with constants; for example, it rewrites the expression ``34 + 2 * x − x + 12`` to the expected result ``x + 46``. For the tactic ``ACDSimpl``, the only constants were 0 and 1. So the expression ``34 + 2 * (x − 1) + 12`` is interpreted as :math:`V_0 \oplus V_1 \otimes (V_2 \ominus 1) \oplus V_3`, with the variables mapping :math:`\{V_0 \mapsto 34; V_1 \mapsto 2; V_2 \mapsto x; V_3 \mapsto 12\}`. Then it is rewritten to ``34 − x + 2 * x + 12``, very far from the expected result. Here rewriting is not sufficient: you have to do some kind of reduction (some kind of computation) to achieve the normalization.

The tactic ``ring`` is not only faster than the old one: by using reflection, we get for free the integration of computation and reasoning that would be very difficult to implement without it.

Is it the ultimate way to write tactics? The answer is: yes and no. The ``ring`` tactic intensively uses the conversion rules of the Calculus of Inductive Constructions, i.e. it replaces proofs by computations as much as possible. It can be useful in all situations where a classical tactic generates huge proof terms, like symbolic processing and tautologies. But there are also tactics like ``auto`` or ``linear`` that do many complex computations, using side effects and backtracking, and generate a small proof term. Clearly, it would be significantly less efficient to replace them by tactics using reflection.

Another idea suggested by Benjamin Werner: reflection could be used to couple an external tool (a rewriting program or a model checker) with Coq. We define (in Coq) a type of terms, a type of *traces*, and prove a correctness theorem that states that *replaying traces* is safe with respect to some interpretation. Then we let the external tool do every computation (using side effects, backtracking, exceptions, or other features that are not available in pure lambda calculus) to produce the trace. Now we can check in Coq that the trace has the expected semantics by applying the correctness theorem.

.. rubric:: Footnotes

.. [#f1] based on previous work from Patrick Loiseleur and Samuel Boutin

.. _sprop:

SProp (proof irrelevant propositions)
=====================================

.. warning:: The status of strict propositions is experimental.
In particular, conversion checking through bytecode or native code compilation currently does not understand proof irrelevance. This section describes the extension of Coq with definitionally proof irrelevant propositions (types in the sort :math:`\SProp`, also known as strict propositions) as described in :cite:`Gilbert:POPL2019`. Use of |SProp| may be disabled by passing ``-disallow-sprop`` to the Coq program or by turning the :flag:`Allow StrictProp` flag off. .. flag:: Allow StrictProp This :term:`flag` enables or disables the use of |SProp|. It is enabled by default. The command-line flag ``-disallow-sprop`` disables |SProp| at startup. .. exn:: SProp is disallowed because the "Allow StrictProp" flag is off. :undocumented: Some of the definitions described in this document are available through ``Coq.Logic.StrictProp``, which see. Basic constructs ---------------- The purpose of :math:`\SProp` is to provide types where all elements are convertible: .. coqtop:: all Theorem irrelevance (A : SProp) (P : A -> Prop) : forall x : A, P x -> forall y : A, P y. Proof. intros * Hx *. exact Hx. Qed. Since we have definitional :ref:`eta-expansion-sect` for functions, the property of being a type of definitionally irrelevant values is impredicative, and so is :math:`\SProp`: .. coqtop:: all Check fun (A:Type) (B:A -> SProp) => (forall x:A, B x) : SProp. In order to keep conversion tractable, cumulativity for :math:`\SProp` is forbidden, unless the :flag:`Cumulative StrictProp` flag is turned on: .. coqtop:: all Fail Check (fun (A:SProp) => A : Type). Set Cumulative StrictProp. Check (fun (A:SProp) => A : Type). .. coqtop:: none Unset Cumulative StrictProp. We can explicitly lift strict propositions into the relevant world by using a wrapping inductive type. The inductive stops definitional proof irrelevance from escaping. .. coqtop:: in Inductive Box (A:SProp) : Prop := box : A -> Box A. Arguments box {_} _. .. coqtop:: all Fail Check fun (A:SProp) (x y : Box A) => eq_refl : x = y. .. doesn't get merged with the above if coqdoc .. coqtop:: in Definition box_irrelevant (A:SProp) (x y : Box A) : x = y := match x, y with box x, box y => eq_refl end. In the other direction, we can use impredicativity to "squash" a relevant type, making an irrelevant approximation. .. coqdoc:: Definition iSquash (A:Type) : SProp := forall P : SProp, (A -> P) -> P. Definition isquash A : A -> iSquash A := fun a P f => f a. Definition iSquash_sind A (P : iSquash A -> SProp) (H : forall x : A, P (isquash A x)) : forall x : iSquash A, P x := fun x => x (P x) (H : A -> P x). Or more conveniently (but equivalently) .. coqdoc:: Inductive Squash (A:Type) : SProp := squash : A -> Squash A. Most inductives types defined in :math:`\SProp` are squashed types, i.e. they can only be eliminated to construct proofs of other strict propositions. Empty types are the only exception. .. coqtop:: in Inductive sEmpty : SProp := . .. coqtop:: all Check sEmpty_rect. .. note:: Eliminators to strict propositions are called ``foo_sind``, in the same way that eliminators to propositions are called ``foo_ind``. Primitive records in :math:`\SProp` are allowed when fields are strict propositions, for instance: .. coqtop:: in Set Primitive Projections. Record sProd (A B : SProp) : SProp := { sfst : A; ssnd : B }. On the other hand, to avoid having definitionally irrelevant types in non-:math:`\SProp` sorts (through record η-extensionality), primitive records in relevant sorts must have at least one relevant field. .. 
coqtop:: all Set Warnings "+non-primitive-record". Fail Record rBox (A:SProp) : Prop := rbox { runbox : A }. .. coqdoc:: Record ssig (A:Type) (P:A -> SProp) : Type := { spr1 : A; spr2 : P spr1 }. Note that ``rBox`` works as an emulated record, which is equivalent to the Box inductive. Encodings for strict propositions --------------------------------- The elimination for unit types can be encoded by a trivial function thanks to proof irrelevance: .. coqdoc:: Inductive sUnit : SProp := stt. Definition sUnit_rect (P:sUnit->Type) (v:P stt) (x:sUnit) : P x := v. By using empty and unit types as base values, we can encode other strict propositions. For instance: .. coqdoc:: Definition is_true (b:bool) : SProp := if b then sUnit else sEmpty. Definition is_true_eq_true b : is_true b -> true = b := match b with | true => fun _ => eq_refl | false => sEmpty_ind _ end. Definition eq_true_is_true b (H:true=b) : is_true b := match H in _ = x return is_true x with eq_refl => stt end. Definitional UIP ---------------- .. flag:: Definitional UIP This :term:`flag`, off by default, allows the declaration of non-squashed inductive types with 1 constructor which takes no argument in |SProp|. Since this includes equality types, it provides definitional uniqueness of identity proofs. Because squashing is a universe restriction, unsetting :flag:`Universe Checking` is stronger than setting :flag:`Definitional UIP`. Definitional UIP involves a special reduction rule through which reduction depends on conversion. Consider the following code: .. coqtop:: in Set Definitional UIP. Inductive seq {A} (a:A) : A -> SProp := srefl : seq a a. Axiom e : seq 0 0. Definition hidden_arrow := match e return Set with srefl _ => nat -> nat end. Check (fun (f : hidden_arrow) (x:nat) => (f : nat -> nat) x). By the usual reduction rules :g:`hidden_arrow` is a stuck match, but by proof irrelevance :g:`e` is convertible to :g:`srefl 0` and then by congruence :g:`hidden_arrow` is convertible to `nat -> nat`. The special reduction reduces any match on a type which uses definitional UIP when the indices are convertible to those of the constructor. For `seq`, this means a match on a value of type `seq x y` reduces if and only if `x` and `y` are convertible. Such matches are indicated in the printed representation by inserting a cast around the discriminee: .. coqtop:: out Print hidden_arrow. Non Termination with UIP ++++++++++++++++++++++++ The special reduction rule of UIP combined with an impredicative sort breaks termination of reduction :cite:`abel19:failur_normal_impred_type_theor`: .. coqtop:: all Axiom all_eq : forall (P Q:Prop), P -> Q -> seq P Q. Definition transport (P Q:Prop) (x:P) (y:Q) : Q := match all_eq P Q x y with srefl _ => x end. Definition top : Prop := forall P : Prop, P -> P. Definition c : top := fun P p => transport (top -> top) P (fun x : top => x (top -> top) (fun x => x) x) p. Fail Timeout 1 Eval lazy in c (top -> top) (fun x => x) c. The term :g:`c (top -> top) (fun x => x) c` infinitely reduces to itself. Issues with non-cumulativity ---------------------------- During normal term elaboration, we don't always know that a type is a strict proposition early enough. For instance: .. coqdoc:: Definition constant_0 : ?[T] -> nat := fun _ : sUnit => 0. While checking the type of the constant, we only know that ``?[T]`` must inhabit some sort. Putting it in some floating universe ``u`` would disallow instantiating it by ``sUnit : SProp``. 
In order to make the system usable without having to annotate every instance of :math:`\SProp`, we consider :math:`\SProp` to be a subtype of every universe during elaboration (i.e. outside the kernel). Then, once we have a fully elaborated term, it is sent to the kernel, which will check that we didn't actually need cumulativity of :math:`\SProp` (in the example above, ``u`` doesn't appear in the final term). This means that some errors will be delayed until ``Qed``:

.. coqtop:: in

   Lemma foo : Prop.
   Proof. pose (fun A : SProp => A : Type); exact True.

.. coqtop:: all

   Fail Qed.

.. coqtop:: in

   Abort.

.. flag:: Elaboration StrictProp Cumulativity

   Unset this :term:`flag` (it is on by default) to be strict with regard to :math:`\SProp` cumulativity during elaboration.

The implementation of proof irrelevance uses inferred "relevance" marks on binders to determine which variables are irrelevant. Together with non-cumulativity this allows us to avoid retyping during conversion. However, during elaboration cumulativity is allowed, so the algorithm may miss some irrelevance:

.. coqtop:: all

   Fail Definition late_mark := fun (A:SProp) (P:A -> Prop) x y (v:P x) => v : P y.

The binders for ``x`` and ``y`` are created before their type is known to be ``A``, so they're not marked irrelevant. This can be avoided with sufficient annotation of binders (see ``irrelevance`` at the beginning of this chapter) or by bypassing the conversion check in tactics.

.. coqdoc::

   Definition late_mark := fun (A:SProp) (P:A -> Prop) x y (v:P x) =>
     ltac:(exact_no_check v) : P y.

The kernel will re-infer the marks on the fully elaborated term, and so correctly converts ``x`` and ``y``.

.. warn:: Bad relevance

   This is a developer warning, disabled by default. It is emitted by the kernel when it is passed a term with incorrect relevance marks. To avoid conversion issues as in ``late_mark``, you may wish to use it to find out when your tactics are producing incorrect marks.

.. flag:: Cumulative StrictProp

   Set this :term:`flag` (it is off by default) to make the kernel accept cumulativity between |SProp| and other universes. This makes typechecking incomplete.

.. _typeclasses:

Typeclasses
===========

This chapter presents a quick reference of the commands related to type classes. For an actual introduction to typeclasses, there is a description of the system :cite:`sozeau08`, and the literature on type classes in Haskell also applies.

Class and Instance declarations
-------------------------------

The syntax for class and instance declarations is the same as the record syntax of Coq:

.. coqdoc::

   Class classname (p1 : t1) ⋯ (pn : tn) [: sort] := { f1 : u1 ; ⋯ ; fm : um }.

   Instance instancename q1 ⋯ qm : classname p1 ⋯ pn := { f1 := t1 ; ⋯ ; fm := tm }.

The ``pi : ti`` variables are called the *parameters* of the class and the ``fi : ui`` are called the *methods*. Each class definition gives rise to a corresponding record declaration, and each instance is a regular definition whose name is given by `instancename` and whose type is an instantiation of the record type.

We’ll use the following example class in the rest of the chapter:

.. coqtop:: none

   Set Warnings "-deprecated-instance-without-locality".

.. coqtop:: in

   Class EqDec (A : Type) := { eqb : A -> A -> bool ; eqb_leibniz : forall x y, eqb x y = true -> x = y }.
This class implements a boolean equality test which is compatible with Leibniz equality on some type. An example implementation is: .. coqtop:: in Instance unit_EqDec : EqDec unit := { eqb x y := true ; eqb_leibniz x y H := match x, y return x = y with | tt, tt => eq_refl tt end }. Using the :attr:`refine` attribute, if the term is not sufficient to finish the definition (e.g. due to a missing field or non-inferable hole) it must be finished in proof mode. If it is sufficient a trivial proof mode with no open goals is started. .. coqtop:: in #[refine] Instance unit_EqDec' : EqDec unit := { eqb x y := true }. Proof. intros [] [];reflexivity. Defined. Note that if you finish the proof with :cmd:`Qed` the entire instance will be opaque, including the fields given in the initial term. Alternatively, in :flag:`Program Mode` if one does not give all the members in the Instance declaration, Coq generates obligations for the remaining fields, e.g.: .. coqtop:: in Require Import Program.Tactics. Program Instance eq_bool : EqDec bool := { eqb x y := if x then y else negb y }. .. coqtop:: all Next Obligation. destruct x ; destruct y ; (discriminate || reflexivity). Defined. One has to take care that the transparency of every field is determined by the transparency of the :cmd:`Instance` proof. One can use alternatively the :attr:`program` attribute to get richer facilities for dealing with obligations. Binding classes --------------- Once a typeclass is declared, one can use it in class binders: .. coqtop:: all Definition neqb {A} {eqa : EqDec A} (x y : A) := negb (eqb x y). When one calls a class method, a constraint is generated that is satisfied only in contexts where the appropriate instances can be found. In the example above, a constraint ``EqDec A`` is generated and satisfied by ``eqa : EqDec A``. In case no satisfying constraint can be found, an error is raised: .. coqtop:: all Fail Definition neqb' (A : Type) (x y : A) := negb (eqb x y). The algorithm used to solve constraints is a variant of the :tacn:`eauto` tactic that does proof search with a set of lemmas (the instances). It will use local hypotheses as well as declared lemmas in the ``typeclass_instances`` database. Hence the example can also be written: .. coqtop:: all Definition neqb' A (eqa : EqDec A) (x y : A) := negb (eqb x y). However, the generalizing binders should be used instead as they have particular support for typeclasses: + They automatically set the maximally implicit status for typeclass arguments, making derived functions as easy to use as class methods. In the example above, ``A`` and ``eqa`` should be set maximally implicit. + They support implicit quantification on partially applied type classes (:ref:`implicit-generalization`). Any argument not given as part of a typeclass binder will be automatically generalized. + They also support implicit quantification on :ref:`superclasses`. Following the previous example, one can write: .. coqtop:: all Generalizable Variables A B C. Definition neqb_implicit `{eqa : EqDec A} (x y : A) := negb (eqb x y). Here ``A`` is implicitly generalized, and the resulting function is equivalent to the one above. Parameterized Instances ----------------------- One can declare parameterized instances as in Haskell simply by giving the constraints as a binding context before the instance, e.g.: .. coqtop:: in Program Instance prod_eqb `(EA : EqDec A, EB : EqDec B) : EqDec (A * B) := { eqb x y := match x, y with | (la, ra), (lb, rb) => andb (eqb la lb) (eqb ra rb) end }. .. 
coqtop:: none Admit Obligations. These instances are used just as well as lemmas in the instance hint database. .. _contexts: Sections and contexts --------------------- To ease developments parameterized by many instances, one can use the :cmd:`Context` command to introduce the parameters into the :term:`local context`, it works similarly to the command :cmd:`Variable`, except it accepts any binding context as an argument, so variables can be implicit, and :ref:`implicit-generalization` can be used. For example: .. coqtop:: all Section EqDec_defs. Context `{EA : EqDec A}. .. coqtop:: in #[ global, program ] Instance option_eqb : EqDec (option A) := { eqb x y := match x, y with | Some x, Some y => eqb x y | None, None => true | _, _ => false end }. Admit Obligations. .. coqtop:: all End EqDec_defs. About option_eqb. Here the :attr:`global` attribute redeclares the instance at the end of the section, once it has been generalized by the context variables it uses. .. seealso:: Section :ref:`section-mechanism` Building hierarchies -------------------- .. _superclasses: Superclasses ~~~~~~~~~~~~ One can also parameterize classes by other classes, generating a hierarchy of classes and superclasses. In the same way, we give the superclasses as a binding context: .. coqtop:: all Class Ord `(E : EqDec A) := { le : A -> A -> bool }. Contrary to Haskell, we have no special syntax for superclasses, but this declaration is equivalent to: .. coqdoc:: Class `(E : EqDec A) => Ord A := { le : A -> A -> bool }. This declaration means that any instance of the ``Ord`` class must have an instance of ``EqDec``. The parameters of the subclass contain at least all the parameters of its superclasses in their order of appearance (here A is the only one). As we have seen, ``Ord`` is encoded as a record type with two parameters: a type ``A`` and an ``E`` of type ``EqDec A``. However, one can still use it as if it had a single parameter inside generalizing binders: the generalization of superclasses will be done automatically. .. coqtop:: all Definition le_eqb `{Ord A} (x y : A) := andb (le x y) (le y x). In some cases, to be able to specify sharing of structures, one may want to give explicitly the superclasses. It is is possible to do it directly in regular binders, and using the ``!`` modifier in class binders. For example: .. coqtop:: all Definition lt `{eqa : EqDec A, ! Ord eqa} (x y : A) := andb (le x y) (neqb x y). The ``!`` modifier switches the way a binder is parsed back to the usual interpretation of Coq. In particular, it uses the implicit arguments mechanism if available, as shown in the example. Substructures ~~~~~~~~~~~~~ .. index:: :> (substructure) Substructures are components of a class which are instances of a class themselves. They often arise when using classes for logical properties, e.g.: .. coqtop:: none Require Import Relation_Definitions. .. coqtop:: in Class Reflexive (A : Type) (R : relation A) := reflexivity : forall x, R x x. Class Transitive (A : Type) (R : relation A) := transitivity : forall x y z, R x y -> R y z -> R x z. This declares singleton classes for reflexive and transitive relations, (see the :ref:`singleton class ` variant for an explanation). These may be used as parts of other classes: .. coqtop:: all Class PreOrder (A : Type) (R : relation A) := { PreOrder_Reflexive :> Reflexive A R ; PreOrder_Transitive :> Transitive A R }. The syntax ``:>`` indicates that each ``PreOrder`` can be seen as a ``Reflexive`` relation. 
So each time a reflexive relation is needed, a preorder can be used instead. This is very similar to the coercion mechanism of ``Structure`` declarations. The implementation simply declares each projection as an instance. .. warn:: Ignored instance declaration for “@ident”: “@term” is not a class Using this ``:>`` syntax with a right-hand-side that is not itself a Class has no effect (apart from emitting this warning). In particular, is does not declare a coercion. One can also declare existing objects or structure projections using the Existing Instance command to achieve the same effect. Summary of the commands ----------------------- .. cmd:: Class @record_definition Class @singleton_class_definition .. insertprodn singleton_class_definition singleton_class_definition .. prodn:: singleton_class_definition ::= {? > } @ident_decl {* @binder } {? : @sort } := @constructor The first form declares a record and makes the record a typeclass with parameters :n:`{* @binder }` and the listed record fields. .. _singleton-class: The second form declares a *singleton* class with a single method. This singleton class is a so-called definitional class, represented simply as a definition ``ident binders := term`` and whose instances are themselves objects of this type. Definitional classes are not wrapped inside records, and the trivial projection of an instance of such a class is convertible to the instance itself. This can be useful to make instances of existing objects easily and to reduce proof size by not inserting useless projections. The class :term:`constant` itself is declared rigid during resolution so that the class abstraction is maintained. Like any command declaring a record, this command supports the :attr:`universes(polymorphic)`, :attr:`universes(template)`, :attr:`universes(cumulative)` and :attr:`private(matching)` attributes. When record syntax is used, this command also supports the :attr:`projections(primitive)` :term:`attribute`. .. cmd:: Existing Class @qualid This variant declares a class from a previously declared :term:`constant` or inductive definition. No methods or instances are defined. .. warn:: @ident is already declared as a typeclass This command has no effect when used on a typeclass. .. cmd:: Instance {? @ident_decl {* @binder } } : @type {? @hint_info } {? {| := %{ {* @field_def } %} | := @term } } Declares a typeclass instance named :token:`ident_decl` of the class :n:`@type` with the specified parameters and with fields defined by :token:`field_def`, where each field must be a declared field of the class. Adds one or more :token:`binder`\s to declare a parameterized instance. :token:`hint_info` may be used to specify the hint priority, where 0 is the highest priority as for :tacn:`auto` hints. If the priority is not specified, the default is the number of non-dependent binders of the instance. If :token:`one_pattern` is given, terms matching that pattern will trigger use of the instance. Otherwise, use is triggered based on the conclusion of the type. This command supports the :attr:`local`, :attr:`global` and :attr:`export` locality attributes. .. deprecated:: 8.14 The default value for instance locality will change in a future release. Instances added outside of sections without an explicit locality are now deprecated. We recommend using :attr:`export` where possible. 
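As an illustrative sketch (the instance name and the priority here are our own choices, reusing the ``EqDec`` class defined at the beginning of this chapter), an instance can be declared with an explicit locality attribute and an explicit priority as follows: ::

   #[export] Instance unit_EqDec_alt : EqDec unit | 2 :=
     { eqb x y := true ;
       eqb_leibniz x y H := match x, y return x = y with
                            | tt, tt => eq_refl tt
                            end }.

Here ``#[export]`` makes the instance available whenever the enclosing module is imported, and ``| 2`` sets the hint priority explicitly instead of letting it default to the number of non-dependent binders of the instance.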
Like :cmd:`Definition`, it also supports the :attr:`program` attribute to switch the type checking to `Program` (chapter :ref:`programs`) and to use the obligation mechanism to manage missing fields. Finally, it supports the lighter :attr:`refine` attribute: .. attr:: refine This :term:`attribute` can be used to leave holes or not provide all fields in the definition of an instance and open the tactic mode to fill them. It works exactly as if no :term:`body` had been given and the :tacn:`refine` tactic had been used first. .. cmd:: Declare Instance @ident_decl {* @binder } : @term {? @hint_info } In a :cmd:`Module Type`, declares that a corresponding concrete instance should exist in any implementation of this :cmd:`Module Type`. This is similar to the distinction between :cmd:`Parameter` vs. :cmd:`Definition`, or between :cmd:`Declare Module` and :cmd:`Module`. .. cmd:: Existing Instance @qualid {? @hint_info } Existing Instances {+ @qualid } {? %| @natural } Adds a :term:`constant` whose type ends with an applied typeclass to the instance database with an optional priority :token:`natural`. It can be used for redeclaring instances at the end of sections, or declaring structure projections as instances. This is equivalent to ``Hint Resolve ident : typeclass_instances``, except it registers instances for :cmd:`Print Instances`. .. cmd:: Print Instances @reference Shows the list of instances associated with the typeclass :token:`reference`. .. tacn:: typeclasses eauto {? {| bfs | dfs | best_effort } } {? @nat_or_var } {? with {+ @ident } } This proof search tactic uses the resolution engine that is run implicitly during type checking. This tactic uses a different resolution engine than :tacn:`eauto` and :tacn:`auto`. The main differences are the following: + Unlike :tacn:`eauto` and :tacn:`auto`, the resolution is done entirely in the proof engine, meaning that backtracking is available among dependent subgoals, and shelving goals is supported. ``typeclasses eauto`` is a multi-goal tactic. It analyses the dependencies between subgoals to avoid backtracking on subgoals that are entirely independent. + The transparency information of databases is used consistently for all hints declared in them. It is always used when calling the unifier. When considering local hypotheses, we use the transparent state of the first hint database given. Using an empty database (created with :cmd:`Create HintDb` for example) with unfoldable variables and :term:`constants <constant>` as the first argument of ``typeclasses eauto`` hence makes resolution with the local hypotheses use full conversion during unification. + The mode hints (see :cmd:`Hint Mode`) associated with a class are taken into account by :tacn:`typeclasses eauto`. When a goal does not match any of the declared modes for its head (if any), instead of failing like :tacn:`eauto`, the goal is suspended and resolution proceeds on the remaining goals. If, after one run of resolution, suspended goals remain, resolution is launched again on them, until it reaches a fixed point where the set of remaining suspended goals does not change. `solve [typeclasses eauto]` can be used to ensure that no suspended goals remain. + When considering local hypotheses, we use the union of all the modes declared in the given databases. + The tactic may produce more than one success when used in backtracking tactics such as `typeclasses eauto; ...`. See :tacn:`ltac-seq`. + Use the :cmd:`Typeclasses eauto` command to customize the behavior of this tactic.
:n:`{| bfs | dfs}` Specifies whether to use breadth-first search or depth-first search. The default is depth-first search, which can be changed with the :flag:`Typeclasses Iterative Deepening` flag. .. _TypeclassesEautoBestEffort: :n:`best_effort` If the `best_effort` option is given and resolution fails, `typeclasses eauto` returns the first partial solution in which all remaining subgoals fall into one of these categories: - Stuck goals: the head of the goal has at least one associated declared mode and the constraint does not match any mode declared for its head. These goals are shelved. - Mode failures: the head of the constraint has at least one matching declared mode, but the constraint couldn't be solved. These goals are left as subgoals of :n:`typeclasses eauto best_effort`. During type inference, typeclass resolution always uses the `best_effort` option: in case of failure, it constructs a partial solution for the goals and gives a more informative error message. It can be used the same way in interactive proofs to check which instances/hints are missing for a typeclass resolution to succeed. :n:`@nat_or_var` Specifies the maximum depth of the search. .. warning:: The semantics for the limit :n:`@nat_or_var` are different than for :tacn:`auto`. By default, if no limit is given, the search is unbounded. Unlike :tacn:`auto`, introduction steps count against the limit, which might result in larger limits being necessary when searching with :tacn:`typeclasses eauto` than with :tacn:`auto`. :n:`with {+ @ident }` Runs resolution with the specified hint databases. It treats typeclass subgoals the same as other subgoals (no shelving of non-typeclass goals in particular), while allowing shelved goals to remain at any point during search. When :n:`with` is not specified, :tacn:`typeclasses eauto` uses the ``typeclass_instances`` database by default (instead of ``core``). Dependent subgoals are automatically shelved, and shelved goals can remain after resolution ends (following the behavior of Coq 8.5). .. note:: ``all:once (typeclasses eauto)`` faithfully mimics what happens during typeclass resolution when it is called during refinement/type inference, except that *only* declared class subgoals are considered at the start of resolution during type inference, while ``all`` can select non-class subgoals as well. It might move to ``all:typeclasses eauto`` in future versions when the refinement engine will be able to backtrack. .. tacn:: autoapply @one_term with @ident The tactic ``autoapply`` applies :token:`one_term` using the transparency information of the hint database :token:`ident`, and does *no* typeclass resolution. This can be used in :cmd:`Hint Extern`’s for typeclass instances (in the hint database ``typeclass_instances``) to allow backtracking on the typeclass subgoals created by the lemma application, rather than doing typeclass resolution locally at the hint application time. .. _TypeclassesTransparent: Typeclasses Transparent, Typeclasses Opaque ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. cmd:: Typeclasses Transparent {+ @qualid } Makes :token:`qualid` transparent during typeclass resolution. A shortcut for :cmd:`Hint Transparent` :n:`{+ @qualid } : typeclass_instances` .. cmd:: Typeclasses Opaque {+ @qualid } Make :token:`qualid` opaque for typeclass search. A shortcut for :cmd:`Hint Opaque` :n:`{+ @qualid } : typeclass_instances`. It is useful when some :term:`constants ` prevent some unifications and make resolution fail. 
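For example, one can declare a particular definition opaque for resolution as follows (the ``is_even`` predicate is purely illustrative):

.. coqtop:: in

   (* Illustrative fixpoint, made opaque for typeclass resolution only. *)
   Fixpoint is_even (n : nat) : Prop :=
     match n with
     | 0 => True
     | 1 => False
     | S (S m) => is_even m
     end.

   Typeclasses Opaque is_even.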
It is also useful to declare constants which should never be unfolded during proof search, like fixpoints or anything which does not look like an abbreviation. This can additionally speed up proof search as the typeclass map can be indexed by such rigid constants (see :ref:`hintdatabases`). By default, all :term:`constants ` and local variables are considered transparent. One should take care not to make opaque any constant that is used to abbreviate a type, like: .. coqdoc:: Definition relation A := A -> A -> Prop. .. versionadded:: 8.15 :cmd:`Typeclasses Transparent` and :cmd:`Typeclasses Opaque` support locality attributes like :cmd:`Hint ` commands. .. deprecated:: 8.15 The default value for typeclass transparency hints will change in a future release. Hints added outside of sections without an explicit locality are now deprecated. We recommend using :attr:`export` where possible. Settings ~~~~~~~~ .. flag:: Typeclasses Dependency Order This :term:`flag` (off by default) respects the dependency order between subgoals, meaning that subgoals on which other subgoals depend come first, while the non-dependent subgoals were put before the dependent ones previously (Coq 8.5 and below). This can result in quite different performance behaviors of proof search. .. flag:: Typeclasses Filtered Unification This :term:`flag`, which is off by default, switches the hint application procedure to a filter-then-unify strategy. To apply a hint, we first check that the goal *matches* syntactically the inferred or specified pattern of the hint, and only then try to *unify* the goal with the conclusion of the hint. This can drastically improve performance by calling unification less often, matching syntactic patterns being very quick. This also provides more control on the triggering of instances. For example, forcing a :term:`constant` to explicitly appear in the pattern will make it never apply on a goal where there is a hole in that place. .. flag:: Typeclasses Limit Intros This :term:`flag` (on by default) controls the ability to apply hints while avoiding (functional) eta-expansions in the generated proof term. It does so by allowing hints that conclude in a product to apply to a goal with a matching product directly, avoiding an introduction. .. warning:: This can be expensive as it requires rebuilding hint clauses dynamically, and does not benefit from the invertibility status of the product introduction rule, resulting in potentially more expensive proof search (i.e. more useless backtracking). .. flag:: Typeclass Resolution For Conversion This :term:`flag` (on by default) controls the use of typeclass resolution when a unification problem cannot be solved during elaboration/type inference. With this flag on, when a unification fails, typeclass resolution is tried before launching unification once again. .. flag:: Typeclasses Strict Resolution Typeclass declarations introduced when this :term:`flag` is set have a stricter resolution behavior (the flag is off by default). When looking for unifications of a goal with an instance of this class, we “freeze” all the existentials appearing in the goals, meaning that they are considered rigid during unification and cannot be instantiated. .. flag:: Typeclasses Unique Solutions When a typeclass resolution is launched we ensure that it has a single solution or fail. This :term:`flag` ensures that the resolution is canonical, but can make proof search much more expensive. .. 
flag:: Typeclasses Unique Instances Typeclass declarations introduced when this :term:`flag` is set have a more efficient resolution behavior (the flag is off by default). When a solution to the typeclass goal of this class is found, we never backtrack on it, assuming that it is canonical. .. flag:: Typeclasses Iterative Deepening When this :term:`flag` is set, the proof search strategy is breadth-first search. Otherwise, the search strategy is depth-first search. The default is off. :cmd:`Typeclasses eauto` is another way to set this flag. .. opt:: Typeclasses Depth @natural This :term:`option` sets the maximum proof search depth. The default is unbounded. :cmd:`Typeclasses eauto` is another way to set this option. .. flag:: Typeclasses Debug Controls whether typeclass resolution steps are shown during search. Setting this :term:`flag` also sets :opt:`Typeclasses Debug Verbosity` to 1. :cmd:`Typeclasses eauto` is another way to set this flag. .. opt:: Typeclasses Debug Verbosity @natural Determines how much information is shown for typeclass resolution steps during search. 1 is the default level. 2 shows additional information such as tried tactics and shelving of goals. Setting this :term:`option` to 1 or 2 turns on the :flag:`Typeclasses Debug` flag; setting this option to 0 turns that flag off. Typeclasses eauto ~~~~~~~~~~~~~~~~~ .. cmd:: Typeclasses eauto := {? debug } {? ( {| bfs | dfs } ) } {? @natural } Allows more global customization of the :tacn:`typeclasses eauto` tactic. The options are: ``debug`` Sets debug mode. In debug mode, a trace of successfully applied tactics is printed. Debug mode can also be set with :flag:`Typeclasses Debug`. :n:`{| bfs | dfs }` Specifies whether to use breadth-first search or depth-first search. The default is depth-first search, which can be changed with the :flag:`Typeclasses Iterative Deepening` flag. :token:`natural` Sets the depth limit for the search. The limit can also be set with :opt:`Typeclasses Depth`. coq-8.15.0/doc/sphinx/addendum/universe-polymorphism.rst000066400000000000000000000534061417001151100233570ustar00rootroot00000000000000.. _polymorphicuniverses: Polymorphic Universes ====================== :Author: Matthieu Sozeau General Presentation --------------------- .. warning:: The status of Universe Polymorphism is experimental. This section describes the universe polymorphic extension of Coq. Universe polymorphism makes it possible to write generic definitions making use of universes and reuse them at different and sometimes incompatible universe levels. A standard example of the difference between universe *polymorphic* and *monomorphic* definitions is given by the identity function: .. coqtop:: in Definition identity {A : Type} (a : A) := a. By default, :term:`constant` declarations are monomorphic, hence the identity function declares a global universe (say ``Top.1``) for its domain. Subsequently, if we try to self-apply the identity, we will get an error: .. coqtop:: all Fail Definition selfid := identity (@identity). Indeed, the global level ``Top.1`` would have to be strictly smaller than itself for this self-application to type check, as the type of :g:`(@identity)` is :g:`forall (A : Type@{Top.1}), A -> A` whose type is itself :g:`Type@{Top.1+1}`. A universe polymorphic identity function binds its domain universe level at the definition level instead of making it global. .. coqtop:: in Polymorphic Definition pidentity {A : Type} (a : A) := a. .. coqtop:: all About pidentity. 
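The ``Polymorphic`` prefix is interchangeable with the corresponding attribute: for instance, an equivalent way to write the definition above is the following (shown only as an alternative spelling, with a throwaway name):

.. coqtop:: in

   (* Same definition as pidentity, written with the attribute instead of the prefix. *)
   #[universes(polymorphic)] Definition pidentity' {A : Type} (a : A) := a.

The remainder of this section sticks to the prefix form and to :g:`pidentity`.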
It is then possible to reuse the constant at different levels, like so: .. coqtop:: in Definition selfpid := pidentity (@pidentity). Of course, the two instances of :g:`pidentity` in this definition are different. This can be seen when the :flag:`Printing Universes` flag is on: .. coqtop:: none Set Printing Universes. .. coqtop:: all Print selfpid. Now :g:`pidentity` is used at two different levels: at the head of the application it is instantiated at ``Top.3`` while in the argument position it is instantiated at ``Top.4``. This definition is only valid as long as ``Top.4`` is strictly smaller than ``Top.3``, as shown by the constraints. Note that this definition is monomorphic (not universe polymorphic), so the two universes (in this case ``Top.3`` and ``Top.4``) are actually global levels. When printing :g:`pidentity`, we can see the universes it binds in the annotation :g:`@{Top.2}`. Additionally, when :flag:`Printing Universes` is on we print the "universe context" of :g:`pidentity` consisting of the bound universes and the constraints they must verify (for :g:`pidentity` there are no constraints). Inductive types can also be declared universe polymorphic on universes appearing in their parameters or fields. A typical example is given by monoids: .. coqtop:: in Polymorphic Record Monoid := { mon_car :> Type; mon_unit : mon_car; mon_op : mon_car -> mon_car -> mon_car }. .. coqtop:: in Print Monoid. The Monoid's carrier universe is polymorphic, hence it is possible to instantiate it for example with :g:`Monoid` itself. First we build the trivial unit monoid in :g:`Set`: .. coqtop:: in Definition unit_monoid : Monoid := {| mon_car := unit; mon_unit := tt; mon_op x y := tt |}. From this we can build a definition for the monoid of :g:`Set`\-monoids (where multiplication would be given by the product of monoids). .. coqtop:: in Polymorphic Definition monoid_monoid : Monoid. refine (@Build_Monoid Monoid unit_monoid (fun x y => x)). Defined. .. coqtop:: all Print monoid_monoid. As one can see from the constraints, this monoid is “large”: it lives in a universe strictly higher than :g:`Set`. Polymorphic, Monomorphic ------------------------- .. attr:: universes(polymorphic{? = {| yes | no } }) :name: universes(polymorphic); Polymorphic; Monomorphic This :term:`boolean attribute` can be used to control whether universe polymorphism is enabled in the definition of an inductive type. There is also a legacy syntax using the ``Polymorphic`` prefix (see :n:`@legacy_attr`) which, as shown in the examples, is more commonly used. When ``universes(polymorphic=no)`` is used, global universe constraints are produced, even when the :flag:`Universe Polymorphism` flag is on. There is also a legacy syntax using the ``Monomorphic`` prefix (see :n:`@legacy_attr`). .. flag:: Universe Polymorphism This :term:`flag` is off by default. When it is on, new declarations are polymorphic unless the :attr:`universes(polymorphic=no) <universes(polymorphic)>` attribute is used to override the default. Many other commands can be used to declare universe polymorphic or monomorphic :term:`constants <constant>` depending on whether the :flag:`Universe Polymorphism` flag is on or the :attr:`universes(polymorphic)` attribute is used: - :cmd:`Lemma`, :cmd:`Axiom`, etc. can be used to declare universe polymorphic constants. - Using the :attr:`universes(polymorphic)` attribute with the :cmd:`Section` command will locally set the polymorphism flag inside the section.
- :cmd:`Variable`, :cmd:`Context`, :cmd:`Universe` and :cmd:`Constraint` in a section support polymorphism. See :ref:`universe-polymorphism-in-sections` for more details. - Using the :attr:`universes(polymorphic)` attribute with the :cmd:`Hint Resolve` or :cmd:`Hint Rewrite` commands will make :tacn:`auto` / :tacn:`rewrite` use the hint polymorphically, not at a single instance. .. _cumulative: Cumulative, NonCumulative ------------------------- .. attr:: universes(cumulative{? = {| yes | no } }) :name: universes(cumulative); Cumulative; NonCumulative Polymorphic inductive types, coinductive types, variants and records can be declared cumulative using this :term:`boolean attribute` or the legacy ``Cumulative`` prefix (see :n:`@legacy_attr`) which, as shown in the examples, is more commonly used. This means that two instances of the same inductive type (family) are convertible based on the universe variances; they do not need to be equal. When the attribute is off, the inductive type is non-cumulative even if the :flag:`Polymorphic Inductive Cumulativity` flag is on. There is also a legacy syntax using the ``NonCumulative`` prefix (see :n:`@legacy_attr`). This means that two instances of the same inductive type (family) are convertible only if all the universes are equal. .. exn:: The cumulative attribute can only be used in a polymorphic context. Using this attribute requires being in a polymorphic context, i.e. either having the :flag:`Universe Polymorphism` flag on, or having used the :attr:`universes(polymorphic)` attribute as well. .. note:: :n:`#[ universes(polymorphic{? = yes }), universes(cumulative{? = {| yes | no } }) ]` can be abbreviated into :n:`#[ universes(polymorphic{? = yes }, cumulative{? = {| yes | no } }) ]`. .. flag:: Polymorphic Inductive Cumulativity When this :term:`flag` is on (it is off by default), it makes all subsequent *polymorphic* inductive definitions cumulative, unless the :attr:`universes(cumulative=no) <universes(cumulative)>` attribute is used to override the default. It has no effect on *monomorphic* inductive definitions. Consider the examples below. .. coqtop:: in Polymorphic Cumulative Inductive list {A : Type} := | nil : list | cons : A -> list -> list. .. coqtop:: all Print list. When printing :g:`list`, the universe context indicates the subtyping constraints by prefixing the level names with symbols. Because inductive subtypings are only produced by comparing inductives to themselves with universes changed, they amount to variance information: each universe is either invariant, covariant or irrelevant (there are no contravariant subtypings in Coq), respectively represented by the symbols `=`, `+` and `*`. Here we see that :g:`list` binds an irrelevant universe, so any two instances of :g:`list` are convertible: :math:`E[Γ] ⊢ \mathsf{list}@\{i\}~A =_{βδιζη} \mathsf{list}@\{j\}~B` whenever :math:`E[Γ] ⊢ A =_{βδιζη} B` and this applies also to their corresponding constructors, when they are comparable at the same type. See :ref:`Conversion-rules` for more details on convertibility and subtyping. The following is an example of a record with a non-trivial subtyping relation: .. coqtop:: all Polymorphic Cumulative Record packType := {pk : Type}. About packType. :g:`packType` binds a covariant universe, i.e. .. math:: E[Γ] ⊢ \mathsf{packType}@\{i\} ≤_{βδιζη} \mathsf{packType}@\{j\}~\mbox{ whenever }~i ≤ j Specifying cumulativity ~~~~~~~~~~~~~~~~~~~~~~~ The variance of the universe parameters for a cumulative inductive may be specified by the user.
For the following type, universe ``a`` has its variance automatically inferred (it is irrelevant), ``b`` is required to be irrelevant, ``c`` is covariant and ``d`` is invariant. With these annotations ``c`` and ``d`` have less general variances than would be inferred. .. coqtop:: all Polymorphic Cumulative Inductive Dummy@{a *b +c =d} : Prop := dummy. About Dummy. Insufficiently restrictive variance annotations lead to errors: .. coqtop:: all Fail Polymorphic Cumulative Record bad@{*a} := {p : Type@{a}}. An example of a proof using cumulativity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. coqtop:: in reset Set Universe Polymorphism. Set Polymorphic Inductive Cumulativity. Inductive eq@{i} {A : Type@{i}} (x : A) : A -> Type@{i} := eq_refl : eq x x. Definition funext_type@{a b e} (A : Type@{a}) (B : A -> Type@{b}) := forall f g : (forall a, B a), (forall x, eq@{e} (f x) (g x)) -> eq@{e} f g. Section down. Universes a b e e'. Constraint e' < e. Lemma funext_down {A B} (H : @funext_type@{a b e} A B) : @funext_type@{a b e'} A B. Proof. exact H. Defined. End down. Cumulativity Weak Constraints ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. flag:: Cumulativity Weak Constraints When set, which is the default, this :term:`flag` causes "weak" constraints to be produced when comparing universes in an irrelevant position. Processing weak constraints is delayed until minimization time. A weak constraint between `u` and `v` when neither is smaller than the other and one is flexible causes them to be unified. Otherwise the constraint is silently discarded. This heuristic is experimental and may change in future versions. Disabling weak constraints is more predictable but may produce arbitrary numbers of universes. Global and local universes --------------------------- Each universe is declared in a global or local context before it can be used. To ensure compatibility, every *global* universe is set to be strictly greater than :g:`Set` when it is introduced, while every *local* (i.e. polymorphically quantified) universe is introduced as greater or equal to :g:`Set`. Conversion and unification --------------------------- The semantics of conversion and unification have to be modified a little to account for the new universe instance arguments to polymorphic references. The semantics respect the fact that definitions are transparent, so indistinguishable from their :term:`bodies ` during conversion. This is accomplished by changing one rule of unification, the first- order approximation rule, which applies when two applicative terms with the same head are compared. It tries to short-cut unfolding by comparing the arguments directly. In case the :term:`constant` is universe polymorphic, we allow this rule to fire only when unifying the universes results in instantiating a so-called flexible universe variables (not given by the user). Similarly for conversion, if such an equation of applicative terms fail due to a universe comparison not being satisfied, the terms are unfolded. This change implies that conversion and unification can have different unfolding behaviors on the same development with universe polymorphism switched on or off. Minimization ------------- Universe polymorphism with cumulativity tends to generate many useless inclusion constraints in general. Typically at each application of a polymorphic :term:`constant` :g:`f`, if an argument has expected type :g:`Type@{i}` and is given a term of type :g:`Type@{j}`, a :math:`j ≤ i` constraint will be generated. 
It is however often the case that an equation :math:`j = i` would be more appropriate, when :g:`f`\'s universes are fresh for example. Consider the following example: .. coqtop:: none Polymorphic Definition pidentity {A : Type} (a : A) := a. Set Printing Universes. .. coqtop:: in Definition id0 := @pidentity nat 0. .. coqtop:: all Print id0. This definition is elaborated by minimizing the universe of :g:`id0` to level :g:`Set` while the more general definition would keep the fresh level :g:`i` generated at the application of :g:`id` and a constraint that :g:`Set` :math:`≤ i`. This minimization process is applied only to fresh universe variables. It simply adds an equation between the variable and its lower bound if it is an atomic universe (i.e. not an algebraic max() universe). .. flag:: Universe Minimization ToSet Turning this :term:`flag` off (it is on by default) disallows minimization to the sort :g:`Set` and only collapses floating universes between themselves. .. _explicit-universes: Explicit Universes ------------------- .. insertprodn universe_name univ_constraint .. prodn:: universe_name ::= @qualid | Set | Prop univ_annot ::= @%{ {* @universe_level } %} universe_level ::= Set | Prop | Type | _ | @qualid univ_decl ::= @%{ {* @ident } {? + } {? %| {*, @univ_constraint } {? + } } %} cumul_univ_decl ::= @%{ {* {? {| + | = | * } } @ident } {? + } {? %| {*, @univ_constraint } {? + } } %} univ_constraint ::= @universe_name {| < | = | <= } @universe_name The syntax has been extended to allow users to explicitly bind names to universes and explicitly instantiate polymorphic definitions. .. cmd:: Universe {+ @ident } In the monomorphic case, declares new global universes with the given names. Global universe names live in a separate namespace. The command supports the :attr:`universes(polymorphic)` attribute (or the ``Polymorphic`` legacy attribute) only in sections, meaning the universe quantification will be discharged for each section definition independently. .. exn:: Polymorphic universes can only be declared inside sections, use Monomorphic Universe instead. :undocumented: .. cmd:: Constraint {+, @univ_constraint } Declares new constraints between named universes. If consistent, the constraints are then enforced in the global environment. Like :cmd:`Universe`, it can be used with the :attr:`universes(polymorphic)` attribute (or the ``Polymorphic`` legacy attribute) in sections only to declare constraints discharged at section closing time. One cannot declare a global constraint on polymorphic universes. .. exn:: Undeclared universe @ident. :undocumented: .. exn:: Universe inconsistency. :undocumented: .. exn:: Polymorphic universe constraints can only be declared inside sections, use Monomorphic Constraint instead :undocumented: .. _printing-universes: Printing universes ------------------ .. flag:: Printing Universes Turn this :term:`flag` on to activate the display of the actual level of each occurrence of :g:`Type`. See :ref:`Sorts` for details. This wizard flag, in combination with :flag:`Printing All` can help to diagnose failures to unify terms apparently identical but internally different in the Calculus of Inductive Constructions. .. cmd:: Print {? Sorted } Universes {? Subgraph ( {* @qualid } ) } {? @string } :name: Print Universes This command can be used to print the constraints on the internal level of the occurrences of :math:`\Type` (see :ref:`Sorts`). 
The :n:`Subgraph` clause limits the printed graph to the requested names (adjusting constraints to preserve the implied transitive constraints between kept universes). The :n:`Sorted` clause makes each universe equivalent to a numbered label reflecting its level (with a linear ordering) in the universe hierarchy. :n:`@string` is an optional output filename. If :n:`@string` ends in ``.dot`` or ``.gv``, the constraints are printed in the DOT language, and can be processed by Graphviz tools. The format is unspecified if `string` doesn’t end in ``.dot`` or ``.gv``. Polymorphic definitions ~~~~~~~~~~~~~~~~~~~~~~~ For polymorphic definitions, the declaration of (all) universe levels introduced by a definition uses the following syntax: .. coqtop:: in Polymorphic Definition le@{i j} (A : Type@{i}) : Type@{j} := A. .. coqtop:: all Print le. During refinement we find that :g:`j` must be larger or equal than :g:`i`, as we are using :g:`A : Type@{i} <= Type@{j}`, hence the generated constraint. At the end of a definition or proof, we check that the only remaining universes are the ones declared. In the term and in general in proof mode, introduced universe names can be referred to in terms. Note that local universe names shadow global universe names. During a proof, one can use :cmd:`Show Universes` to display the current context of universes. It is possible to provide only some universe levels and let Coq infer the others by adding a :g:`+` in the list of bound universe levels: .. coqtop:: all Fail Definition foobar@{u} : Type@{u} := Type. Definition foobar@{u +} : Type@{u} := Type. Set Printing Universes. Print foobar. This can be used to find which universes need to be explicitly bound in a given definition. Definitions can also be instantiated explicitly, giving their full instance: .. coqtop:: all Check (pidentity@{Set}). Monomorphic Universes k l. Check (le@{k l}). User-named universes and the anonymous universe implicitly attached to an explicit :g:`Type` are considered rigid for unification and are never minimized. Flexible anonymous universes can be produced with an underscore or by omitting the annotation to a polymorphic definition. .. coqtop:: all Check (fun x => x) : Type -> Type. Check (fun x => x) : Type -> Type@{_}. Check le@{k _}. Check le. .. flag:: Strict Universe Declaration Turning this :term:`flag` off allows one to freely use identifiers for universes without declaring them first, with the semantics that the first use declares it. In this mode, the universe names are not associated with the definition or proof once it has been defined. This is meant mainly for debugging purposes. .. flag:: Private Polymorphic Universes This :term:`flag`, on by default, removes universes which appear only in the :term:`body` of an opaque polymorphic definition from the definition's universe arguments. As such, no value needs to be provided for these universes when instantiating the definition. Universe constraints are automatically adjusted. Consider the following definition: .. coqtop:: all Lemma foo@{i} : Type@{i}. Proof. exact Type. Qed. Print foo. The universe :g:`Top.xxx` for the :g:`Type` in the :term:`body` cannot be accessed, we only care that one exists for any instantiation of the universes appearing in the type of :g:`foo`. This is guaranteed when the transitive constraint ``Set <= Top.xxx < i`` is verified. Then when using the :term:`constant` we don't need to put a value for the inner universe: .. coqtop:: all Check foo@{_}. 
and when not looking at the :term:`body` we don't mention the private universe: .. coqtop:: all About foo. To recover the same behavior with regard to universes as :g:`Defined`, the :flag:`Private Polymorphic Universes` flag may be unset: .. coqtop:: all Unset Private Polymorphic Universes. Lemma bar : Type. Proof. exact Type. Qed. About bar. Fail Check bar@{_}. Check bar@{_ _}. Note that named universes are always public. .. coqtop:: all Set Private Polymorphic Universes. Unset Strict Universe Declaration. Lemma baz : Type@{outer}. Proof. exact Type@{inner}. Qed. About baz. .. _universe-polymorphism-in-sections: Universe polymorphism and sections ---------------------------------- :cmd:`Variables`, :cmd:`Context`, :cmd:`Universe` and :cmd:`Constraint` in a section support polymorphism. This means that the universe variables and their associated constraints are discharged polymorphically over definitions that use them. In other words, two definitions in the section sharing a common variable will both get parameterized by the universes produced by the variable declaration. This is in contrast to a “monomorphic” variable which introduces global universes and constraints, making the two definitions depend on the *same* global universes associated with the variable. It is possible to mix universe polymorphism and monomorphism in sections, except in the following ways: - no monomorphic constraint may refer to a polymorphic universe: .. coqtop:: all reset Section Foo. Polymorphic Universe i. Fail Constraint i = i. This includes constraints implicitly declared by commands such as :cmd:`Variable`, which may need to be used with universe polymorphism activated (locally by attribute or globally by option): .. coqtop:: all Fail Variable A : (Type@{i} : Type). Polymorphic Variable A : (Type@{i} : Type). (in the above example the anonymous :g:`Type` constrains polymorphic universe :g:`i` to be strictly smaller.) - no monomorphic :term:`constant` or inductive may be declared if polymorphic universes or universe constraints are present. These restrictions are required in order to produce a sensible result when closing the section (the requirement on :term:`constants <constant>` and inductive types is stricter than the one on constraints, because constants and inductives are abstracted by *all* the section's polymorphic universes and constraints). coq-8.15.0/doc/sphinx/appendix/000077500000000000000000000000001417001151100162445ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/appendix/history-and-changes/000077500000000000000000000000001417001151100221135ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/appendix/history-and-changes/index.rst000066400000000000000000000013351417001151100237560ustar00rootroot00000000000000.. _history-and-changes: ========================== History and recent changes ========================== This chapter is divided into two parts. The first one is about the :ref:`early history of Coq ` and is presented in chronological order. The second one provides :ref:`release notes about recent versions of Coq ` and is presented in reverse chronological order. When updating your copy of Coq to a new version (especially a new major version), it is strongly recommended that you read the corresponding release notes. They may contain advice that will help you understand the differences with the previous version and upgrade your projects. ..
toctree:: :maxdepth: 1 ../../history ../../changes coq-8.15.0/doc/sphinx/appendix/indexes/000077500000000000000000000000001417001151100177035ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/appendix/indexes/index.rst000066400000000000000000000010011417001151100215340ustar00rootroot00000000000000:orphan: .. _indexes: ======== Indexes ======== We provide various specialized indexes that are helpful to quickly find what you are looking for. .. toctree:: ../../std-glossindex ../../coq-cmdindex ../../coq-tacindex ../../coq-attrindex ../../coq-optindex ../../coq-exnindex ../../genindex For reference, here are direct links to the documentation of: - :ref:`attributes` - :ref:`flags-options-tables`; - controlling the display of warning messages with the :opt:`Warnings` option; coq-8.15.0/doc/sphinx/biblio.bib000066400000000000000000000513751417001151100163650ustar00rootroot00000000000000@String{jfp = "Journal of Functional Programming"} @String{lncs = "Lecture Notes in Computer Science"} @String{lnai = "Lecture Notes in Artificial Intelligence"} @String{SV = "{Springer-Verlag}"} @InCollection{Asp00, Title = {Proof General: A Generic Tool for Proof Development}, Author = {Aspinall, David}, Booktitle = {Tools and Algorithms for the Construction and Analysis of Systems, {TACAS} 2000}, Publisher = {Springer Berlin Heidelberg}, Year = {2000}, Editor = {Graf, Susanne and Schwartzbach, Michael}, Pages = {38--43}, Series = {Lecture Notes in Computer Science}, Volume = {1785}, Doi = {10.1007/3-540-46419-0_3}, ISBN = {978-3-540-67282-1}, } @Book{Bar81, author = {H.P. Barendregt}, publisher = {North-Holland}, title = {The Lambda Calculus its Syntax and Semantics}, year = {1981} } @InProceedings{Bou97, title = {Using reflection to build efficient and certified decision procedure s}, author = {S. Boutin}, booktitle = {TACS'97}, editor = {Martin Abadi and Takahashi Ito}, publisher = SV, series = lncs, volume = 1281, year = {1997} } @Article{Bru72, author = {N.J. de Bruijn}, journal = {Indag. Math.}, title = {{Lambda-Calculus Notation with Nameless Dummies, a Tool for Automatic Formula Manipulation, with Application to the Church-Rosser Theorem}}, volume = {34}, year = {1972} } @inproceedings{CH85, title={Constructions: a higher order proof system for mechanizing mathematics}, author={Coquand, Thierry and Huet, Gérard}, booktitle={European Conference on Computer Algebra}, pages={151--184}, year={1985}, issn = {1611-3349}, doi = {10.1007/3-540-15983-5_13}, url = {http://dx.doi.org/10.1007/3-540-15983-5_13}, isbn = 9783540396840, publisher = {Springer Berlin Heidelberg} } @techreport{CH88 TITLE = {{The calculus of constructions}}, AUTHOR = {Coquand, T. and Huet, G{\'e}rard}, URL = {https://hal.inria.fr/inria-00076024}, NUMBER = {RR-0530}, INSTITUTION = {{INRIA}}, YEAR = {1986}, MONTH = May, PDF = {https://hal.inria.fr/inria-00076024/file/RR-0530.pdf}, HAL_ID = {inria-00076024}, HAL_VERSION = {v1}, } @techreport{CH87, TITLE = {{Concepts mathematiques et informatiques formalises dans le calcul des constructions}}, AUTHOR = {Coquand, T. 
and Huet, G{\'e}rard}, URL = {https://hal.inria.fr/inria-00076039}, NUMBER = {RR-0515}, INSTITUTION = {{INRIA}}, YEAR = {1986}, MONTH = Apr, PDF = {https://hal.inria.fr/inria-00076039/file/RR-0515.pdf}, HAL_ID = {inria-00076039}, HAL_VERSION = {v1}, } @techreport{C90, TITLE = {{Metamathematical investigations of a calculus of constructions}}, AUTHOR = {Coquand, T.}, URL = {https://hal.inria.fr/inria-00075471}, NUMBER = {RR-1088}, INSTITUTION = {{INRIA}}, YEAR = {1989}, MONTH = Sep, PDF = {https://hal.inria.fr/inria-00075471/file/RR-1088.pdf}, HAL_ID = {inria-00075471}, HAL_VERSION = {v1}, } @PhDThesis{Coq85, author = {Th. Coquand}, month = jan, school = {Universit\'e Paris~7}, title = {Une Th\'eorie des Constructions}, year = {1985} } @InProceedings{Coq86, author = {Th. Coquand}, address = {Cambridge, MA}, booktitle = {Symposium on Logic in Computer Science}, publisher = {IEEE Computer Society Press}, title = {{An Analysis of Girard's Paradox}}, year = {1986} } @InProceedings{Coq92, author = {Th. Coquand}, title = {{Pattern Matching with Dependent Types}}, year = {1992}, booktitle = {Proceedings of the 1992 Workshop on Types for Proofs and Programs} } @InProceedings{DBLP:conf/types/CornesT95, author = {Cristina Cornes and Delphine Terrasse}, title = {Automating Inversion of Inductive Predicates in Coq}, booktitle = {TYPES}, year = {1995}, pages = {85-104}, crossref = {DBLP:conf/types/1995}, bibsource = {DBLP, http://dblp.uni-trier.de} } @inproceedings{CP90, title={Inductively defined types}, author={Coquand, Thierry and Paulin, Christine}, booktitle={COLOG-88}, pages={50--66}, year={1990}, issn = {1611-3349}, doi = {10.1007/3-540-52335-9_47}, url = {http://dx.doi.org/10.1007/3-540-52335-9_47}, isbn = 9783540469636, publisher = {Springer Berlin Heidelberg} } @Book{Cur58, author = {Haskell B. 
Curry and Robert Feys and William Craig}, title = {Combinatory Logic}, volume = 1, publisher = "North-Holland", year = 1958, note = {{\S{9E}}}, } @Article{CSlessadhoc, author = {Gonthier, Georges and Ziliani, Beta and Nanevski, Aleksandar and Dreyer, Derek}, title = {How to Make Ad Hoc Proof Automation Less Ad Hoc}, journal = {SIGPLAN Not.}, issue_date = {September 2011}, volume = {46}, number = {9}, month = sep, year = {2011}, issn = {0362-1340}, pages = {163--175}, numpages = {13}, url = {http://doi.acm.org/10.1145/2034574.2034798}, doi = {10.1145/2034574.2034798}, acmid = {2034798}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {canonical structures, coq, custom proof automation, hoare type theory, interactive theorem proving, tactics, type classes}, } @InProceedings{CSwcu, hal_id = {hal-00816703}, url = {http://hal.inria.fr/hal-00816703}, title = {{Canonical Structures for the working Coq user}}, author = {Mahboubi, Assia and Tassi, Enrico}, booktitle = {{ITP 2013, 4th Conference on Interactive Theorem Proving}}, publisher = {Springer}, pages = {19-34}, address = {Rennes, France}, volume = {7998}, editor = {Sandrine Blazy and Christine Paulin and David Pichardie }, series = {LNCS }, doi = {10.1007/978-3-642-39634-2_5}, year = {2013}, } @InProceedings{Del00, author = {Delahaye, D.}, title = {A {T}actic {L}anguage for the {S}ystem {Coq}}, booktitle = {Proceedings of Logic for Programming and Automated Reasoning (LPAR), Reunion Island}, publisher = SV, series = LNCS, volume = {1955}, pages = {85--95}, month = {November}, year = {2000}, url = {http://www.lirmm.fr/%7Edelahaye/papers/ltac%20(LPAR%2700).pdf} } @Article{Dyc92, author = {Roy Dyckhoff}, journal = {The Journal of Symbolic Logic}, month = sep, number = {3}, title = {Contraction-free sequent calculi for intuitionistic logic}, volume = {57}, year = {1992} } @Book{Fourier, author = {Jean-Baptiste-Joseph Fourier}, publisher = {Gauthier-Villars}, title = {Fourier's method to solve linear inequations/equations systems.}, year = {1890} } @article{Gilbert:POPL2019, author = {Gilbert, Ga\"{e}tan and Cockx, Jesper and Sozeau, Matthieu and Tabareau, Nicolas}, title = {{Definitional Proof Irrelevance Without K}}, journal = {Proc. ACM Program. Lang.}, issue_date = {January 2019}, volume = {3}, number = {POPL}, year = {2019}, issn = {2475-1421}, pages = {3:1--3:28}, articleno = {3}, numpages = {28}, url = {http://doi.acm.org/10.1145/3290316}, acmid = {3290316}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {proof assistants, proof irrelevance, type theory}, } @InProceedings{Gim94, author = {E. Gim\'enez}, booktitle = {Types'94 : Types for Proofs and Programs}, note = {Extended version in LIP research report 95-07, ENS Lyon}, publisher = SV, series = LNCS, title = {Codifying guarded definitions with recursive schemes}, volume = {996}, year = {1994} } @TechReport{Gim98, author = {E. Gim\'enez}, title = {A Tutorial on Recursive Types in Coq}, institution = {INRIA}, year = 1998, month = mar } @Unpublished{GimCas05, author = {E. Gim\'enez and P. Cast\'eran}, title = {A Tutorial on [Co-]Inductive Types in Coq}, institution = {INRIA}, year = 2005, month = jan, note = {available at \url{http://coq.inria.fr/doc}} } @InProceedings{Gimenez95b, author = {E. Gim\'enez}, booktitle = {Workshop on Types for Proofs and Programs}, series = LNCS, number = {1158}, pages = {135-152}, title = {An application of co-Inductive types in Coq: verification of the Alternating Bit Protocol}, editorS = {S. Berardi and M. 
Coppo}, publisher = SV, year = {1995} } @Book{Gir89, author = {J.-Y. Girard and Y. Lafont and P. Taylor}, publisher = {Cambridge University Press}, series = {Cambridge Tracts in Theoretical Computer Science 7}, title = {Proofs and Types}, year = {1989} } @InCollection{How80, author = {W.A. Howard}, booktitle = {to H.B. Curry : Essays on Combinatory Logic, Lambda Calculus and Formalism.}, editor = {J.P. Seldin and J.R. Hindley}, note = {Unpublished 1969 Manuscript}, publisher = {Academic Press}, title = {The Formulae-as-Types Notion of Constructions}, year = {1980} } @inproceedings{H88, title={Induction principles formalized in the Calculus of Constructions}, author={Huet, G{\'e}rard}, booktitle={Programming of Future Generation Computers. Elsevier Science}, year={1988}, issn = {1611-3349}, doi = {10.1007/3-540-17660-8_62}, url = {http://dx.doi.org/10.1007/3-540-17660-8_62}, isbn = 9783540477464, publisher = {Springer Berlin Heidelberg} } @InProceedings{H89, author = {G. Huet}, booktitle = {A perspective in Theoretical Computer Science. Commemorative Volume for Gift Siromoney}, editor = {R. Narasimhan}, publisher = {World Scientific Publishing}, title = {{The Constructive Engine}}, year = {1989} } @Article{LeeWerner11, author = {Gyesik Lee and Benjamin Werner}, title = {Proof-irrelevant model of {CC} with predicative induction and judgmental equality}, journal = {Logical Methods in Computer Science}, volume = {7}, number = {4}, year = {2011}, ee = {http://dx.doi.org/10.2168/LMCS-7(4:5)2011}, bibsource = {DBLP, http://dblp.uni-trier.de} } @TechReport{Leroy90, author = {X. Leroy}, title = {The {ZINC} experiment: an economical implementation of the {ML} language}, institution = {INRIA}, number = {117}, year = {1990} } @InProceedings{Let02, author = {P. Letouzey}, title = {A New Extraction for Coq}, booktitle = {TYPES}, year = 2002, crossref = {DBLP:conf/types/2002}, url = {http://www.irif.fr/~letouzey/download/extraction2002.pdf} } @InProceedings{Luttik97specificationof, author = {Sebastiaan P. Luttik and Eelco Visser}, booktitle = {2nd International Workshop on the Theory and Practice of Algebraic Specifications (ASF+SDF'97), Electronic Workshops in Computing}, publisher = SV, title = {Specification of Rewriting Strategies}, year = {1997} } @inproceedings{Visser98, author = {Eelco Visser and Zine{-}El{-}Abidine Benaissa and Andrew P. Tolmach}, title = {Building Program Optimizers with Rewriting Strategies}, booktitle = {ICFP}, pages = {13--26}, year = {1998}, } @inproceedings{Visser01, author = {Eelco Visser}, title = {Stratego: {A} Language for Program Transformation Based on Rewriting Strategies}, booktitle = {RTA}, pages = {357--362}, year = {2001}, series = {LNCS}, volume = {2051}, } @InProceedings{DBLP:conf/types/McBride00, author = {Conor McBride}, title = {Elimination with a Motive}, booktitle = {TYPES}, year = {2000}, pages = {197-216}, ee = {http://link.springer.de/link/service/series/0558/bibs/2277/22770197.htm}, crossref = {DBLP:conf/types/2000}, bibsource = {DBLP, http://dblp.uni-trier.de} } @InProceedings{Moh93, author = {C. Paulin-Mohring}, booktitle = {Proceedings of the conference Typed Lambda Calculi and Applications}, editor = {M. Bezem and J.-F. Groote}, note = {Also LIP research report 92-49, ENS Lyon}, number = {664}, publisher = SV, series = {LNCS}, title = {{Inductive Definitions in the System Coq - Rules and Properties}}, year = {1993} } @MastersThesis{Mun94, author = {C. 
Muñoz}, month = sep, school = {DEA d'Informatique Fondamentale, Universit\'e Paris 7}, title = {D\'emonstration automatique dans la logique propositionnelle intuitionniste}, year = {1994} } @Article{Myers, author = {Eugene Myers}, title = {An {O(ND)} difference algorithm and its variations}, journal = {Algorithmica}, volume = {1}, number = {2}, year = {1986}, bibsource = {https://link.springer.com/article/10.1007\%2FBF01840446}, url = {http://www.xmailserver.org/diff2.pdf} } @inproceedings{P86, title={Algorithm development in the calculus of constructions}, author={Mohring, Christine}, booktitle={LICS}, pages={84--91}, year={1986} } @inproceedings{P89, title={Extracting $\Omega$'s programs from proofs in the calculus of constructions}, author={Paulin-Mohring, Christine}, booktitle={Proceedings of the 16th ACM SIGPLAN-SIGACT symposium on Principles of programming languages}, pages={89--104}, year={1989}, doi = {10.1145/75277.75285}, url = {http://dx.doi.org/10.1145/75277.75285}, isbn = 0897912942, organization = {ACM Press} } @inproceedings{P93, title={Inductive definitions in the system coq rules and properties}, author={Paulin-Mohring, Christine}, booktitle={International Conference on Typed Lambda Calculi and Applications}, pages={328--345}, year={1993}, doi = {10.1007/bfb0037116}, url = {http://dx.doi.org/10.1007/bfb0037116}, isbn = 3540565175, organization = {Springer-Verlag} } @inproceedings{PP90, title={Inductively defined types in the Calculus of Constructions}, author={Pfenning, Frank and Paulin-Mohring, Christine}, booktitle={International Conference on Mathematical Foundations of Programming Semantics}, pages={209--228}, year={1989}, doi = {10.1007/bfb0040259}, url = {http://dx.doi.org/10.1007/bfb0040259}, isbn = 0387973753, organization = {Springer-Verlag} } @InProceedings{Parent95b, author = {C. Parent}, booktitle = {{Mathematics of Program Construction'95}}, publisher = SV, series = {LNCS}, title = {{Synthesizing proofs from programs in the Calculus of Inductive Constructions}}, volume = {947}, year = {1995} } @InProceedings{Pit16, Title = {Company-Coq: Taking Proof General one step closer to a real IDE}, Author = {Pit-Claudel, Clément and Courtieu, Pierre}, Booktitle = {CoqPL'16: The Second International Workshop on Coq for PL}, Year = {2016}, Month = jan, Doi = {10.5281/zenodo.44331}, } @Book{RC95, author = {di~Cosmo, R.}, title = {Isomorphisms of Types: from $\lambda$-calculus to information retrieval and language design}, series = {Progress in Theoretical Computer Science}, publisher = {Birkhauser}, year = {1995}, note = {ISBN-0-8176-3763-X} } @Article{Rushby98, title = {Subtypes for Specifications: Predicate Subtyping in {PVS}}, author = {John Rushby and Sam Owre and N. 
Shankar}, journal = {IEEE Transactions on Software Engineering}, pages = {709--720}, volume = 24, number = 9, month = sep, year = 1998 } @InProceedings{sozeau06, author = {Matthieu Sozeau}, title = {Subset Coercions in {C}oq}, year = {2007}, booktitle = {TYPES'06}, pages = {237-252}, volume = {4502}, publisher = "Springer", series = {LNCS} } @InProceedings{sozeau08, Author = {Matthieu Sozeau and Nicolas Oury}, booktitle = {TPHOLs'08}, Pdf = {http://www.lri.fr/~sozeau/research/publications/drafts/classes.pdf}, Title = {{F}irst-{C}lass {T}ype {C}lasses}, Year = {2008}, } @InProceedings{sugar, author = {Alessandro Giovini and Teo Mora and Gianfranco Niesi and Lorenzo Robbiano and Carlo Traverso}, title = {"One sugar cube, please" or Selection strategies in the Buchberger algorithm}, booktitle = { Proceedings of the ISSAC'91, ACM Press}, year = {1991}, pages = {5--4}, publisher = {} } @PhDThesis{Wer94, author = {B. Werner}, school = {Universit\'e Paris 7}, title = {Une th\'eorie des constructions inductives}, type = {Th\`ese de Doctorat}, year = {1994} } @InProceedings{CompiledStrongReduction, author = {Benjamin Gr{\'{e}}goire and Xavier Leroy}, editor = {Mitchell Wand and Simon L. Peyton Jones}, title = {A compiled implementation of strong reduction}, booktitle = {Proceedings of the Seventh {ACM} {SIGPLAN} International Conference on Functional Programming {(ICFP} '02), Pittsburgh, Pennsylvania, USA, October 4-6, 2002.}, pages = {235--246}, publisher = {{ACM}}, year = {2002}, url = {http://doi.acm.org/10.1145/581478.581501}, doi = {10.1145/581478.581501}, timestamp = {Tue, 11 Jun 2013 13:49:16 +0200}, biburl = {http://dblp.uni-trier.de/rec/bib/conf/icfp/GregoireL02}, bibsource = {dblp computer science bibliography, http://dblp.org} } @InProceedings{FullReduction, author = {Mathieu Boespflug and Maxime D{\'{e}}n{\`{e}}s and Benjamin Gr{\'{e}}goire}, editor = {Jean{-}Pierre Jouannaud and Zhong Shao}, title = {Full Reduction at Full Throttle}, booktitle = {Certified Programs and Proofs - First International Conference, {CPP} 2011, Kenting, Taiwan, December 7-9, 2011. 
Proceedings}, series = {Lecture Notes in Computer Science}, volume = {7086}, pages = {362--377}, publisher = {Springer}, year = {2011}, url = {http://dx.doi.org/10.1007/978-3-642-25379-9_26}, doi = {10.1007/978-3-642-25379-9_26}, timestamp = {Thu, 17 Nov 2011 13:33:48 +0100}, biburl = {http://dblp.uni-trier.de/rec/bib/conf/cpp/BoespflugDG11}, bibsource = {dblp computer science bibliography, http://dblp.org} } @inproceedings{MilnerPrincipalTypeSchemes, author = {Damas, Luis and Milner, Robin}, title = {Principal Type-schemes for Functional Programs}, booktitle = {Proceedings of the 9th ACM SIGPLAN-SIGACT Symposium on Principles of Programming Languages}, series = {POPL '82}, year = {1982}, isbn = {0-89791-065-6}, location = {Albuquerque, New Mexico}, pages = {207--212}, numpages = {6}, url = {http://doi.acm.org/10.1145/582153.582176}, doi = {10.1145/582153.582176}, acmid = {582176}, publisher = {ACM}, address = {New York, NY, USA}, } @techreport{abel19:failur_normal_impred_type_theor, author = {Andreas Abel AND Thierry Coquand}, title = {{Failure of Normalization in Impredicative Type Theory with Proof-Irrelevant Propositional Equality}}, year = 2019, institution = {Chalmers and Gothenburg University}, } @inproceedings{ConchonFilliatre07wml, author = {Sylvain Conchon and Jean-Christophe Filliâtre}, title = {A Persistent Union-Find Data Structure}, booktitle = {ACM SIGPLAN Workshop on ML}, publisher = {ACM Press}, pages = {37--45}, year = 2007, address = {Freiburg, Germany}, month = {October}, topics = {team, lri}, type_publi = {icolcomlec}, type_digiteo = {conf_isbn}, x-pdf = {https://www.lri.fr/~filliatr/ftp/publis/puf-wml07.pdf}, url = {https://www.lri.fr/~filliatr/ftp/publis/puf-wml07.pdf}, abstract = { The problem of disjoint sets, also known as union-find, consists in maintaining a partition of a finite set within a data structure. This structure provides two operations: a function find returning the class of an element and a function union merging two classes. An optimal and imperative solution is known since 1975. However, the imperative nature of this data structure may be a drawback when it is used in a backtracking algorithm. This paper details the implementation of a persistent union-find data structure as efficient as its imperative counterpart. To achieve this result, our solution makes heavy use of imperative features and thus it is a significant example of a data structure whose side effects are safely hidden behind a persistent interface. To strengthen this last claim, we also detail a formalization using the Coq proof assistant which shows both the correctness of our solution and its observational persistence. }, x-equipes = {demons PROVAL}, x-type = {article}, x-support = {actes_aux}, x-cle-support = {ML} } coq-8.15.0/doc/sphinx/changes.rst000066400000000000000000016164071417001151100166150ustar00rootroot00000000000000.. _changes: -------------- Recent changes -------------- .. ifconfig:: not is_a_released_version .. include:: ../unreleased.rst Version 8.15 ------------ Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.15 integrates many bug fixes, deprecations and cleanups as well as a few new features. We highlight some of the most impactful changes here: - The :tacn:`apply with ` tactic :ref:`no longer renames arguments <815ApplyWith>` unless compatibility flag :flag:`Apply With Renaming` is set. - :ref:`Improvements <815Auto>` to the :tacn:`auto` tactic family, fixing the :cmd:`Hint Unfold` behavior, and generalizing the use of discrimination nets. 
- The :tacn:`typeclasses eauto` tactic has a new :ref:`best_effort <815BestEffort>` option allowing it to return *partial* solutions to a proof search problem, depending on the mode declarations associated to each constraint. This mode is used by typeclass resolution during type inference to provide more precise error messages. - Many :ref:`commands and options <815Commands>` were deprecated or removed after deprecation and more consistently support locality attributes. - The :cmd:`Import` command is extended with :token:`import_categories` to :ref:`select the components <815Import>` of a module to import or not, including features such as hints, coercions, and notations. - A :ref:`visual Ltac debugger <815LtacDebugger>` is now available in CoqIDE. See the `Changes in 8.15.0`_ section below for the detailed list of changes, including potentially breaking changes marked with **Changed**. Coq's `reference manual for 8.15 `_, `documentation of the 8.15 standard library `_ and `developer documentation of the 8.15 ML API `_ are also available. Emilio Jesús Gallego Arias, Gaëtan Gilbert, Michael Soegtrop and Théo Zimmermann worked on maintaining and improving the continuous integration system and package building infrastructure. Erik Martin-Dorel has maintained the `Coq Docker images `_ that are used in many Coq projects for continuous integration. The OPAM repository for Coq packages has been maintained by Guillaume Claret, Karl Palmskog, Matthieu Sozeau and Enrico Tassi with contributions from many users. A list of packages is available at https://coq.inria.fr/opam/www/. The `Coq Platform `_ has been maintained by Michael Soegtrop and Enrico Tassi. Our current maintainers are Yves Bertot, Frédéric Besson, Ali Caglayan, Tej Chajed, Cyril Cohen, Pierre Corbineau, Pierre Courtieu, Maxime Dénès, Jim Fehrle, Julien Forest, Emilio Jesús Gallego Arias, Gaëtan Gilbert, Georges Gonthier, Benjamin Grégoire, Jason Gross, Hugo Herbelin, Vincent Laporte, Olivier Laurent, Assia Mahboubi, Kenji Maillard, Guillaume Melquiond, Pierre-Marie Pédrot, Clément Pit-Claudel, Pierre Roux, Kazuhiko Sakaguchi, Vincent Semeria, Michael Soegtrop, Arnaud Spiwack, Matthieu Sozeau, Enrico Tassi, Laurent Théry, Anton Trunov, Li-yao Xia and Théo Zimmermann. See the `Coq Team face book `_ page for more details. The 41 contributors to this version are Tanaka Akira, Frédéric Besson, Juan Conejero, Ali Caglayan, Cyril Cohen, Adrian Dapprich, Maxime Dénès, Stéphane Desarzens, Christian Doczkal, Andrej Dudenhefner, Jim Fehrle, Emilio Jesús Gallego Arias, Attila Gáspár, Gaëtan Gilbert, Jason Gross, Hugo Herbelin, Jasper Hugunin, Bart Jacobs, Ralf Jung, Grant Jurgensen, Jan-Oliver Kaiser, Wojciech Karpiel, Fabian Kunze, Olivier Laurent, Yishuai Li, Erik Martin-Dorel, Guillaume Melquiond, Jean-Francois Monin, Pierre-Marie Pédrot, Rudy Peterson, Clément Pit-Claudel, Seth Poulsen, Pierre Roux, Takafumi Saikawa, Kazuhiko Sakaguchi, Michael Soegtrop, Matthieu Sozeau, Enrico Tassi, Laurent Théry, Anton Trunov and Théo Zimmerman. The Coq community at large helped improve the design of this new version via the GitHub issue and pull request system, the Coq development mailing list coqdev@inria.fr, the coq-club@inria.fr mailing list, the `Discourse forum `_ and the `Coq Zulip chat `_. Version 8.15's development spanned 3 months from the release of Coq 8.14.0. Gaëtan Gilbert is the release manager of Coq 8.15. This release is the result of 384 merged PRs, closing 67 issues. 
| Nantes, January 2022, | Matthieu Sozeau for the Coq development team Changes in 8.15.0 ~~~~~~~~~~~~~~~~~ .. contents:: :local: Kernel ^^^^^^ - **Fixed:** Name clash in a computation of the type of parameters of functorial module types; this computation was provided for the purpose of clients using the algebraic form of module types such as :cmd:`Print Module Type` (`#15385 `_, fixes `#9555 `_, by Hugo Herbelin). Specification language, type inference ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - **Changed:** :cmd:`Instance` warns about the default locality immediately rather than waiting until the instance is ready to be defined. This changes which command warns when the instance has a separate proof: the :cmd:`Instance` command itself warns instead of the proof closing command (such as :cmd:`Defined`). (`#14705 `_, by Gaëtan Gilbert). - **Removed:** Arguments of section variables may no longer be renamed with :cmd:`Arguments` (this was previously applied inconsistently) (`#14573 `_, by Gaëtan Gilbert). - **Added:** Non-dependent implicit arguments can be provided explicitly using the syntax :n:`(@natural := @term)` where :token:`natural` is the index of the implicit argument among all non-dependent arguments of the function, starting from 1 (`#11099 `_, by Hugo Herbelin). - **Added:** :cmd:`Succeed`, a :n:`@control_command` that verifies that the given :n:`@sentence` succeeds without changing the proof state (`#14750 `_, by Gaëtan Gilbert). - **Fixed:** The :n:`@term.(@qualid {* @arg })` syntax now takes into account the position of the main argument :n:`@term` when computing the implicit arguments of :n:`@qualid` (`#14606 `_, fixes `#4167 `_, by Hugo Herbelin). - **Fixed:** Source and target of coercions preserved by module instantiation (`#14668 `_, fixes `#3527 `_, by Hugo Herbelin). - **Fixed:** Made reference manual consistent with the implementation regarding the role of recursively non-uniform parameters of inductive types in the nested positivity condition (`#14967 `_, fixes `#14938 `_, by Hugo Herbelin) Notations ^^^^^^^^^ - **Changed:** Terms printed in error messages may be more verbose if syntactic sugar would make it appear that the obtained and expected terms only differ in existential variables (`#14672 `_, by Gaëtan Gilbert). - **Removed:** the ``Numeral Notation`` command that was renamed to :cmd:`Number Notation` in 8.13. (`#14819 `_, by Pierre Roux). - **Removed:** primitive float notations ``<``, ``<=`` and ``==`` that were replaced by ```_, by Pierre Roux). - **Removed:** primitive integer notations ``\%``, ``<``, ``<=`` and ``==`` that were replaced by ``mod``, ```_, by Pierre Roux). - **Added:** Include floats in the number notation mechanism (`#14525 `_, by Pierre Roux). - **Added:** Coercion entries and :n:`ident`/:n:`global` entries in custom notations now respect the :n:`only parsing` modifier (`#15340 `_, fixes `#15335 `_, by Hugo Herbelin). - **Fixed:** :cmd:`Reserved Infix` now accept further parameters in the infix notation (`#14379 `_, fixes `#11402 `_, by Hugo Herbelin). - **Fixed:** Useless self reference when printing abbreviations declared in nested modules (`#14493 `_, fixes one part of `#12777 `_ and `#14486 `_, by Hugo Herbelin). - **Fixed:** anomalies with notation applied in `match` patterns when the notation have a notation variable at head (`#14713 `_, fixes `#14708 `_, by Hugo Herbelin). - **Fixed:** Regression in parsing error reporting in case of empty custom entry (`#15338 `_, fixes `#15334 `_, by Hugo Herbelin). Tactics ^^^^^^^ .. 
_815ApplyWith: - **Changed:** ``apply with`` does not rename arguments unless using compatibility flag :flag:`Apply With Renaming` (`#13837 `_, fixes `#13759 `_, by Gaëtan Gilbert). Porting hint: if the renaming is because of a goal variable (eg ``intros x; apply foo with (x0 := bar)`` where ``About foo.`` says the argument is called ``x``) it is probably caused by an interaction with implicit arguments and ``apply @foo with (x := bar)`` will usually be a backwards compatible fix. .. _815Auto: - **Changed:** :cmd:`Hint Unfold` in discriminated databases now respects its specification, namely that a constant may be unfolded only when it is the head of the goal. The previous behavior was to perform unfolding on any goal, without any limitation. An unexpected side-effect of this was that a database that contained ``Unfold`` hints would sometimes trigger silent strong βι-normalization of the goal. Indeed, :tacn:`unfold` performs such a normalization regardless of the presence of its argument in the goal. This does introduce a bit of backwards incompatibility, but it occurs in very specific situations and is easily circumvented. Since by default hint bases are not discriminated, it means that incompatibilities are typically observed when adding unfold hints to the typeclass database. In order to recover the previous behavior, it is enough to replace instances of ``Hint Unfold foo.`` with ``Hint Extern 4 => progress (unfold foo).``. A less compatible but finer-grained change can be achieved by only adding the missing normalization phase with ``Hint Extern 4 => progress (lazy beta iota).`` (`#14679 `_, fixes `#14874 `_, by Pierre-Marie Pédrot). - **Changed:** Correctly consider variables without a body to be rigid for the pattern recognition algorithm of discriminated hints (`#14722 `_, by Pierre-Marie Pédrot). - **Changed:** Use discrimination nets for goals containing evars in all :tacn:`auto` tactics. It essentially makes the behavior of undiscriminated databases to be the one of discriminated databases where all constants are considered transparent. This may be incompatible with previous behavior in very rare cases (`#14848 `_, by Pierre-Marie Pédrot). - **Changed:** The ``choice`` strategy for :tacn:`rewrite_strat` is now of arbitrary arity (`#14989 `_, fixes `#6109 `_, by Gaëtan Gilbert). - **Changed:** The :tacn:`exact` tactic now takes a :g:`uconstr` as argument instead of an ad-hoc one. In very rare cases, this can change the order of resolution of dependent evars when used over several goals at once (`#15171 `_, by Pierre-Marie Pédrot). - **Changed:** :tacn:`cbn` interprets the combination of the ``!`` and ``/`` modifiers (from :cmd:`Arguments`) to mean "unfold as soon as all arguments before the ``/`` are provided and all arguments marked with ``!`` reduce to a constructor". This makes it unfold more often than without the ``/`` when all arguments are provided. Previously adding ``/`` would only prevent unfolding when insufficient arguments are provided without adding new unfoldings. Note that this change only takes effect in default mode (as opposed to when ``simpl nomatch`` was used) (`#15204 `_, fixes `#4555 `_ and `#7674 `_, by Gaëtan Gilbert). - **Removed:** the deprecated new auto tactic (`#14527 `_, by Pierre-Marie Pédrot). - **Removed:** deprecated syntax for :tacn:`instantiate` using capitalized ``Value`` or ``Type`` (`#15193 `_, by Gaëtan Gilbert). - **Removed:** deprecated ``autoapply ... using`` syntax for :tacn:`autoapply` (`#15194 `_, by Gaëtan Gilbert). 
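To illustrate the :cmd:`Hint Unfold` migration described above, here is a
minimal sketch in which ``foo`` is a placeholder definition and ``core`` an
arbitrary hint database; the :cmd:`Hint Extern` line is the recipe given in
the entry::

   Definition foo (n : nat) : nat := n + 0.

   (* Former declaration, whose behavior changed in discriminated databases: *)
   (* #[global] Hint Unfold foo : core. *)

   (* Closest equivalent of the pre-8.15 behavior, as suggested above: *)
   #[global] Hint Extern 4 => progress (unfold foo) : core.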
- **Deprecated:** the :tacn:`bfs eauto` tactic. Since its introduction it has behaved exactly like the :tacn:`eauto` tactic. Use :tacn:`typeclasses eauto` with the `bfs` flag instead (`#15314 `_, fixes `#15300 `_, by Pierre-Marie Pédrot). - **Added:** The :tacn:`zify` tactic can now recognize `Pos.Nsucc_double`, `Pos.Ndouble`, `N.succ_double`, `N.double`, `N.succ_pos`, `N.div2`, `N.pow`, `N.square`, and `Z.to_pos`. Moreover, importing module `ZifyBool` lets it recognize `Pos.eqb`, `Pos.leb`, `Pos.ltb`, `N.eqb`, `N.leb`, and `N.ltb` (`#10998 `_, by Kazuhiko Sakaguchi). .. _815BestEffort: - **Added:** :ref:`best_effort ` option to :tacn:`typeclasses eauto`, to return a *partial* solution to its initial proof-search problem. The goals that can remain unsolved are determined according to the modes declared for their head (see :cmd:`Hint Mode`). This is used by typeclass resolution during type inference to provide more informative error messages. (`#13952 `_, fixes `#13942 `_ and `#14125 `_, by Matthieu Sozeau). - **Added:** A new :table:`Keep Equalities` table to selectively control the preservation of subterm equalities for the :tacn:`injection` tactic. It allows a finer control than the boolean flag :flag:`Keep Proof Equalities` that acts globally. (`#14439 `_, by Pierre-Marie Pédrot). - **Added:** :tacn:`simple congruence` tactic which works like :tacn:`congruence` but does not unfold definitions. (`#14657 `_, fixes `#13778 `_ and `#5394 `_ and `#13189 `_, by Andrej Dudenhefner). - **Added:** Small enhancement of unification in the presence of local definitions (`#14673 `_, fixes `#4415 `_, by Hugo Herbelin). - **Added:** `dfs` option in :tacn:`typeclasses eauto` to use depth-first search (`#14693 `_, fixes `#13859 `_, by Ali Caglayan). - **Fixed:** More flexible hypothesis specialization in :tacn:`congruence`. (`#14650 `_, fixes `#14651 `_ and `#14662 `_, by Andrej Dudenhefner). - **Fixed:** Added caching to congruence initialization to avoid quadratic runtime. (`#14683 `_, fixes `#5548 `_, by Andrej Dudenhefner). - **Fixed:** Correctly handle matching up to η-expansion in discriminated hints (`#14732 `_, fixes `#14731 `_, by Pierre-Marie Pédrot). - **Fixed:** Old unification understands some inductive cumulativity (`#14758 `_, fixes `#14734 `_ and `#6976 `_, by Gaëtan Gilbert). - **Fixed:** The :tacn:`clear dependent ` tactic now does not backtrack internally, preventing an exponential blowup (`#14984 `_, fixes `#11689 `_, by Pierre-Marie Pédrot). - **Fixed:** :tacn:`setoid_rewrite` now works when the rewriting lemma has non dependent arguments and rewriting under binders (`#14986 `_, fixes `#5369 `_, by Gaëtan Gilbert). - **Fixed:** Regression in 8.14.0 and 8.14.1 with action pattern :n:`%` in :n:`as` clause of tactic :tacn:`specialize` (`#15245 `_, fixes `#15244 `_, by Hugo Herbelin). Tactic language ^^^^^^^^^^^^^^^ - **Fixed:** the parsing level of the Ltac2 tactic :tacn:`now` was set to level 6 in order to behave as it did before 8.14 (`#15250 `_, fixes `#15122 `_, by Pierre-Marie Pédrot). SSReflect ^^^^^^^^^ - **Changed:** rewrite generates subgoals in the expected order (side conditions first, by default) also when rewriting with a setoid relation. (`#14314 `_, fixes `#5706 `_, by Enrico Tassi). - **Removed:** The ssrsearch plugin and the ssr Search command (`#13760 `_, by Jim Fehrle). - **Added:** port the additions made to `ssrbool.v` in math-comp `PR #757 `_, namely `reflect` combinators `negPP`, `orPP`, `andPP` and `implyPP` (`#15059 `_, by Christian Doczkal). 
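As a small illustration of the new :tacn:`simple congruence` tactic mentioned
above, the following artificial goal is closed by congruence closure alone,
with no definition unfolding involved::

   Goal forall (f : nat -> nat) (x y : nat), x = y -> f x = f y.
   Proof.
     intros f x y Hxy.
     simple congruence.
   Qed.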
- **Fixed:** SSR patterns now work with primitive values such as ints, floats or arrays (`#14660 `_, fixes `#12770 `_, by Juan Conejero). - **Fixed:** A bug where :tacn:`suff` would fail due to use of :tacn:`apply` under the hood. (`#14687 `_, fixes `#14678 `_, by Ali Caglayan helped by Enrico Tassi). Commands and options ^^^^^^^^^^^^^^^^^^^^ .. _815Commands: - **Changed:** :cmd:`About` and :cmd:`Print` now display all known argument names (`#14596 `_, grants `#13830 `_, by Hugo Herbelin). - **Changed:** :cmd:`Typeclasses Transparent` and :cmd:`Typeclasses Opaque` support ``#[local]``, ``#[export]`` and ``#[global]`` attributes (`#14685 `_, fixes `#14513 `_, by Gaëtan Gilbert). - **Changed:** In extraction to OCaml, empty types in :n:`Type` (such as :n:`Empty_set`) are now extracted to an abstract type (empty by construction) rather than to the OCaml's :n:`unit` type (`#14802 `_, fixes a remark at `#14801 `_, by Hugo Herbelin). - **Changed:** Closed modules now live in a separate namespace from open modules and sections. (`#15078 `_, fixes `#14529 `_, by Gaëtan Gilbert). - **Removed:** boolean attributes ``monomorphic``, ``noncumulative`` and ``notemplate`` that were replaced by ``polymorphic=no``, ``cumulative=no`` and ``template=no`` in 8.13. (`#14819 `_, by Pierre Roux). - **Removed:** command ``Grab Existential Variables`` that was deprecated in 8.13. Use :cmd:`Unshelve` that is mostly equivalent, up to the reverse order of the resulting subgoals. (`#14819 `_, by Pierre Roux). - **Removed:** command ``Existential`` that was deprecated in 8.13. Use :cmd:`Unshelve` and :tacn:`exact`. (`#14819 `_, by Pierre Roux). - **Removed:** the `-outputstate` command line argument and the corresponding vernacular commands `Write State` and `Restore State` (`#14940 `_, by Pierre-Marie Pédrot) - **Deprecated:** ambiguous :cmd:`Proof using` and :cmd:`Collection` usage (`#15056 `_, fixes `#13296 `_, by Wojciech Karpiel). - **Deprecated:** `Universal Lemma Under Conjunction` flag that was introduced for compatibility with Coq versions prior to 8.4 (`#15272 `_, by Théo Zimmermann). - **Deprecated:** using :cmd:`Hint Cut`, :cmd:`Hint Mode`, :cmd:`Hint Transparent`, :cmd:`Hint Opaque`, :cmd:`Typeclasses Transparent` or :cmd:`Typeclasses Opaque` without an explicit locality outside sections. (`#14697 `_, by Pierre-Marie Pédrot, and `#14685 `_, by Gaëtan Gilbert) - **Added:** The :flag:`Mangle Names Light` flag, which changes the behavior of :flag:`Mangle Names`. For example, the name `foo` becomes `_0` with :flag:`Mangle Names`, but with :flag:`Mangle Names Light` set, it will become `_foo`. (`#14695 `_, fixes `#14548 `_, by Ali Caglayan). - **Added:** The :cmd:`Hint Cut`, :cmd:`Hint Mode`, :cmd:`Hint Transparent`, :cmd:`Hint Opaque`, :cmd:`Typeclasses Transparent` and :cmd:`Typeclasses Opaque` commands now accept the :attr:`local`, :attr:`export` and :attr:`global` locality attributes inside sections. With either attribute, the commands will trigger the `non-local-section-hint` warning if the arguments refer to local section variables (`#14697 `_, by Pierre-Marie Pédrot, and `#14685 `_, fixes `#14513 `_, by Gaëtan Gilbert). - **Added:** :attr:`projections(primitive)` attribute to make a record use primitive projections (`#14699 `_, fixes `#13150 `_, by Ali Caglayan). .. _815Import: - **Added:** Syntax for :token:`import_categories` providing selective import of module components (eg ``Import(notations) M`` (`#14892 `_, by Gaëtan Gilbert). 
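A minimal sketch of the :token:`import_categories` syntax described above;
``M`` and the ``+++`` notation are placeholders::

   Module M.
     Notation "x +++ y" := (x + y) (at level 50).
   End M.

   (* Activate only the notations of M; hints, coercions, canonical
      structures, etc. declared in M would stay inactive. *)
   Import (notations) M.
   Check (1 +++ 2).

   (* Complement form: import everything except the listed categories. *)
   Import -(coercions) M.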
- **Added:** :cmd:`Search` understands modifier ``in`` as an alias of ``inside`` (`#15139 `_, fixes `#14930 `_, by Gaëtan Gilbert). This is intended to ease transition for ssreflect Search users. - **Fixed:** interaction of Program's obligation state and modules and sections: obligations started in a parent module or section are not available to be solved until the submodules and subsections are closed (`#14780 `_, fixes `#14446 `_, by Gaëtan Gilbert). - **Fixed:** :cmd:`Eval` and :cmd:`Compute` now beta-iota-simplify the type of the result, like :cmd:`Check` does (`#14901 `_, fixes `#14899 `_, by Hugo Herbelin) Command-line tools ^^^^^^^^^^^^^^^^^^ - **Changed:** Coqdoc options ``--coqlib`` and ``--coqlib_path`` have been renamed to ``--coqlib_url`` and ``--coqlib`` to make them more consistent with flags used by other Coq executables (`#14059 `_, by Emilio Jesus Gallego Arias). - **Changed:** Syntax of `_CoqProject` files: `-arg` is now handled by :ref:`coq_makefile ` and not by `make`. Unquoted `#` now start line comments. (`#14558 `_, by Stéphane Desarzens, with help from Jim Fehrle and Enrico Tassi). - **Changed:** :cmd:`Require` now selects files whose logical name exactly matches the required name, making it possible to unambiguously select a given file: if several :n:`-Q` or :n:`-R` options bind the same logical name to a different file, the option appearing last on the command line takes precedence. Moreover, it is now an error to require a file using a partial logical name which does not resolve to a non-ambiguous path (`#14718 `_, by Hugo Herbelin). - **Changed:** ``coq_makefile`` now declares variable ``COQBIN`` to avoid warnings in ``make --warn`` mode (`#14787 `_, by Clément Pit-Claudel). - **Changed:** ``coqchk`` respects the :flag:`Kernel Term Sharing` flag instead of forcing it on (`#14957 `_, by Gaëtan Gilbert) - **Removed:** These options of :ref:`coq_makefile `: `-extra`, `-extra-phony`, `-custom`, `-no-install`, `-install`, `-no-opt`, `-byte`. Support for subdirectories is also removed. (`#14558 `_, by Stéphane Desarzens, with help from Jim Fehrle and Enrico Tassi). - **Added:** :ref:`coq_makefile ` now takes the `-docroot` option as alternative to the `INSTALLCOQDOCROOT` variable (`#14558 `_, by Stéphane Desarzens, with help from Jim Fehrle and Enrico Tassi). - **Fixed:** Various `coqdep` issues with the `From` clause of :cmd:`Require` and a few inconsistencies between `coqdep` and `coqc` disambiguation of :cmd:`Require` (`#14718 `_, fixes `#11631 `_ and `#14539 `_, by Hugo Herbelin). - **Fixed:** ``coq_makefile`` has improved logic when dealing with incorrect ``_CoqProject`` files (`#13541 `_, fixes `#9319 `_, by Fabian Kunze). - **Fixed:** ``coqdep`` was confusing periods occurring in comments with periods ending Coq sentences (`#14996 `_, fixes `#7393 `_, by Hugo Herbelin). CoqIDE ^^^^^^ - **Changed:** CoqIDE unicode keys for brackets (e.g. `\langle`) now bind to unicode mathematical symbols rather than unicode CJK brackets (`#14452 `_, by Bart Jacobs). - **Changed:** All occurrences of the name `CoqIde` to `CoqIDE`. This may cause issues with installing and uninstalling desktop icons, causing apparent duplicates. (`#14696 `_, fixes `#14310 `_, by Ali Caglayan). .. _815LtacDebugger: - **Added:** Initial version of a visual debugger in CoqIDE. Supports setting breakpoints visually and jumping to the stopping point plus continue, step over, step in and step out operations. Displays the call stack and variable values for each stack frame. Currently only for Ltac. 
See the documentation :ref:`here `. (`#14644 `_, fixes `#13967 `_, by Jim Fehrle) - **Fixed:** It is now possible to deactivate the unicode completion mechanism in CoqIDE (`#14863 `_, by Pierre-Marie Pédrot). Standard library ^^^^^^^^^^^^^^^^ - **Changed:** Permutation-related Proper instances are now at default priority instead of priority ``10`` (`#14574 `_, fixes `#14571 `_, by Gaëtan Gilbert). - **Changed:** The new type of `epsilon_smallest` is `(exists n : nat, P n) -> { n : nat | P n /\ forall k, P k -> n <= k }`. Here the minimality of `n` is expressed by `forall k, P k -> n <= k` corresponding to the intuitive meaning of minimality "the others are greater", whereas the previous version used the negative equivalent formulation `forall k, k < n -> ~P k`. Scripts using `epsilon_smallest` can easily be adapted using lemmas `le_not_lt` and `lt_not_le` from the standard library. (`#14601 `_, by Jean-Francois Monin). - **Changed:** ``ltb`` and ``leb`` functions for ``ascii``, into comparison-based definition (`#14234 `_, by Yishuai Li). - **Removed:** the file ``Numeral.v`` that was replaced by ``Number.v`` in 8.13. (`#14819 `_, by Pierre Roux). - **Removed:** some ``*_invol`` functions that were renamed ``*_involutive`` for consistency with the remaining of the stdlib in 8.13. (`#14819 `_, by Pierre Roux). - **Deprecated:** ``frexp`` and ``ldexp`` in `FloatOps.v`, renamed ``Z.frexp`` and ``Z.ldexp`` (`#15085 `_, by Pierre Roux). - **Added:** A proof that incoherent equivalences can be adjusted to adjoint equivalences in ``Logic.Adjointification``. (`#13408 `_, by Jasper Hugunin). - **Added:** ``ltb`` and ``leb`` functions for ``string``, and some lemmas about them; - **Added:** simple non dependent product ``slexprod`` in ``Relations/Relation_Operators.v`` and its proof of well-foundness ``wf_slexprod`` in ``Wellfounded/Lexicographic_Product.v`` (`#14809 `_, by Laurent Thery). - **Added:** The notations ``(x; y)``, ``x.1``, ``x.2`` for sigT are now exported and available after ``Import SigTNotations.`` (`#14813 `_, by Laurent Théry). - **Added:** The function ``sigT_of_prod`` turns a pair ``A * B`` into ``{_ : A & B}``. Its inverse function is ``prod_of_sigT``. This is shown by theorems ``sigT_prod_sigT`` and ``prod_sigT_prod``. (`#14813 `_, by Laurent Théry). - **Fixed:** ``split_combine`` lemma for lists, making it usable (`#14458 `_, by Yishuai Li). Infrastructure and dependencies ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - **Changed:** Coq's continuous integration now provides a more accessible Windows installer artifact in the "Checks" GitHub tab, both for pull requests and the `master` branch. This facilitates testing Coq's bleeding edge builds on Windows, and should be more reliable than the previous setup (`#12425 `_, by Emilio Jesus Gallego Arias). - **Changed:** Coq's ``./configure`` script has gone through a major cleanup. 
In particular, the following options have been removed: - ``-force-caml-version``, ``-force-findlib-version``: Coq won't compile with OCaml or findlib lower than the required versions; - ``-vmbyteflags``, ``-custom``, ``-no-custom``: linking options for toplevels are now controlled in ``topbin/dune``; - ``-ocamlfind``: Coq will now use the toolchain specified in the Dune configuration; this can be controlled using the workspaces feature; - ``-nodebug``: Coq will now follow the standard, which is to always pass ``-g`` to OCaml; this can be modified using a custom Dune workspace; - ``-flambda-opts``: compilation options are now set in Coq's root ``dune`` file, can be updated using a custom Dune workspace; - ``-local``, ``-bindir``, ``-coqdocdir``, ``-annotate``, ``-camldir``, ``-profiling``: these flags were deprecated in 8.14, and are now removed. Moreover, the ``-annot`` and ``-bin-annot`` flags only take effect to set ``coq-makefile``'s defaults. (`#14189 `_, by Emilio Jesus Gallego Arias). - **Changed:** Configure will now detect the Dune version, and will correctly pass ``-etcdir`` and ``-docdir`` to the install procedure if Dune >= 2.9 is available. Note that the ``-docdir`` configure option now refers to root path for documentation. If you would like to install Coq documentation in ``foo/coq``, use ``-docdir foo``. (`#14844 `_, by Emilio Jesus Gallego Arias). - **Changed:** OCaml 4.13 is now officially supported (`#14879 `_, by Emilio Jesus Gallego Arias) - **Changed:** Sphinx 3.0.2 or above is now required to build the reference manual (`#14963 `_, by Théo Zimmermann) Extraction ^^^^^^^^^^ - **Changed:** replaced ``Big`` module with ``Big_int_Z`` functions from ``zarith``. OCaml code extracted with the following modules should be linked to the `Zarith `_ library. + ``ExtrOcamlNatBigInt`` + ``ExtrOcamlZBigInt`` Removed ``ExtrOcamlBigIntConv`` module. (`#8252 `_, by Yishuai Li). - **Fixed:** compilation errors in ExtrOcamlString and ExtrOcamlNativeString (`#15075 `_, fixes `#15076 `_, by Yishuai Li). Version 8.14 ------------ Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.14 integrates many usability improvements, as well as an important change in the core language. The main changes include: - The :ref:`internal representation <814CaseRepresentation>` of `match` has changed to a more space-efficient and cleaner structure, allowing the fix of a completeness issue with cumulative inductive types in the type-checker. The internal representation is now closer to the user-level view of `match`, where the argument context of branches and the inductive binders `in` and `as` do not carry type annotations. - A :ref:`new <814CoqNative>` `coqnative` binary performs separate native compilation of libraries, starting from a `.vo` file. It is supported by `coq_makefile`. - :ref:`Improvements <814TCCanon>` to typeclasses and canonical structure resolution, allowing more terms to be considered as classes or keys. - More control over :ref:`notations <814Notations>` declarations and support for primitive types in string and number notations. - :ref:`Removal <814Tactics>` of deprecated tactics, notably `omega`, which has been replaced by a greatly improved `lia`, along with many bug fixes. - New :ref:`Ltac2 <814Ltac2>` APIs for interaction with Ltac1, manipulation of inductive types and printing. - Many :ref:`changes and additions <814Stdlib>` to the standard library in the numbers, vectors and lists libraries. 
A new signed primitive integers library `Sint63` is available in addition to the unsigned `Uint63` library. See the `Changes in 8.14.0`_ section below for the detailed list of changes, including potentially breaking changes marked with **Changed**. Coq's `reference manual `_, `documentation of the standard library `_ and `developer documentation of the ML API `_ are also available. Emilio Jesús Gallego Arias, Gaëtan Gilbert, Michael Soegtrop and Théo Zimmermann worked on maintaining and improving the continuous integration system and package building infrastructure. Erik Martin-Dorel has maintained the `Coq Docker images `_ that are used in many Coq projects for continuous integration. The OPAM repository for Coq packages has been maintained by Guillaume Claret, Karl Palmskog, Matthieu Sozeau and Enrico Tassi with contributions from many users. A list of packages is available at https://coq.inria.fr/opam/www/. The `Coq Platform `_ has been maintained by Michael Soegtrop and Enrico Tassi. Our current maintainers are Yves Bertot, Frédéric Besson, Ali Caglayan, Tej Chajed, Cyril Cohen, Pierre Corbineau, Pierre Courtieu, Maxime Dénès, Jim Fehrle, Julien Forest, Emilio Jesús Gallego Arias, Gaëtan Gilbert, Georges Gonthier, Benjamin Grégoire, Jason Gross, Hugo Herbelin, Vincent Laporte, Olivier Laurent, Assia Mahboubi, Kenji Maillard, Guillaume Melquiond, Pierre-Marie Pédrot, Clément Pit-Claudel, Pierre Roux, Kazuhiko Sakaguchi, Vincent Semeria, Michael Soegtrop, Arnaud Spiwack, Matthieu Sozeau, Enrico Tassi, Laurent Théry, Anton Trunov, Li-yao Xia and Théo Zimmermann. See the `Coq Team face book `_ page for more details. The 54 contributors to this version are Reynald Affeldt, Arthur Azevedo de Amorim, Yves Bertot, Frédéric Besson, Lasse Blaauwbroek, Ana Borges, Ali Caglayan, Cyril Cohen, Pierre Courtieu, Maxime Dénès, Stéphane Desarzens, Andrej Dudenhefner, Jim Fehrle, Yannick Forster, Simon Friis Vindum, Gaëtan Gilbert, Jason Gross, Samuel Gruetter, Stefan Haan, Hugo Herbelin, Jasper Hugunin, Emilio Jesús Gallego Arias, Jacques-Henri Jourdan, Ralf Jung, Jan-Oliver Kaiser, Fabian Kunze, Vincent Laporte, Olivier Laurent, Yishuai Li, Barry M. Trager, Kenji Maillard, Erik Martin-Dorel, Guillaume Melquiond, Isaac Oscar Gariano, Pierre-Marie Pédrot, Rudy Peterson, Clément Pit-Claudel, Pierre Roux, Takafumi Saikawa, Kazuhiko Sakaguchi, Gabriel Scherer, Vincent Semeria, shenlebantongying, Avi Shinnar, slrnsc, Michael Soegtrop, Matthieu Sozeau, Enrico Tassi, Hendrik Tews, Anton Trunov, Karolin Varner, Li-yao Xia, Beta Ziliani and Théo Zimmermann. The Coq community at large helped improve the design of this new version via the GitHub issue and pull request system, the Coq development mailing list coqdev@inria.fr, the coq-club@inria.fr mailing list, the `Discourse forum `_ and the `Coq Zulip chat `_. Version 8.14's development spanned 9 months from the release of Coq 8.13.0. Guillaume Melquiond is the release manager of Coq 8.14. This release is the result of 522 merged PRs, closing ~150 issues. | Nantes, September 2021, | Matthieu Sozeau for the Coq development team Changes in 8.14.0 ~~~~~~~~~~~~~~~~~ .. contents:: :local: Kernel ^^^^^^ .. _814CaseRepresentation: - **Changed:** The term representation of pattern-matchings now uses a compact form that provides a few static guarantees such as eta-expansion of branches and return clauses and is usually more efficient. 
The most visible user change is that for the time being, the :tacn:`destruct` tactic and its variants generate dummy cuts (β redexes) in the branches of the generated proof. This can also generate very uncommon backwards incompatibilities, such as a change of occurrence numbering for subterms, or breakage of unification in complex situations involving pattern-matchings whose underlying inductive type declares let-bindings in parameters, arity or constructor types. For ML plugin developers, an in-depth description of the new representation, as well as porting tips, can be found in dev/doc/case-repr.md (`#13563 `_, fixes `#3166 `_, by Pierre-Marie Pédrot). - **Changed:** Linking of native-code libraries used by :tacn:`native_compute` is now delayed until an actual call to the :tacn:`native_compute` machinery is performed. This should make Coq more responsive on some systems (`#13853 `_, fixes `#13849 `_, by Guillaume Melquiond). - **Removed:** The ability to change typing flags inside sections to prevent exploiting a weakness in :cmd:`Print Assumptions` (`#14395 `_, fixes `#14317 `_, by Gaëtan Gilbert). Specification language, type inference ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. _814TCCanon: - **Changed:** The hints mode ``!`` matches a term iff the applicative head is not an existential variable. It now also matches projections applied to any term or a `match` on any term. (`#14392 `_, by Matthieu Sozeau). - **Removed:** The little used `:>` type cast, which was only interpreted in Program-mode (`#13911 `_, by Jim Fehrle and Théo Zimmermann). - **Added:** Enable canonical `fun _ => _` projections, see :ref:`canonicalstructures` for details. (`#14041 `_, by Jan-Oliver Kaiser and Pierre Roux, reviewed by Cyril Cohen and Enrico Tassi). - **Added:** :cmd:`Canonical Structure` declarations now accept dependent function types `forall _, _` as keys. (`#14386 `_, by Jan-Oliver Kaiser and Kazuhiko Sakaguchi). - **Added:** Ability to declare primitive projections as class, for dependent typeclass resolutions (`#9711 `_, fixes `#12975 `_, by Matthieu Sozeau). - **Fixed:** Multiple printing of same warning about unused variables catching several cases (`#14261 `_, fixes `#14207 `_, by Hugo Herbelin). - **Fixed:** Constants :g:`id` and :g:`not` were unduly set opaque in some parts of the unification algorithm (`#14371 `_, fixes `#14374 `_, by Hugo Herbelin). Notations ^^^^^^^^^ .. _814Notations: - **Changed:** Flag :flag:`Printing Notations` no longer controls whether strings and numbers are printed raw (`#13840 `_, by Enrico Tassi). - **Changed:** The error ``Argument X was previously inferred to be in scope XXX_scope but is here used in YYY_scope.`` is now the warning ``[inconsistent-scopes,syntax]`` and can be silenced by specifying the scope of the argument (`#13965 `_, by Enrico Tassi). - **Removed:** Decimal-only number notations which were deprecated in 8.12. (`#13842 `_, by Pierre Roux). - **Added:** :cmd:`Number Notation` and :cmd:`String Notation` now support parsing and printing of primitive floats, primitive arrays and type constants of primitive types. (`#13519 `_, fixes `#13484 `_ and `#13517 `_, by Fabian Kunze, with help of Jason Gross) - **Added:** Flag :flag:`Printing Raw Literals` to control whether strings and numbers are printed raw. (`#13840 `_, by Enrico Tassi). - **Added:** Let the user specify a scope for abbreviation arguments, e.g. ``Notation abbr X := t (X in scope my_scope)``. (`#13965 `_, by Enrico Tassi). 
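A sketch of the abbreviation argument scope annotation mentioned above, where
``twiceZ`` is a placeholder name::

   From Coq Require Import ZArith.

   (* The annotation forces the argument X to be interpreted in Z_scope,
      even when that scope is not open. *)
   Notation twiceZ X := (Z.add X X) (X in scope Z_scope).

   Check (twiceZ 3).  (* the literal 3 is read as a Z constant *)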
- **Added:** Look-ahead of tokens is changed from sequential to tree-based, allowing more automatic rule factorizations in notations (`#14070 `_, by Hugo Herbelin). - **Fixed:** Non-local custom entries survive module closing and are declared when a file is Required (`#14183 `_, fixes `#13654 `_, by Gaëtan Gilbert). - **Fixed:** :g:`ident` modifier in custom entry notations gave fatal errors at printing time (`#14257 `_, fixes `#14211 `_, by Hugo Herbelin). - **Fixed:** Anomaly when overriding a notation with different applicability in :g:`match` patterns (`#14377 `_, fixes `#13966 `_, by Hugo Herbelin). Tactics ^^^^^^^ .. _814Tactics: - **Changed:** More systematic checks that occurrences of an :n:`at` clause are valid in tactics such as :tacn:`rewrite` or :tacn:`pattern` (`#13568 `_, fixes `#13566 `_, by Hugo Herbelin). - **Removed:** :tacn:`fail` and :tacn:`gfail`, which formerly accepted negative values as a parameter, now give syntax errors for negative values (`#13469 `_, by Jim Fehrle). - **Removed:** Deprecated flag ``Bracketing Last Introduction Pattern`` affecting the behavior of trailing disjunctive introduction patterns is definitively removed (`#13509 `_, by Hugo Herbelin). - **Removed:** The `omega` tactic (deprecated in 8.12) and four `* Omega *` flags. Use `lia` instead. (`#13741 `_, by Jim Fehrle, who addressed the final details, building on much work by Frédéric Besson, who greatly improved :tacn:`lia`, Maxime Dénès, Vincent Laporte and with the help of many package maintainers, among others). - **Removed:** convert_concl_no_check. Use :tacn:`change_no_check` instead (`#13761 `_, by Jim Fehrle). - **Removed:** double induction tactic. Replace :n:`double induction @ident @ident` with :n:`induction @ident; induction @ident` (or :n:`induction @ident ; destruct @ident` depending on the exact needs). Replace :n:`double induction @natural__1 @natural__2` with :n:`induction @natural__1; induction natural__3` where :n:`natural__3` is the result of :n:`natural__2 - natural__1` (`#13762 `_, by Jim Fehrle). - **Deprecated:** In :tacn:`change` and :tacn:`change_no_check`, the `at ... with ...` form is deprecated. Use `with ... at ...` instead. For `at ... with ... in H |-`, use `with ... in H at ... |-`. (`#13696 `_, by Jim Fehrle). - **Deprecated:** The micromega option :flag:`Simplex`, which is currently set by default (`#13781 `_, by Frédéric Besson). - **Deprecated:** the undocumented `new auto` tactic (`#14528 `_, by Pierre-Marie Pédrot). - **Added:** :tacn:`lia` supports the boolean operator `Bool.implb` (`#13715 `_, by Frédéric Besson). - **Added:** ``zify`` (``lia``/``nia``) support for :g:`div`, :g:`mod`, :g:`pow` for :g:`Nat` (via ``ZifyNat`` module) and :g:`N` (via ``ZifyN`` module). The signature of :g:`Z_div_mod_eq_full` has no assumptions. (`#14037 `_, fixes `#11447 `_, by Andrej Dudenhefner, Jason Gross, and Frédéric Besson). - **Added:** Ltac2 now has a `unify` tactic (`#14089 `_, fixes `#14083 `_, by Samuel Gruetter). - **Added:** :tacn:`inversion_sigma` can now be applied to a specified hypothesis and additionally supports intropatterns, so it can be used much like :tacn:`induction` and :tacn:`inversion`. Additionally, :tacn:`inversion_sigma` now supports the types :n:`ex` (:n:`exists x : A, P x`) and :n:`ex2` (:n:`exists2 x : A, P x & Q x`) in cases where the first argument :n:`A` is a :n:`Prop` (`#14174 `_, by Jason Gross). - **Added:** ``zify`` (``lia``/``nia``) support for ``Sint63``. (`#14408 `_, by Ana Borges, with help from Frédéric Besson). 
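A small example of the kind of goal enabled by the ``zify`` extensions above,
assuming only that the ``ZifyNat`` module is loaded::

   From Coq Require Import Lia ZifyNat.

   (* The zify preprocessing now knows the defining equation of
      Nat.div and Nat.modulo, so lia can close the Euclidean identity. *)
   Goal forall n : nat, n = 2 * Nat.div n 2 + Nat.modulo n 2.
   Proof. intros n; lia. Qed.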
- **Fixed:** Possible collision between a user-level name and an internal name when using the :n:`%` introduction pattern (`#13512 `_, fixes `#13413 `_, by Hugo Herbelin). - **Fixed:** :tacn:`simpl` and :tacn:`hnf` now reduce primitive functions on primitive integers, floats and arrays (`#13699 `_, fixes `#13579 `_, by Pierre Roux). - **Fixed:** Setoid rewriting now remembers the (invisible) binder names of non-dependent product types. SSReflect's rewrite tactic expects these names to be retained when using ``rewrite foo in H``. This also fixes SSR ``rewrite foo in H *`` erroneously reverting ``H``. (`#13882 `_, fixes `#12011 `_, by Gaëtan Gilbert). - **Fixed:** Properly expand projection parameters in hint discrimination nets. (`#14033 `_, fixes `#9000 `_, `#14009 `_, by Pierre-Marie Pédrot). - **Fixed:** anomalies caused by empty strings in Ltac notations are now errors (`#14378 `_, fixes `#14124 `_, by Hugo Herbelin). - **Fixed:** Print a message instead of a Diff_Failure anomaly when old and new goals can't be matched; show the goal without diff highlights (`#14457 `_, fixes `#14425 `_, by Jim Fehrle). - **Fixed:** Anomaly of :tacn:`destruct` on terms with dependent variables unused in goal (`#15099 `_, fixes `#11504 `_ and `#14090 `_, by Lasse Blaauwbroek and Hugo Herbelin). - **Fixed:** Correct convertibility of multiple terms selected by patterns in tactics such as :tacn:`set` when these terms have subterms in `SProp` (`#14610 `_, fixes `#14609 `_, by Hugo Herbelin). Tactic language ^^^^^^^^^^^^^^^ .. _814Ltac2: - **Changed:** Renamed Ltac2 ``Bool.eq`` into ``Bool.equal`` for uniformity. The old function is now a deprecated alias (`#14128 `_, by Pierre-Marie Pédrot). - **Added:** A ``printf`` macro to Ltac2. It can be made accessible by importing the ``Ltac2.Printf`` module. See the documentation there for more information (`#13236 `_, fixes `#10108 `_, by Pierre-Marie Pédrot). - **Added:** A function ``Ltac1.lambda`` allowing to embed Ltac2 functions into Ltac1 runtime values (`#13442 `_, fixes `#12871 `_, by Pierre-Marie Pédrot). - **Added:** Ltac2 commands defining terms now accept the :attr:`deprecated` attribute (`#13774 `_, fixes `#12317 `_, by Pierre-Marie Pédrot). - **Added:** Allow the presence of type casts for function return values, let bindings and global definitions in Ltac2 (`#13914 `_, by Pierre-Marie Pédrot). - **Added:** The Ltac2 API `Ltac2.Ind` for manipulating inductive types (`#13920 `_, fixes `#10095 `_, by Pierre-Marie Pédrot). - **Added:** Allow scope delimiters in Ltac2 ``open_constr:(...)`` quotation (`#13939 `_, fixes `#12806 `_, by Pierre-Marie Pédrot). - **Added:** A FFI to convert between Ltac1 and Ltac2 identifiers (`#13997 `_, fixes `#13996 `_, by Pierre-Marie Pédrot). - **Added:** Lazy evaluating boolean operators ``lazy_and``, ``lazy_or``, ``lazy_impl`` and infix notations ``&&`` and ``||`` to the Ltac2 `Bool.v` library l. (`#14081 `_, fixes `#13964 `_, by Michael Soegtrop). - **Fixed:** Ltac2 notations now correctly take into account their assigned level (`#14094 `_, fixes `#11866 `_, by Pierre-Marie Pédrot). SSReflect ^^^^^^^^^ - **Added:** A test that the notations `{in _, _}` and `{pred _}` from `ssrbool.v` are displayed correctly. (`#13473 `_, by Cyril Cohen). - **Added:** Lemmas about interaction between :n:`{in _, _}`, :n:`{on _, _}`, and :n:`sig` have been backported from Mathematical Components 1.12.0 (`#13490 `_, by Kazuhiko Sakaguchi). 
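For the Ltac2 ``printf`` macro added above, a minimal usage sketch
(the formatted values are arbitrary)::

   From Ltac2 Require Import Ltac2 Printf.

   Goal True.
   Proof.
     (* %i formats an Ltac2 int, %t a constr in the current goal context. *)
     printf "the answer is %i" 42.
     printf "a term: %t" constr:(1 + 1).
     exact I.
   Qed.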
Commands and options ^^^^^^^^^^^^^^^^^^^^ - **Changed:** :cmd:`Hint Rewrite` now supports locality attributes (including :attr:`export`) like other :ref:`Hint ` commands (`#13725 `_, fixes `#13724 `_, by Gaëtan Gilbert). - **Changed:** In :cmd:`Record`, alpha-rename the variable associated with the record to avoid alpha-renaming parameters of projections (`#13852 `_, fixes `#13727 `_, by Li-yao Xia). - **Changed:** Improve the :cmd:`Coercion` command to reduce the number of ambiguous paths to report. A pair of multiple inheritance paths that can be reduced to smaller adjoining pairs will not be reported as ambiguous paths anymore. (`#13909 `_, by Kazuhiko Sakaguchi). - **Changed:** The printing order of :cmd:`Print Classes` and :cmd:`Print Graph`, due to the changes for the internal tables of coercion classes and coercion paths. (`#13912 `_, by Kazuhiko Sakaguchi). - **Removed:** The Hide Obligations flag, deprecated in 8.12 (`#13758 `_, by Jim Fehrle). - **Removed:** SearchHead command. Use the `headconcl:` clause of :cmd:`Search` instead (`#13763 `_, by Jim Fehrle). - **Removed:** `Show Zify Spec`, `Add InjTyp` and 11 similar `Add *` commands. For `Show Zify Spec`, use `Show Zify UnOpSpec` or `Show Zify BinOpSpec` instead. For `Add *`, `Use Add Zify *` intead of `Add *` (`#13764 `_, by Jim Fehrle). - **Deprecated:** Like hints, typeclass instances added outside of sections without an explicit locality now generate a deprecation warning. See :ref:`Hint ` (`#14208 `_, fixes `#13562 `_, by Pierre-Marie Pédrot). - **Deprecated:** the :flag:`Regular Subst Tactic` flag (`#14336 `_, by Pierre-Marie Pédrot). - **Added:** :opt:`Debug` to control debug messages, functioning similarly to the warning system (`#13202 `_, by Maxime Dénès and Gaëtan Gilbert). The following flags have been converted (such that ``Set Flag`` becomes ``Set Debug "flag"``): - ``Debug Unification`` to ``unification`` - ``Debug HO Unification`` to ``ho-unification`` - ``Debug Tactic Unification`` to ``tactic-unification`` - ``Congruence Verbose`` to ``congruence`` - ``Debug Cbv`` to ``cbv`` - ``Debug RAKAM`` to ``RAKAM`` - ``Debug Ssreflect`` to ``ssreflect`` - **Added:** The Ltac2 grammar can now be printed using the Print Grammar ltac2 command (`#14093 `_, fixes `#14092 `_, by Pierre-Marie Pédrot). - **Added:** :cmd:`Instance` now accepts the :attr:`export` locality attribute (`#14148 `_, by Pierre-Marie Pédrot). - **Fixed:** extraction failure of a parameterized type in :g:`Prop` exported in an module interface as an assumption in :g:`Type` (`#14102 `_, fixes `#14100 `_, by Hugo Herbelin). - **Fixed:** Print Assumptions now treats delayed opaque proofs generated by vos compilation as if they were axioms (`#14382 `_, fixes `#13589 `_, by Pierre-Marie Pédrot). - **Fixed:** Incorrect de Bruijn index handling in vernac class declaration, preventing users from marking existing instances of existing classes which are primitive projections (`#14664 `_, fixes `#14652 `_, by Ali Caglayan and Hugo Herbelin). Command-line tools ^^^^^^^^^^^^^^^^^^ - **Changed:** `coqc` now enforces that at most a single `.v` file can be passed in the command line. Support for multiple `.v` files in the form of `coqc f1.v f2.v` didn't properly work in 8.13, tho it was accepted. (`#13876 `_, by Emilio Jesus Gallego Arias). - **Changed:** ``coqdep`` now reports an error if files specified on the command line don't exist or if it encounters unreadable files. Unknown options now generate a warning. Previously these conditions were ignored. 
(`#14024 `_, fixes `#14023 `_, by Hendrik Tews). - **Changed:** Makefiles produced by ``coq_makefile`` now use ``.DELETE_ON_ERROR`` (`#14238 `_, by Gaëtan Gilbert). - **Removed:** Previously deprecated command line options ``-sprop-cumulative`` and ``-input-state`` and its alias ``-is`` (`#13822 `_, by Gaëtan Gilbert). - **Added:** ``coq_makefile``\-made ``Makefile``\s now support inclusion of a ``.local-late`` file at the end, allowing the user to access more variables (`#12411 `_, fixes `#10912 `_, by Jason Gross). - **Fixed:** Failure of extraction in the presence of inductive types with local definitions in parameters (`#13624 `_, fixes `#13581 `_, by Hugo Herbelin). - **Fixed:** File name was missing in coqdoc error position reporting (`#14285 `_, fixes `#14283 `_, by Arthur Charguéraud and Hugo Herbelin). Native Compilation ^^^^^^^^^^^^^^^^^^ .. _814CoqNative: - **Changed:** `coq_makefile` now uses the `coqnative` binary to generate native compilation files. Project files also understand directly the `-native-compiler` flag without having to wrap it with `-arg` (`#14265 `_, by Pierre-Marie Pédrot). - **Deprecated:** the `-native-compiler` option for coqc. It is now recommended to use the :ref:`coqnative` binary instead to generate native compilation files ahead of time (`#14309 `_, by Pierre-Marie Pédrot). - **Added:** A standalone `coqnative` binary that performs native compilation out of `vo` files, allowing to split library compilation from native compilation. See :ref:`coqnative`. The hybrid build system was adapted to perform a split compilation on the stdlib (`#13287 `_, by Pierre-Marie Pédrot). CoqIDE ^^^^^^ .. _814CoqIDE: - **Added:** Ltac debugger support in CoqIDE (see :flag:`Ltac Debug`). Debugger output and prompts appear in the Messages panel (`#13783 `_, by Jim Fehrle and Emilio J. Gallego Arias). - **Added:** Shift-return in the Find dialog now searches backwards (`#13810 `_, by slrnsc). Standard library ^^^^^^^^^^^^^^^^ .. _814Stdlib: - **Changed:** Minor Changes to ``Rpower``: Generalizes ``exp_ineq1`` to hold for all non-zero numbers. Adds ``exp_ineq1_le``, which holds for all reals (but is a ``<=`` instead of a ``<``). (`#13582 `_, by Avi Shinnar and Barry Trager, with help from Laurent Théry). - **Changed:** set :g:`n mod 0 = n` uniformly for :g:`nat`, :g:`N`, :g:`Z`, :g:`int63`, :g:`sint63`, :g:`int31` such that :g:`m = (m / n) * n + (m mod n)` holds (also for :g:`n = 0`) .. warning:: code that relies on :g:`n mod 0 = 0` will break; for compatibility with both :g:`n mod 0 = n` and :g:`n mod 0 = 0` you can use :g:`n mod 0 = ltac:(match eval hnf in (1 mod 0) with |0 => exact 0 |_ => exact n end)` (`#14086 `_, by Andrej Dudenhefner with help of Guillaume Melquiond, Jason Gross, and Kazuhiko Sakaguchi). - **Changed:** The standard library now contains a more complete theory of equality on types of the form :g:`exists x : A, P x` and :g:`exists2 x : A, P x & Q x` when we have :g:`A : Prop`. To bring this theory more in line with the existing theory about sigma types, :g:`eq_ex_uncurried`, :g:`eq_ex2_uncurried`, :g:`eq_ex`, :g:`eq_ex2`, :g:`eq_ex_hprop`, :g:`eq_ex2_hprop` have been renamed into :g:`eq_ex_intro_uncurried`, :g:`eq_ex_intro2_uncurried`, :g:`eq_ex_intro`, :g:`eq_ex_intro2`, :g:`eq_ex_intro_hprop`, :g:`eq_ex_intro2_hprop` respectively and the implicit status of these lemmas has changed slightly (`#14174 `_, by Jason Gross). 
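A minimal check of the new ``n mod 0 = n`` convention for :g:`nat` described
above; assuming the updated definition reduces on a literal divisor, the goal
is closed by conversion::

   Goal forall n : nat, Nat.modulo n 0 = n.
   Proof. intros n. reflexivity. Qed.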
- **Changed** Moved 39 lemmas and notations about the rationals `Q` from the constructive reals private file `theories/Reals/Cauchy/QExtra.v` to appropriate files in `theories/QArith`. The now public lemmas are mostly about compatibility of multiplication and power with relational operators and simple convenience lemmas e.g. for reduction of `Q` values. The following moved lemmas have been renamed: `Q_factorDenom` to `Qmult_frac_l`, `Q_reduce_fl` to `Qreduce_num_l`, `Qle_neq` to `Qlt_leneq`, `Qmult_lt_le_compat_nonneg` to `Qmult_le_lt_compat_pos`, `Qpower_pos_lt` to `Qpower_0_lt`, `Qpower_lt_1_increasing` to `Qpower_1_lt_pos`, `Qpower_lt_1_increasing'` to `Qpower_1_lt`, `Qpower_le_1_increasing` to `Qpower_1_le_pos`, `Qpower_le_1_increasing'` to `Qpower_1_le`, `Qzero_eq` to `Qreduce_zero`, `Qpower_lt_compat` to `Qpower_lt_compat_l`, `Qpower_le_compat` to `Qpower_le_compat_l`, `Qpower_lt_compat_inv` to `Qpower_lt_compat_l_inv`, `Qpower_le_compat_inv` to `Qpower_le_compat_l_inv`, `Qpower_decomp'` to `Qpower_decomp_pos` and `QarchimedeanExp2_Pos` to `Qarchimedean_power2_pos`. The following lemmas have been renamed and the sides of the equality swapped: `Qinv_swap_pos` to `Qinv_pos`, `Qinv_swap_neg` to `Qinv_neg` and. The following lemmas have been deleted: `Q_factorNum_l` and `Q_factorNum`. The lemma `Qopp_lt_compat` has been moved from `theories/QArith/Qround.v` to `theories/QArith/QArith_base.v`. About 10 additional lemmas have been added for similar cases as the moved lemmas. Compatibility notations are not provided because QExtra is considered internal (excluded from the library documentation). (`#14293 `_, by Michael Soegtrop). - **Changed:** Importing `ZArith` no longer has the side-effect of closing `Z_scope`. (`#14343 `_, fixes `#13307 `_, by Ralf Jung). - **Removed:** ``IF_then_else`` definition and corresponding ``IF P then Q else R`` notation (`#13871 `_, by Yishuai Li). - **Removed:** from ``List.v`` deprecated/unexpected dependencies ``Setoid``, ``Le``, ``Gt``, ``Minus``, ``Lt`` (`#13986 `_, by Andrej Dudenhefner). - **Deprecated:** Unsigned primitive integers are now named ``uint63`` instead of ``int63``. The ``Int63`` module is replaced by ``Uint63``. The full list of changes is described in the PR. (`#13895 `_, by Ana Borges). - **Added:** ``leb`` and ``ltb`` functions for ``ascii`` (`#13080 `_, by Yishuai Li). - **Added:** Library for signed primitive integers, Sint63. The following operations were added to the kernel: division, remainder, comparison functions, and arithmetic shift right. Everything else works the same for signed and unsigned ints. (`#13559 `_, fixes `#12109 `_, by Ana Borges, Guillaume Melquiond and Pierre Roux). - **Added:** Lemmas about vectors related with ``to_list``: ``length_to_list``, ``of_list_to_list_opp``, ``to_list_nil``, ``to_list_cons``, ``to_list_hd``, ``to_list_last``, ``to_list_const``, ``to_list_nth_order``, ``to_list_tl``, ``to_list_append``, ``to_list_rev_append_tail``, ``to_list_rev_append``, ``to_list_rev``, ``to_list_map``, ``to_list_fold_left``, ``to_list_fold_right``, ``to_list_Forall``, ``to_list_Exists``, ``to_list_In``, ``to_list_Forall2`` (`#13671 `_, by Olivier Laurent). - **Added:** Lemmas about ``count_occ``: ``count_occ_app``, ``count_occ_elt_eq``, ``count_occ_elt_neq``, ``count_occ_bound``, ``count_occ_repeat_eq``, ``count_occ_repeat_neq``, ``count_occ_unique``, ``count_occ_repeat_excl``, ``count_occ_sgt``, ``Permutation_count_occ`` (`#13804 `_, by Olivier Laurent with help of Jean-Christophe Léchenet). 
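A minimal sketch using the new ``Sint63`` library mentioned above; only the
signed literal notation is shown, assuming the scope delimiting key is
``sint63``::

   From Coq Require Import Sint63.

   Check (-3)%sint63.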
- **Added:** Lemmas to ``List``: ``Exists_map``, ``Exists_concat``, ``Exists_flat_map``, ``Forall_map``, ``Forall_concat``, ``Forall_flat_map``, ``nth_error_map``, ``nth_repeat``, ``nth_error_repeat`` (`#13955 `_, by Andrej Dudenhefner, with help from Olivier Laurent). - **Added:** ``Cantor.v`` containing the Cantor pairing function and its inverse. ``Cantor.to_nat : nat * nat -> nat`` and ``Cantor.of_nat : nat -> nat * nat`` are the respective bijections between ``nat * nat`` and ``nat``. (`#14008 `_, by Andrej Dudenhefner). - **Added:** Lemmas to ``Q``: ``Qeq_from_parts``, ``Qden_cancel``, ``Qnum_cancel``, ``Qreduce_l``, ``Qreduce_r``, ``Qmult_inject_Z_l``, ``Qmult_inject_Z_r`` QArith_base Reduction of rationals; establishing equality for Qden/Qnum separately. (`#14087 `_, by Karolin Varner). - **Added:** ``Coq.Structures.OrdersEx.String_as_OT`` and ``Coq.Structures.OrdersEx.Ascii_as_OT`` to make strings and ascii ordered types (using lexical order). (`#14096 `_, by Jason Gross). - **Added:** Lemmas :g:`app_eq_app`, :g:`Forall_nil_iff`, :g:`Forall_cons_iff` to ``List.v`` (`#14153 `_, closes `#1803 `_, by Andrej Dudenhefner, with help from Olivier Laurent). - **Added:** ``Z``, ``positive`` and ``N`` constants can now be printed in hexadecimal by opening ``hex_Z_scope``, ``hex_positive_scope``, and ``hex_N_scope`` respectively (`#14263 `_, by Jason Gross). - **Added:** Absolute value function for Sint63. (`#14384 `_, by Ana Borges). - **Added:** Lemmas showing :g:`firstn` and :g:`skipn` commute with :g:`map`. (`#14406 `_, by Rudy Peterson). - **Fixed:** Haskell extraction is now compatible with GHC versions >= 9.0. Some ``#if`` statements have been added to extract ``unsafeCoerce`` to its new location in newer versions of GHC. (`#14345 `_, fixes `#14256 `_, by Jason Gross). Infrastructure and dependencies ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. _814Dune: - **Changed:** Coq's configure script now requires absolute paths for the `-prefix` option. (`#12567 `_, by Emilio Jesus Gallego Arias). - **Changed:** The regular Coq package has been split in two: coq-core, with OCaml-based libraries and tools; and coq-stdlib, which contains the Gallina-based standard library. The package Coq now depends on both for compatiblity. (`#12567 `_, by Emilio Jesus Gallego Arias, review by Vincent Laporte, Guillaume Melquiond, Enrico Tassi, and Théo Zimmerman). - **Changed:** Coq's OCaml parts and tools [``coq-core``] are now built using Dune. The main user-facing change is that Dune >= 2.5 is now required to build Coq. This was a large and complex change. If you are packager you may find some minor differences if you were using a lot of custom optimizations. Note that, in particular, the configure option ``-datadir`` is not customizable anymore, and ``-bindir`` has been removed in favor of ``$prefix/bin``. Moreover, the install procedure will ignore ``-docdir`` and ``-etcdir``, unless you patch the makefile and use Dune >= 2.9. We usually recommended using a recent Dune version, if possible. For developers and plugin authors, see the entry in `dev/doc/changes.md`. For packagers and users, see `dev/doc/INSTALL.make.md`. (`#13617 `_, by Emilio Jesús Gallego Arias, Rudi Grinberg, and Théo Zimmerman; review and testing by Gaëtan Gilbert, Guillaume Melquiond, and Enrico Tassi) - **Changed:** Undocumented variables ``OLDROOT`` and ``COQPREFIXINSTALL`` which added a prefix path to ``make install`` have been removed. 
Now, ``make install`` does support the more standard ``DESTDIR`` variable, akin to what ``coq_makefile`` does. (`#14258 `_, by Emilio Jesus Gallego Arias). - **Added:** Support OCaml 4.12 (`#13885 `_, by Emilio Jesus Gallego Arias, review by Gaëtan Gilbert and Théo Zimmerman). Miscellaneous ^^^^^^^^^^^^^ - **Changed:** The representation of micromega caches was slightly altered for efficiency purposes. As a consequence all stale caches must be cleaned up (`#13405 `_, by Pierre-Marie Pédrot). - **Fixed:** Fix the timeout facility on Unix to allow for nested timeouts. Previous behavior on nested timeouts was that an "inner" timeout would replace an "outer" timeout, so that the outer timeout would no longer fire. With the new behavior, Unix and Windows implementations should be (approximately) equivalent. (`#13586 `_, by Lasse Blaauwbroek). Changes in 8.14.1 ~~~~~~~~~~~~~~~~~ Kernel ^^^^^^ - **Fixed:** Fix the implementation of persistent arrays used by the VM and native compute so that it uses a uniform representation. Previously, storing primitive floats inside primitive arrays could cause memory corruption (`#15081 `_, closes `#15070 `_, by Pierre-Marie Pédrot). Specification language, type inference ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - **Fixed:** Missing registration of universe constraints in :cmd:`Module Type` elaboration (`#14666 `_, fixes `#14505 `_, by Hugo Herbelin). Tactics ^^^^^^^ - **Fixed:** :tacn:`abstract` more robust with respect to Ltac `constr` bindings containing existential variables (`#14671 `_, fixes `#10796 `_, by Hugo Herbelin). - **Fixed:** correct support of trailing :n:`let` by tactic :tacn:`specialize` (`#15046 `_, fixes `#15043 `_, by Hugo Herbelin). Commands and options ^^^^^^^^^^^^^^^^^^^^ - **Fixed:** anomaly with :flag:`Extraction Conservative Types` when extracting pattern-matching on singleton types (`#14669 `_, fixes `#3527 `_, by Hugo Herbelin). - **Fixed:** a regular error instead of an anomaly when calling :cmd:`Separate Extraction` in a module (`#14670 `_, fixes `#10796 `_, by Hugo Herbelin). Version 8.13 ------------ Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.13 integrates many usability improvements, as well as extensions of the core language. The main changes include: - :ref:`Introduction <813PrimArrays>` of :ref:`primitive persistent arrays` in the core language, implemented using imperative persistent arrays. - Introduction of :ref:`definitional proof irrelevance <813UIP>` for the equality type defined in the SProp sort. - Cumulative record and inductive type declarations can now :ref:`specify <813VarianceDecl>` the variance of their universes. - Various bugfixes and uniformization of behavior with respect to the use of implicit arguments and the handling of existential variables in declarations, unification and tactics. - New warning for :ref:`unused variables <813UnusedVar>` in catch-all match branches that match multiple distinct patterns. - New :ref:`warning <813HintWarning>` for `Hint` commands outside sections without a locality attribute, whose goal is to eventually remove the fragile default behavior of importing hints only when using `Require`. The recommended fix is to declare hints as `export`, instead of the current default `global`, meaning that they are imported through `Require Import` only, not `Require`. See the following `rationale and guidelines `_ for details. - General support for :ref:`boolean attributes <813BooleanAttrs>`. 
- Many improvements to the handling of :ref:`notations <813Notations>`, including number notations, recursive notations and notations with bindings. A new algorithm chooses the most precise notation available to print an expression, which might introduce changes in printing behavior. - Tactic :ref:`improvements <813Tactics>` in :tacn:`lia` and its :tacn:`zify` preprocessing step, now supporting reasoning on boolean operators such as :g:`Z.leb` and supporting primitive integers :g:`Int63`. - Typing flags can now be specified :ref:`per-constant / inductive <813TypingFlags>`. - Improvements to the reference manual including updated syntax descriptions that match Coq's grammar in several chapters, and splitting parts of the tactics chapter to independent sections. See the `Changes in 8.13+beta1`_ section and following sections for the detailed list of changes, including potentially breaking changes marked with **Changed**. Coq's documentation is available at https://coq.github.io/doc/v8.13/refman (reference manual), and https://coq.github.io/doc/v8.13/stdlib (documentation of the standard library). Developer documentation of the ML API is available at https://coq.github.io/doc/v8.13/api. Maxime Dénès, Emilio Jesús Gallego Arias, Gaëtan Gilbert, Michael Soegtrop and Théo Zimmermann worked on maintaining and improving the continuous integration system and package building infrastructure. Erik Martin-Dorel has maintained the `Coq Docker images `_ that are used in many Coq projects for continuous integration. The OPAM repository for Coq packages has been maintained by Guillaume Claret, Karl Palmskog, Matthieu Sozeau and Enrico Tassi with contributions from many users. A list of packages is available at https://coq.inria.fr/opam/www/. Our current 32 maintainers are Yves Bertot, Frédéric Besson, Tej Chajed, Cyril Cohen, Pierre Corbineau, Pierre Courtieu, Maxime Dénès, Jim Fehrle, Julien Forest, Emilio Jesús Gallego Arias, Gaëtan Gilbert, Georges Gonthier, Benjamin Grégoire, Jason Gross, Hugo Herbelin, Vincent Laporte, Olivier Laurent, Assia Mahboubi, Kenji Maillard, Guillaume Melquiond, Pierre-Marie Pédrot, Clément Pit-Claudel, Kazuhiko Sakaguchi, Vincent Semeria, Michael Soegtrop, Arnaud Spiwack, Matthieu Sozeau, Enrico Tassi, Laurent Théry, Anton Trunov, Li-yao Xia and Théo Zimmermann. The 51 contributors to this version are Reynald Affeldt, Tanaka Akira, Frédéric Besson, Lasse Blaauwbroek, Clément Blaudeau, Martin Bodin, Ali Caglayan, Tej Chajed, Cyril Cohen, Julien Coolen, Matthew Dempsky, Maxime Dénès, Andres Erbsen, Jim Fehrle, Emilio Jesús Gallego Arias, Paolo G. Giarrusso, Attila Gáspár, Gaëtan Gilbert, Jason Gross, Benjamin Grégoire, Hugo Herbelin, Wolf Honore, Jasper Hugunin, Ignat Insarov, Ralf Jung, Fabian Kunze, Vincent Laporte, Olivier Laurent, Larry D. Lee Jr, Thomas Letan, Yishuai Li, James Lottes, Jean-Christophe Léchenet, Kenji Maillard, Erik Martin-Dorel, Yusuke Matsushita, Guillaume Melquiond, Carl Patenaude-Poulin, Clément Pit-Claudel, Pierre-Marie Pédrot, Pierre Roux, Kazuhiko Sakaguchi, Vincent Semeria, Michael Soegtrop, Matthieu Sozeau, Enrico Tassi, Anton Trunov, Edward Wang, Li-yao Xia, Beta Ziliani and Théo Zimmermann. The Coq community at large helped improve the design of this new version via the GitHub issue and pull request system, the Coq development mailing list coqdev@inria.fr, the coq-club@inria.fr mailing list, the `Discourse forum `_ and the `Coq Zulip chat `_. Version 8.13's development spanned 5 months from the release of Coq 8.12.0. 
Enrico Tassi and Maxime Dénès are the release managers of Coq 8.13. This release is the result of 400 merged PRs, closing ~100 issues. | Nantes, November 2020, | Matthieu Sozeau for the Coq development team | Changes in 8.13+beta1 ~~~~~~~~~~~~~~~~~~~~~ .. contents:: :local: Kernel ^^^^^^ .. _813UIP: - **Added:** Definitional UIP, only when :flag:`Definitional UIP` is enabled. This models definitional uniqueness of identity proofs for the equality type in SProp. It is deactivated by default as it can lead to non-termination in combination with impredicativity. Use of this flag is also printed by :cmd:`Print Assumptions`. See documentation of the flag for details. (`#10390 `_, by Gaëtan Gilbert). .. _813PrimArrays: - **Added:** Built-in support for persistent arrays, which expose a functional interface but are implemented using an imperative data structure, for better performance. (`#11604 `_, by Maxime Dénès and Benjamin Grégoire, with help from Gaëtan Gilbert). Primitive arrays are irrelevant in their single polymorphic universe (same as a polymorphic cumulative list inductive would be) (`#13356 `_, fixes `#13354 `_, by Gaëtan Gilbert). - **Fixed:** A loss of definitional equality for declarations obtained through :cmd:`Include` when entering the scope of a :cmd:`Module` or :cmd:`Module Type` was causing :cmd:`Search` not to see the included declarations (`#12537 `_, fixes `#12525 `_ and `#12647 `_, by Hugo Herbelin). - **Fixed:** Fix an incompleteness in the typechecking of `match` for cumulative inductive types. This could result in breaking subject reduction. (`#13501 `_, fixes `#13495 `_, by Matthieu Sozeau). Specification language, type inference ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. _813BooleanAttrs: - **Changed:** :term:`Boolean attributes ` are now specified using key/value pairs, that is to say :n:`@ident__attr{? = {| yes | no } }`. If the value is missing, the default is :n:`yes`. The old syntax is still supported, but produces the ``deprecated-attribute-syntax`` warning. Deprecated attributes are ``universes(monomorphic)``, ``universes(notemplate)`` and ``universes(noncumulative)``, which are respectively replaced by :attr:`universes(polymorphic=no) `, :attr:`universes(template=no) ` and :attr:`universes(cumulative=no) `. Attributes :attr:`program` and :attr:`canonical` are also affected, with the syntax :n:`@ident__attr(false)` being deprecated in favor of :n:`@ident__attr=no`. (`#13312 `_, by Emilio Jesus Gallego Arias). - **Changed:** Heuristics for universe minimization to :g:`Set`: also use constraints ``Prop <= i`` (`#10331 `_, by Gaëtan Gilbert with help from Maxime Dénès and Matthieu Sozeau, fixes `#12414 `_). - **Changed:** The type given to :cmd:`Instance` is no longer automatically generalized over unbound and :ref:`generalizable ` variables. Use ``Instance : `{type}`` instead of :n:`Instance : @type` to get the old behavior, or enable the compatibility flag ``Instance Generalized Output``. (`#13188 `_, fixes `#6042 `_, by Gaëtan Gilbert). - **Changed:** Tweaked the algorithm giving default names to arguments. Should reduce the frequency that argument names get an unexpected suffix. Also makes :flag:`Mangle Names` not mess up argument names. (`#12756 `_, fixes `#12001 `_ and `#6785 `_, by Jasper Hugunin). - **Removed:** Undocumented and experimental forward class hint feature ``:>>``. Use ``:>`` (see :n:`@of_type`) instead (`#13106 `_, by Pierre-Marie Pédrot). .. 
_813VarianceDecl: - **Added:** Commands :cmd:`Inductive`, :cmd:`Record` and synonyms now support syntax `Inductive foo@{=i +j *k l}` to specify variance information for their universes (in :ref:`Cumulative ` mode) (`#12653 `_, by Gaëtan Gilbert). .. _813UnusedVar: - **Added:** Warning on unused variables in pattern-matching branches of :n:`match` serving as catch-all branches for at least two distinct patterns. (`#12768 `_, fixes `#12762 `_, by Hugo Herbelin). - **Added:** Definition and (Co)Fixpoint now support the :attr:`using` attribute. It has the same effect as :cmd:`Proof using`, which is only available in interactive mode. (`#13183 `_, by Enrico Tassi). .. _813TypingFlags: - **Added:** Typing flags can now be specified per-constant / inductive, this allows to fine-grain specify them from plugins or attributes. See :ref:`controlling-typing-flags` for details on attribute syntax. (`#12586 `_, by Emilio Jesus Gallego Arias). - **Added:** Inference of return predicate of a :g:`match` by inversion takes sort elimination constraints into account (`#13290 `_, grants `#13278 `_, by Hugo Herbelin). - **Fixed:** Implicit arguments taken into account in defined fields of a record type declaration (`#13166 `_, fixes `#13165 `_, by Hugo Herbelin). - **Fixed:** Allow use of typeclass inference for the return predicate of a :n:`match` (was deactivated in versions 8.10 to 8.12, `#13217 `_, fixes `#13216 `_, by Hugo Herbelin). - **Fixed:** A case of unification raising an anomaly IllTypedInstance (`#13376 `_, fixes `#13266 `_, by Hugo Herbelin). - **Fixed:** Using :n:`{wf ...}` in local fixpoints is an error, not an anomaly (`#13383 `_, fixes `#11816 `_, by Hugo Herbelin). - **Fixed:** Issue when two expressions involving different projections and one is primitive need to be unified (`#13386 `_, fixes `#9971 `_, by Hugo Herbelin). - **Fixed:** A bug producing ill-typed instances of existential variables when let-ins interleaved with assumptions (`#13387 `_, fixes `#12348 `_, by Hugo Herbelin). .. _813Notations: Notations ^^^^^^^^^ - **Changed:** In notations (except in custom entries), the misleading :n:`@syntax_modifier` :n:`@ident ident` (which accepted either an identifier or a :g:`_`) is deprecated and should be replaced by :n:`@ident name`. If the intent was really to only parse identifiers, this will eventually become possible, but only as of Coq 8.15. In custom entries, the meaning of :n:`@ident ident` is silently changed from parsing identifiers or :g:`_` to parsing only identifiers without warning, but this presumably affects only rare, recent and relatively experimental code (`#11841 `_, fixes `#9514 `_, by Hugo Herbelin). - **Changed:** Improved support for notations/abbreviations with mixed terms and patterns (such as the forcing modality) (`#12099 `_, by Hugo Herbelin). - **Changed** Rational and real constants are parsed differently. The exponent is now encoded separately from the fractional part using ``Z.pow_pos``. This way, parsing large exponents can no longer blow up and constants are printed in a form closer to the one in which they were parsed (i.e., ``102e-2`` is reprinted as such and not ``1.02``). (`#12218 `_, by Pierre Roux). - **Changed:** Scope information is propagated in indirect applications to a reference prefixed with :g:`@`; this covers for instance the case :g:`r.(@p) t` where scope information from :g:`p` is now taken into account for interpreting :g:`t` (`#12685 `_, by Hugo Herbelin). 
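
As a small illustration of the :attr:`using` attribute described above, here is a minimal sketch (the section and names are only for the example; the attribute has the same effect as :cmd:`Proof using`)::

   Section UsingDemo.
     Variable n : nat.

     (* Same effect as "Proof using n": the constant is generalized over n
        when the section is closed, even though the body does not use n. *)
     #[using="n"] Definition forty_two : nat := 42.
   End UsingDemo.

   Check forty_two.   (* expected to have type nat -> nat after the section *)
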
- **Changed:** New model for ``only parsing`` and ``only printing`` notations with support for at most one parsing-and-printing or only-parsing notation per notation and scope, but an arbitrary number of only-printing notations (`#12950 `_, fixes `#4738 `_ and `#9682 `_ and part 2 of `#12908 `_, by Hugo Herbelin). - **Changed:** Redeclaring a notation also reactivates its printing rule; in particular a second :cmd:`Import` of the same module reactivates the printing rules declared in this module. In theory, this leads to changes in behavior for printing. However, this is mitigated in general by the adoption in `#12986 `_ of a priority given to notations which match a larger part of the term to print (`#12984 `_, fixes `#7443 `_ and `#10824 `_, by Hugo Herbelin). - **Changed:** Use of notations for printing now gives preference to notations which match a larger part of the term to abbreviate (`#12986 `_, by Hugo Herbelin). - **Removed** OCaml parser and printer for real constants have been removed. Real constants are now handled with proven Coq code. (`#12218 `_, by Pierre Roux). - **Deprecated** ``Numeral.v`` is deprecated, please use ``Number.v`` instead. (`#12218 `_, by Pierre Roux). - **Deprecated:** `Numeral Notation`, please use :cmd:`Number Notation` instead (`#12979 `_, by Pierre Roux). - **Added:** :flag:`Printing Float` flag to print primitive floats as hexadecimal instead of decimal values. This is included in the :flag:`Printing All` flag (`#11986 `_, by Pierre Roux). - **Added:** :ref:`Number Notation ` and :ref:`String Notation ` commands now support parameterized inductive and non-inductive types (`#12218 `_, fixes `#12035 `_, by Pierre Roux, review by Jason Gross and Jim Fehrle for the reference manual). - **Added:** Added support for encoding notations of the form :g:`x ⪯ y ⪯ .. ⪯ z ⪯ t`. This feature is considered experimental. (`#12765 `_, by Hugo Herbelin). - **Added:** The :n:`@binder` entry of :cmd:`Notation` can now be used in notations expecting a single (non-recursive) binder (`#13265 `_, by Hugo Herbelin, see section :ref:`notations-and-binders` of the reference manual). - **Fixed:** Issues in the presence of notations recursively referring to another applicative notations, such as missing scope propagation, or failure to use a notation for printing (`#12960 `_, fixes `#9403 `_ and `#10803 `_, by Hugo Herbelin). - **Fixed:** Capture the names of global references by binders in the presence of notations for binders (`#12965 `_, fixes `#9569 `_, by Hugo Herbelin). - **Fixed:** Preventing notations for constructors to involve binders (`#13092 `_, fixes `#13078 `_, by Hugo Herbelin). - **Fixed:** Notations understand universe names without getting confused by different imported modules between declaration and use locations (`#13415 `_, fixes `#13303 `_, by Gaëtan Gilbert). .. _813Tactics: Tactics ^^^^^^^ - **Changed:** In :tacn:`refine`, new existential variables unified with existing ones are no longer considered as fresh. The behavior of :tacn:`simple refine` no longer depends on the orientation of evar-evar unification problems, and new existential variables are always turned into (unshelved) goals. This can break compatibility in some cases (`#7825 `_, by Matthieu Sozeau, with help from Maxime Dénès, review by Pierre-Marie Pédrot and Enrico Tassi, fixes `#4095 `_ and `#4413 `_). - **Changed:** Giving an empty list of occurrences after :n:`in` in tactics is no longer permitted. 
Omitting the :n:`in` gives the same behavior (`#13237 `_, fixes `#13235 `_, by Hugo Herbelin). - **Removed:** :n:`at @occs_nums` clauses in tactics such as :tacn:`unfold` no longer allow negative values. A "-" before the list (for set complement) is still supported. Ex: "at -1 -2" is no longer supported but "at -1 2" is. (`#13403 `_, by Jim Fehrle). - **Removed:** A number of tactics that formerly accepted negative numbers as parameters now give syntax errors for negative values. These include {e}constructor, do, timeout, 9 {e}auto tactics and psatz*. (`#13417 `_, by Jim Fehrle). - **Removed:** The deprecated and undocumented `prolog` tactic was removed (`#12399 `_, by Pierre-Marie Pédrot). - **Removed:** `info` tactic that was deprecated in 8.5. (`#12423 `_, by Jim Fehrle). - **Deprecated:** Undocumented :n:`eauto @nat_or_var @nat_or_var` syntax in favor of new :tacn:`bfs eauto`. Also deprecated 2-integer syntax for :tacn:`debug eauto` and :tacn:`info_eauto`. (Use :tacn:`bfs eauto` with the :flag:`Info Eauto` or :flag:`Debug Eauto` flags instead.) (`#13381 `_, by Jim Fehrle). - **Added:** :tacn:`lia` is extended to deal with boolean operators e.g. `andb` or `Z.leb`. (As `lia` gets more powerful, this may break proof scripts relying on `lia` failure.) (`#11906 `_, by Frédéric Besson). - **Added:** :tacn:`apply … in` supports several hypotheses (`#12246 `_, by Hugo Herbelin; grants `#9816 `_). - **Added:** The :tacn:`zify` tactic can now be extended by redefining the `zify_pre_hook` tactic. (`#12552 `_, by Kazuhiko Sakaguchi). - **Added:** The :tacn:`zify` tactic provides support for primitive integers (module :g:`ZifyInt63`). (`#12648 `_, by Frédéric Besson). - **Fixed:** Avoid exposing an internal name of the form :n:`_tmp` when applying the :n:`_` introduction pattern which would break a dependency (`#13337 `_, fixes `#13336 `_, by Hugo Herbelin). - **Fixed:** The case of tactics, such as :tacn:`eapply`, producing existential variables under binders with an ill-formed instance (`#13373 `_, fixes `#13363 `_, by Hugo Herbelin). Tactic language ^^^^^^^^^^^^^^^ - **Added:** An if-then-else syntax to Ltac2 (`#13232 `_, fixes `#10110 `_, by Pierre-Marie Pédrot). - **Fixed:** Printing of the quotation qualifiers when printing :g:`Ltac` functions (`#13028 `_, fixes `#9716 `_ and `#13004 `_, by Hugo Herbelin). SSReflect ^^^^^^^^^ - **Added:** SSReflect intro pattern ltac views ``/[dup]``, ``/[swap]`` and ``/[apply]`` (`#13317 `_, by Cyril Cohen). - **Fixed:** Working around a bug of interaction between + and /(ltac:(...)) cf `#13458 `_ (`#13459 `_, by Cyril Cohen). Commands and options ^^^^^^^^^^^^^^^^^^^^ - **Changed:** Drop prefixes from grammar non-terminal names, e.g. "constr:global" -> "global", "Prim.name" -> "name". Visible in the output of :cmd:`Print Grammar` and :cmd:`Print Custom Grammar`. (`#13096 `_, by Jim Fehrle). - **Changed:** When declaring arbitrary terms as hints, unsolved evars are not abstracted implicitly anymore and instead raise an error (`#13139 `_, by Pierre-Marie Pédrot). - **Removed:** In the :cmd:`Extraction Language` command, remove `Ocaml` as a valid value. Use `OCaml` instead. This was deprecated in Coq 8.8, `#6261 `_ (`#13016 `_, by Jim Fehrle). .. _813HintWarning: - **Deprecated:** Hint locality currently defaults to :attr:`local` in a section and :attr:`global` otherwise, but this will change in a future release. Hints added outside of sections without an explicit locality now generate a deprecation warning. 
We recommend using :attr:`export` where possible (`#13384 `_, by Pierre-Marie Pédrot).

- **Deprecated:** ``Grab Existential Variables`` and ``Existential`` commands (`#12516 `_, by Maxime Dénès).
- **Added:** The :attr:`export` locality can now be used for all Hint commands, including :cmd:`Hint Cut`, :cmd:`Hint Mode`, :cmd:`Hint Transparent` / ``Hint Opaque`` and :cmd:`Remove Hints`; a small example sketch appears below (`#13388 `_, by Pierre-Marie Pédrot).
- **Added:** Support for automatic insertion of coercions in :cmd:`Search` patterns. Additionally, head patterns are now automatically interpreted as types (`#13255 `_, fixes `#13244 `_, by Hugo Herbelin).
- **Added:** The :cmd:`Proof using` command can now be used without loading the Ltac plugin (`-noinit` mode) (`#13339 `_, by Théo Zimmermann).
- **Added:** Clarify in the documentation that :cmd:`Add ML Path` is not exported to compiled files (`#13345 `_, fixes `#13344 `_, by Hugo Herbelin).

Tools
^^^^^

- **Changed:** Option `-native-compiler` of the configure script now impacts the default value of the `-native-compiler` option of coqc. The `-native-compiler` option of the configure script supports a new `ondemand` value, which becomes the default, thus preserving the previous default behavior. The stdlib is still precompiled when configuring with `-native-compiler yes`. It is not precompiled otherwise. This is an implementation of point 2 of `CEP #48 `_ (`#13352 `_, by Pierre Roux).
- **Changed:** Added the ability for coq_makefile to directly set the installation folders, through the `COQLIBINSTALL` and `COQDOCINSTALL` variables. See :ref:`coqmakefilelocal` (`#12389 `_, by Martin Bodin, review by Enrico Tassi).
- **Removed:** The option ``-I`` of coqchk was removed (it was deprecated in Coq 8.8) (`#12613 `_, by Gaëtan Gilbert).
- **Fixed:** ``coqchk`` no longer reports names from inner modules of opaque modules as axioms (`#12862 `_, fixes `#12845 `_, by Jason Gross).

CoqIDE
^^^^^^

- **Added:** Support showing diffs for :cmd:`Show Proof` in CoqIDE from the :n:`View` menu. See :ref:`showing_proof_diffs` (`#12874 `_, by Jim Fehrle and Enrico Tassi).
- **Added:** Support for flag :flag:`Printing Goal Names` in the View menu (`#13145 `_, by Hugo Herbelin).

Standard library
^^^^^^^^^^^^^^^^

- **Changed:** In the reals theory, the epsilon in the definition of the modulus of convergence for CReal was changed from 1/n (n in positive) to 2^z (z in Z) so that a precision coarser than one is possible. Also added an upper bound to CReal to enable more efficient computations (`#12186 `_, by Michael Soegtrop).
- **Changed:** Int63 notations now match up with the rest of the standard library: :g:`a \% m`, :g:`m == n`, :g:`m < n`, :g:`m <= n`, and :g:`m ≤ n` have been replaced with :g:`a mod m`, :g:`m =? n`, :g:`m `_, fixes `#12454 `_, by Jason Gross).
- **Changed:** PrimFloat notations now match up with the rest of the standard library: :g:`m == n`, :g:`m < n`, and :g:`m <= n` have been replaced with :g:`m =? n`, :g:`m `_, fixes `#12454 `_, by Jason Gross).
- **Changed:** The sort of cyclic numbers changed from Type to Set. For backward compatibility, a dynamic sort was defined in the 3 packages bignums, coqprime and color. See for example commit 6f62bda in bignums (`#12801 `_, by Vincent Semeria).
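
As a sketch of the :attr:`export` hint locality mentioned in the "Commands and options" entries above (the module name ``HintDemo`` is only for the example)::

   Module HintDemo.
     (* With export locality, the hint is active in client files only after
        an explicit "Import HintDemo", not after a mere "Require". *)
     #[export] Hint Resolve le_n_S : core.
   End HintDemo.

   Import HintDemo.   (* from this point on, auto may use le_n_S *)
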
- **Changed:** ``Require Import Coq.nsatz.NsatzTactic`` now allows using :tacn:`nsatz` with `Z` and `Q` without having to supply instances or using ``Require Import Coq.nsatz.Nsatz``, which transitively requires unneeded files declaring axioms used in the reals (`#12861 `_, fixes `#12860 `_, by Jason Gross). - **Deprecated:** ``prod_curry`` and ``prod_uncurry``, in favor of ``uncurry`` and ``curry`` (`#12716 `_, by Yishuai Li). - **Added:** New lemmas about ``repeat`` in ``List`` and ``Permutation``: ``repeat_app``, ``repeat_eq_app``, ``repeat_eq_cons``, ``repeat_eq_elt``, ``Forall_eq_repeat``, ``Permutation_repeat`` (`#12799 `_, by Olivier Laurent). - **Added:** Extend some list lemmas to both directions: `app_inj_tail_iff`, `app_inv_head_iff`, `app_inv_tail_iff`. (`#12094 `_, fixes `#12093 `_, by Edward Wang). - **Added:** ``Decidable`` instance for negation (`#12420 `_, by Yishuai Li). - **Fixed:** `Coq.Program.Wf.Fix_F_inv` and `Coq.Program.Wf.Fix_eq` are now axiom-free. They no longer assume proof irrelevance. (`#13365 `_, by Li-yao Xia). Infrastructure and dependencies ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - **Changed:** When compiled with OCaml >= 4.10.0, Coq will use the new best-fit GC policy, which should provide some performance benefits. Coq's policy is optimized for speed, but could increase memory consumption in some cases. You are welcome to tune it using the ``OCAMLRUNPARAM`` variable and report back on good settings so we can improve the defaults. (`#13040 `_, fixes `#11277 `_, by Emilio Jesus Gallego Arias). - **Changed:** Coq now uses the `zarith `_ library, based on GNU's gmp instead of ``num`` which is deprecated upstream. The custom ``bigint`` module is no longer provided. (`#11742 `_, `#13007 `_, by Emilio Jesus Gallego Arias and Vicent Laporte, with help from Frédéric Besson). Changes in 8.13.0 ~~~~~~~~~~~~~~~~~ Commands and options ^^^^^^^^^^^^^^^^^^^^ - **Changed:** The warning `custom-entry-overriden` has been renamed to `custom-entry-overridden` (with two d's). (`#13556 `_, by Simon Friis Vindum). Changes in 8.13.1 ~~~~~~~~~~~~~~~~~ Kernel ^^^^^^ - **Fixed:** Fix arities of VM opcodes for some floating-point operations that could cause memory corruption (`#13867 `_, by Guillaume Melquiond). CoqIDE ^^^^^^ - **Added:** Option ``-v`` and ``--version`` to CoqIDE (`#13870 `_, by Guillaume Melquiond). Changes in 8.13.2 ~~~~~~~~~~~~~~~~~ Kernel ^^^^^^ - **Fixed:** Crash when using :tacn:`vm_compute` on an irreducible ``PArray.set`` (`#14005 `_, fixes `#13998 `_, by Guillaume Melquiond). - **Fixed:** Never store persistent arrays as VM / native structured values. This could be used to make vo marshalling crash, and probably breaking some other invariants of the kernel (`#14007 `_, fixes `#14006 `_, by Pierre-Marie Pédrot). Tactic language ^^^^^^^^^^^^^^^^ - **Fixed:** Ltac2 ``Array.init`` no longer incurs exponential overhead when used recursively (`#14012 `_, fixes `#14011 `_, by Jason Gross). Version 8.12 ------------ Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.12 integrates many usability improvements, in particular with respect to notations, scopes and implicit arguments, along with many bug fixes and major improvements to the reference manual. The main changes include: - New :ref:`binder notation<812Implicit>` for non-maximal implicit arguments using :g:`[ ]` allowing to set and see the implicit status of arguments immediately. 
- New notation :g:`Inductive I A | x : s := ...` to distinguish the :ref:`uniform<812Uniform>` from the non-uniform parameters in inductive definitions.
- More robust and expressive treatment of :ref:`implicit inductive<812ImplicitInductive>` parameters in inductive declarations.
- Improvements in the treatment of implicit arguments and partially applied constants in :ref:`notations<812Notations>`, parsing of hexadecimal number notation and better handling of scopes and coercions for printing.
- A correct and efficient :ref:`coercion coherence<812Coercions>` checking algorithm, avoiding spurious or duplicate warnings.
- An improved :cmd:`Search` :ref:`command<812Search>` which accepts complex queries. Note that this takes precedence over the now deprecated :ref:`ssreflect search<812SSRSearch>`.
- Many additions and improvements of the :ref:`standard library<812Stdlib>`.
- Improvements to the :ref:`reference manual<812Refman>` include a more logical organization of chapters along with updated syntax descriptions that match Coq's grammar in most but not all chapters.

Additionally, the `omega` tactic is deprecated in this version of Coq, and we recommend that users switch to :tacn:`lia` in new proof scripts.

See the `Changes in 8.12+beta1`_ section and following sections for the detailed list of changes, including potentially breaking changes marked with **Changed**.

Coq's documentation is available at https://coq.github.io/doc/v8.12/refman (reference manual), and https://coq.github.io/doc/v8.12/stdlib (documentation of the standard library). Developer documentation of the ML API is available at https://coq.github.io/doc/v8.12/api.

Maxime Dénès, Emilio Jesús Gallego Arias, Gaëtan Gilbert, Michael Soegtrop and Théo Zimmermann worked on maintaining and improving the continuous integration system and package building infrastructure.

Erik Martin-Dorel has maintained the `Coq Docker images `_ that are used in many Coq projects for continuous integration.

The OPAM repository for Coq packages has been maintained by Guillaume Claret, Karl Palmskog, Matthieu Sozeau and Enrico Tassi with contributions from many users. A list of packages is available at https://coq.inria.fr/opam/www/.

Previously, most components of Coq had a single principal maintainer. This was changed in 8.12 (`#11295 `_) so that every component now has a team of maintainers, who are in charge of reviewing and merging incoming pull requests. This gave us a chance to significantly expand the pool of maintainers and provide faster feedback to contributors. Special thanks to all our maintainers!

Our current 31 maintainers are Yves Bertot, Frédéric Besson, Tej Chajed, Cyril Cohen, Pierre Corbineau, Pierre Courtieu, Maxime Dénès, Jim Fehrle, Julien Forest, Emilio Jesús Gallego Arias, Gaëtan Gilbert, Georges Gonthier, Benjamin Grégoire, Jason Gross, Hugo Herbelin, Vincent Laporte, Assia Mahboubi, Kenji Maillard, Guillaume Melquiond, Pierre-Marie Pédrot, Clément Pit-Claudel, Kazuhiko Sakaguchi, Vincent Semeria, Michael Soegtrop, Arnaud Spiwack, Matthieu Sozeau, Enrico Tassi, Laurent Théry, Anton Trunov, Li-yao Xia and Théo Zimmermann.

The 59 contributors to this version are Abhishek Anand, Yves Bertot, Frédéric Besson, Lasse Blaauwbroek, Simon Boulier, Quentin Carbonneaux, Tej Chajed, Arthur Charguéraud, Cyril Cohen, Pierre Courtieu, Matthew Dempsky, Maxime Dénès, Andres Erbsen, Erika (@rrika), Nikita Eshkeev, Jim Fehrle, @formalize, Emilio Jesús Gallego Arias, Paolo G.
Giarrusso, Gaëtan Gilbert, Jason Gross, Samuel Gruetter, Attila Gáspár, Hugo Herbelin, Jan-Oliver Kaiser, Robbert Krebbers, Vincent Laporte, Olivier Laurent, Xavier Leroy, Thomas Letan, Yishuai Li, Kenji Maillard, Erik Martin-Dorel, Guillaume Melquiond, Ike Mulder, Guillaume Munch-Maccagnoni, Antonio Nikishaev, Karl Palmskog, Pierre-Marie Pédrot, Clément Pit-Claudel, Ramkumar Ramachandra, Lars Rasmusson, Daniel de Rauglaudre, Talia Ringer, Pierre Roux, Kazuhiko Sakaguchi, Vincent Semeria, @scinart, Kartik Singhal, Michael Soegtrop, Matthieu Sozeau, Enrico Tassi, Laurent Théry, Ralf Treinen, Anton Trunov, Bernhard M. Wiedemann, Li-yao Xia, Nickolai Zeldovich and Théo Zimmermann. Many power users helped to improve the design of this new version via the GitHub issue and pull request system, the Coq development mailing list coqdev@inria.fr, the coq-club@inria.fr mailing list, the `Discourse forum `_ and the new `Coq Zulip chat `_ (thanks to Cyril Cohen for organizing the move from Gitter). Version 8.12's development spanned 6 months from the release of Coq 8.11.0. Emilio Jesus Gallego Arias and Théo Zimmermann are the release managers of Coq 8.12. This release is the result of ~500 PRs merged, closing ~100 issues. | Nantes, June 2020, | Matthieu Sozeau for the Coq development team | Changes in 8.12+beta1 ~~~~~~~~~~~~~~~~~~~~~ .. contents:: :local: Kernel ^^^^^^ - **Fixed:** Specification of :n:`PrimFloat.leb` which made :n:`(x <= y)%float` true for any non-NaN :n:`x` and :n:`y`. (`#12484 `_, fixes `#12483 `_, by Pierre Roux). Specification language, type inference ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - **Changed:** The deprecation warning raised since Coq 8.10 when a trailing implicit is declared to be non-maximally inserted (with the command :cmd:`Arguments`) has been turned into an error (`#11368 `_, by SimonBoulier). - **Changed:** Typeclass resolution, accessible through :tacn:`typeclasses eauto`, now suspends constraints according to their modes instead of failing. If a typeclass constraint does not match any of the declared modes for its class, the constraint is postponed, and the proof search continues on other goals. Proof search does a fixed point computation to try to solve them at a later stage of resolution. It does not fail if there remain only stuck constraints at the end of resolution. This makes typeclasses with declared modes more robust with respect to the order of resolution. (`#10858 `_, fixes `#9058 `_, by Matthieu Sozeau). - **Added:** Warn when manual implicit arguments are used in unexpected positions of a term (e.g. in `Check id (forall {x}, x)`) or when an implicit argument name is shadowed (e.g. in `Check fun f : forall {x:nat} {x}, nat => f`) (`#10202 `_, by Hugo Herbelin). - **Added:** :cmd:`Arguments` now supports setting implicit an anonymous argument, as e.g. in `Arguments id {A} {_}` (`#11098 `_, by Hugo Herbelin, fixes `#4696 `_, `#5173 `_, `#9098 `_). .. _812Implicit: - **Added:** Syntax for non-maximal implicit arguments in definitions and terms using square brackets. The syntax is ``[x : A]``, ``[x]``, ```[A]`` to be consistent with the command :cmd:`Arguments` (`#11235 `_, by Simon Boulier). - **Added:** :cmd:`Implicit Types` are now taken into account for printing. To inhibit it, unset the :flag:`Printing Use Implicit Types` flag (`#11261 `_, by Hugo Herbelin, granting `#10366 `_). .. _812Uniform: - **Added:** New syntax :cmd:`Inductive` :n:`@ident {* @binder } | {* @binder } := ...` to specify which parameters of an inductive type are uniform. 
See :ref:`parametrized-inductive-types` (`#11600 `_, by Gaëtan Gilbert). - **Added:** Warn when using :cmd:`Fixpoint` or :cmd:`CoFixpoint` for definitions which are not recursive (`#12121 `_, by Hugo Herbelin). .. _812ImplicitInductive: - **Fixed:** More robust and expressive treatment of implicit inductive parameters in inductive declarations (`#11579 `_, by Maxime Dénès, Gaëtan Gilbert and Jasper Hugunin; fixes `#7253 `_ and `#11585 `_). - **Fixed:** Anomaly which could be raised when printing binders with implicit types (`#12323 `_, by Hugo Herbelin; fixes `#12322 `_). - **Fixed:** Case of an anomaly in trying to infer the return clause of an ill-typed :g:`match` (`#12422 `_, fixes `#12418 `_, by Hugo Herbelin). .. _812Notations: Notations ^^^^^^^^^ - **Changed:** Notation scopes are now always inherited in notations binding a partially applied constant, including for notations binding an expression of the form :n:`@@qualid`. The latter was not the case beforehand (part of `#11120 `_). - **Changed:** The printing algorithm now interleaves search for notations and removal of coercions (`#11172 `_, by Hugo Herbelin). - **Changed:** Nicer printing for decimal constants in R and Q. 1.5 is now printed 1.5 rather than 15e-1 (`#11848 `_, by Pierre Roux). - **Removed:** deprecated ``compat`` modifier of :cmd:`Notation` and :cmd:`Infix` commands. Use the :attr:`deprecated` attribute instead (`#11113 `_, by Théo Zimmermann, with help from Jason Gross). - **Deprecated:** Numeral Notation on ``Decimal.uint``, ``Decimal.int`` and ``Decimal.decimal`` are replaced respectively by numeral notations on ``Numeral.uint``, ``Numeral.int`` and ``Numeral.numeral`` (`#11948 `_, by Pierre Roux). - **Added:** Notations declared with the ``where`` clause in the declaration of inductive types, coinductive types, record fields, fixpoints and cofixpoints now support the ``only parsing`` modifier (`#11602 `_, by Hugo Herbelin). - **Added:** :flag:`Printing Parentheses` flag to print parentheses even when implied by associativity or precedence (`#11650 `_, by Hugo Herbelin and Abhishek Anand). - **Added:** Numeral notations now parse hexadecimal constants such as ``0x2a`` or ``0xb.2ap-2``. Parsers added for :g:`nat`, :g:`positive`, :g:`Z`, :g:`N`, :g:`Q`, :g:`R`, primitive integers and primitive floats (`#11948 `_, by Pierre Roux). - **Added:** Abbreviations support arguments occurring both in term and binder position (`#8808 `_, by Hugo Herbelin). - **Fixed:** Different interpretations in different scopes of the same notation string can now be associated with different printing formats (`#10832 `_, by Hugo Herbelin, fixes `#6092 `_ and `#7766 `_). - **Fixed:** Parsing and printing consistently handle inheritance of implicit arguments in notations. With the exception of notations of the form :n:`Notation @string := @@qualid` and :n:`Notation @ident := @@qualid` which inhibit implicit arguments, all notations binding a partially applied constant, as e.g. in :n:`Notation @string := (@qualid {+ @arg })`, or :n:`Notation @string := (@@qualid {+ @arg })`, or :n:`Notation @ident := (@qualid {+ @arg })`, or :n:`Notation @ident := (@@qualid {+ @arg })`, inherit the remaining implicit arguments (`#11120 `_, by Hugo Herbelin, fixing `#4690 `_ and `#11091 `_). - **Fixed:** Notations in ``only printing`` mode do not uselessly reserve parsing keywords (`#11590 `_, by Hugo Herbelin, fixes `#9741 `_). - **Fixed:** Numeral Notations now play better with multiple scopes for the same inductive type. 
Previously, when multiple numeral notations were defined for the same inductive, only the last one was considered for printing. Now, among the notations that are usable for printing and either have a scope delimiter or are open, the selection is made according to the order of open scopes, or according to the last defined notation if no appropriate scope is open (`#12163 `_, fixes `#12159 `_, by Pierre Roux, review by Hugo Herbelin and Jason Gross). Tactics ^^^^^^^ - **Changed:** The :tacn:`rapply` tactic in :g:`Coq.Program.Tactics` now handles arbitrary numbers of underscores and takes in a :g:`uconstr`. In rare cases where users were relying on :tacn:`rapply` inserting exactly 15 underscores and no more, due to the lemma having a completely unspecified codomain (and thus allowing for any number of underscores), the tactic will now loop instead (`#10760 `_, by Jason Gross). - **Changed:** The :g:`auto with zarith` tactic and variations (including :tacn:`intuition`) may now call :tacn:`lia` instead of `omega` (when the `Omega` module is loaded); more goals may be automatically solved, fewer section variables will be captured spuriously (`#11018 `_, by Vincent Laporte). - **Changed:** The new :flag:`NativeCompute Timing` flag causes calls to :tacn:`native_compute` (as well as kernel calls to the native compiler) to emit separate timing information about conversion to native code, compilation, execution, and reification. It replaces the timing information previously emitted when the `-debug` command-line flag was set, and allows more fine-grained timing of the native compiler (`#11025 `_, by Jason Gross). Additionally, the timing information now uses real time rather than user time (fixes `#11962 `_, `#11963 `_, by Jason Gross) - **Changed:** Improve the efficiency of `PreOmega.elim_let` using an iterator implemented in OCaml (`#11370 `_, by Frédéric Besson). - **Changed:** Improve the efficiency of :tacn:`zify` by rewritting the remaining Ltac code in OCaml (`#11429 `_, by Frédéric Besson). - **Changed:** Backtrace information for tactics has been improved (`#11755 `_, by Emilio Jesus Gallego Arias). - **Changed:** The default tactic used by :g:`firstorder` is :g:`auto with core` instead of :g:`auto with *`; see :ref:`decisionprocedures` for details; old behavior can be reset by using the `-compat 8.12` command-line flag; to ease the migration of legacy code, the default solver can be set to `debug auto with *` with `Set Firstorder Solver debug auto with *` (`#11760 `_, by Vincent Laporte). - **Changed:** :tacn:`autounfold` no longer fails when the :cmd:`Opaque` command is used on constants in the hint databases (`#11883 `_, by Attila Gáspár). - **Changed:** Tactics with qualified name of the form ``Coq.Init.Notations`` are now qualified with prefix ``Coq.Init.Ltac``; users of the ``-noinit`` option should now import ``Coq.Init.Ltac`` if they want to use Ltac (`#12023 `_, by Hugo Herbelin; minor source of incompatibilities). - **Changed:** Tactic :tacn:`subst` :n:`@ident` now fails over a section variable which is indirectly dependent in the goal; the incompatibility can generally be fixed by first clearing the hypotheses causing an indirect dependency, as reported by the error message, or by using :tacn:`rewrite` :n:`... in *` instead; similarly, :tacn:`subst` has no more effect on such variables (`#12146 `_, by Hugo Herbelin; fixes `#10812 `_ and `#12139 `_). 
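
As an illustration of the square-bracket syntax for non-maximal implicit arguments described above, a minimal sketch::

   (* A is implicit but not maximally inserted, as if declared
      with "Arguments head [A]". *)
   Definition head [A] (l : list A) : option A :=
     match l with
     | nil => None
     | cons x _ => Some x
     end.

   Check head.                 (* A is displayed as a bracketed implicit *)
   Check head (1 :: nil)%list.
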
- **Changed:** The check that :tacn:`unfold` arguments were indeed unfoldable has been moved to runtime (`#12256 `_, by Pierre-Marie Pédrot; fixes `#5764 `_, `#5159 `_, `#4925 `_ and `#11727 `_). - **Changed** When the tactic :tacn:`functional induction` :n:`c__1 c__2 ... c__n` is used with no parenthesis around :n:`c__1 c__2 ... c__n`, :n:`c__1 c__2 ... c__n` is now read as one single applicative term. In particular implicit arguments should be omitted. Rare source of incompatibility (`#12326 `_, by Pierre Courtieu). - **Changed:** When using :tacn:`exists` or :tacn:`eexists` with multiple arguments, the evaluation of arguments and applications of constructors are now interleaved. This improves unification in some cases (`#12366 `_, fixes `#12365 `_, by Attila Gáspár). - **Removed:** Undocumented ``omega with``. Using :tacn:`lia` is the recommended replacement, although the old semantics of ``omega with *`` can also be recovered with ``zify; omega`` (`#11288 `_, by Emilio Jesus Gallego Arias). - **Removed:** Deprecated syntax `_eqn` for :tacn:`destruct` and :tacn:`remember`. Use `eqn:` syntax instead (`#11877 `_, by Hugo Herbelin). - **Removed:** `at` clauses can no longer be used with :tacn:`autounfold`. Since they had no effect, it is safe to remove them (`#11883 `_, by Attila Gáspár). - **Deprecated:** The `omega` tactic is deprecated; use :tacn:`lia` from the :ref:`Micromega ` plugin instead (`#11976 `_, by Vincent Laporte). - **Added:** The :tacn:`zify` tactic is now aware of `Pos.pred_double`, `Pos.pred_N`, `Pos.of_nat`, `Pos.add_carry`, `Pos.pow`, `Pos.square`, `Z.pow`, `Z.double`, `Z.pred_double`, `Z.succ_double`, `Z.square`, `Z.div2`, and `Z.quot2`. Injections for internal definitions in module `ZifyBool` (`isZero` and `isLeZero`) are also added to help users to declare new :tacn:`zify` class instances using Micromega tactics (`#10998 `_, by Kazuhiko Sakaguchi). - **Added:** :cmd:`Show Lia Profile` prints some statistics about :tacn:`lia` calls (`#11474 `_, by Frédéric Besson). - **Added:** Syntax :tacn:`pose proof` :n:`(@ident:=@term)` as an alternative to :tacn:`pose proof` :n:`@term as @ident`, following the model of :tacn:`pose` :n:`(@ident:=@term)` (`#11522 `_, by Hugo Herbelin). - **Added:** New tactical :tacn:`with_strategy` which behaves like the command :cmd:`Strategy`, with effects local to the given tactic (`#12129 `_, by Jason Gross). - **Added:** The :tacn:`zify` tactic is now aware of `Nat.le`, `Nat.lt` and `Nat.eq` (`#12213 `_, by Frédéric Besson; fixes `#12210 `_). - **Fixed:** :tacn:`zify` now handles :g:`Z.pow_pos` by default. In Coq 8.11, this was the case only when loading module :g:`ZifyPow` because this triggered a regression of :tacn:`lia`. The regression is now fixed, and the module kept only for compatibility (`#11362 `_, fixes `#11191 `_, by Frédéric Besson). - **Fixed:** Efficiency regression of :tacn:`lia` (`#11474 `_, fixes `#11436 `_, by Frédéric Besson). - **Fixed:** The behavior of :tacn:`autounfold` no longer depends on the names of terms and modules (`#11883 `_, fixes `#7812 `_, by Attila Gáspár). - **Fixed:** Wrong type error in tactic :tacn:`functional induction` (`#12326 `_, by Pierre Courtieu, fixes `#11761 `_, reported by Lasse Blaauwbroek). Tactic language ^^^^^^^^^^^^^^^ - **Changed:** The "reference" tactic generic argument now accepts arbitrary variables of the goal context (`#12254 `_, by Pierre-Marie Pédrot). 
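
A small example of the alternative :tacn:`pose proof` syntax mentioned above (a minimal sketch)::

   Goal True.
   Proof.
     (* new form, equivalent to: pose proof I as H *)
     pose proof (H := I).
     exact H.
   Qed.
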
- **Added:** An array library for Ltac2 (as compatible as possible with OCaml standard library) (`#10343 `_, by Michael Soegtrop). - **Added:** The Ltac2 rebinding command :cmd:`Ltac2 Set` has been extended with the ability to give a name to the old value so as to be able to reuse it inside the new one (`#11503 `_, by Pierre-Marie Pédrot). - **Added:** Ltac2 notations for :tacn:`enough` and :tacn:`eenough` (`#11740 `_, by Michael Soegtrop). - **Added:** New Ltac2 function ``Fresh.Free.of_goal`` to return the list of names of declarations of the current goal; new Ltac2 function ``Fresh.in_goal`` to return a variable fresh in the current goal (`#11882 `_, by Hugo Herbelin). - **Added:** Ltac2 notations for reductions in terms: :n:`eval @red_expr in @term` (`#11981 `_, by Michael Soegtrop). - **Fixed:** The :flag:`Ltac Profiling` machinery now correctly handles backtracking into multi-success tactics. The call-counts of some tactics are unfortunately inflated by 1, as some tactics are implicitly implemented as :g:`tac + fail`, which has two entry-points rather than one (fixes `#12196 `_, `#12197 `_, by Jason Gross). SSReflect ^^^^^^^^^ .. _812SSRSearch: - **Changed:** The `Search (ssreflect)` command that used to be available when loading the `ssreflect` plugin has been moved to a separate plugin that needs to be loaded separately: `ssrsearch` (part of `#8855 `_, fixes `#12253 `_, by Théo Zimmermann). - **Deprecated:** `Search (ssreflect)` (available through `Require ssrsearch.`) in favor of the `headconcl:` clause of :cmd:`Search` (part of `#8855 `_, by Théo Zimmermann). Flags, options and attributes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - **Changed:** :term:`Legacy attributes ` can now be passed in any order (`#11665 `_, by Théo Zimmermann). - **Removed:** ``Typeclasses Axioms Are Instances`` flag, deprecated since 8.10. Use :cmd:`Declare Instance` for axioms which should be instances (`#11185 `_, by Théo Zimmermann). - **Removed:** Deprecated unsound compatibility ``Template Check`` flag that was introduced in 8.10 to help users gradually move their template polymorphic inductive type definitions outside sections (`#11546 `_, by Pierre-Marie Pédrot). - **Removed:** Deprecated ``Shrink Obligations`` flag (`#11828 `_, by Emilio Jesus Gallego Arias). - **Removed:** Unqualified ``polymorphic``, ``monomorphic``, ``template``, ``notemplate`` attributes (they were deprecated since Coq 8.10). Use :attr:`universes(polymorphic)`, ``universes(monomorphic)``, :attr:`universes(template)` and ``universes(notemplate)`` instead (`#11663 `_, by Théo Zimmermann). - **Deprecated:** `Hide Obligations` flag (`#11828 `_, by Emilio Jesus Gallego Arias). - **Added:** Handle the :attr:`local` attribute in :cmd:`Canonical Structure` declarations (`#11162 `_, by Enrico Tassi). - **Added:** New attributes supported when defining an inductive type :attr:`universes(cumulative)`, ``universes(noncumulative)`` and :attr:`private(matching)`, which correspond to legacy attributes ``Cumulative``, ``NonCumulative``, and the previously undocumented ``Private`` (`#11665 `_, by Théo Zimmermann). - **Added:** The :ref:`Hint ` commands now accept the :attr:`export` locality as an attribute, allowing to make import-scoped hints (`#11812 `_, by Pierre-Marie Pédrot). - **Added:** :flag:`Cumulative StrictProp` to control cumulativity of |SProp| (`#12034 `_, by Gaëtan Gilbert). Commands ^^^^^^^^ .. _812Coercions: - **Changed:** The :cmd:`Coercion` command has been improved to check the coherence of the inheritance graph. 
It checks whether a circular inheritance path of `C >-> C` is convertible with the identity function or not, then reports it as an ambiguous path if it is not. The new mechanism does not report ambiguous paths that are redundant with others. For example, checking the ambiguity of `[f; g]` and `[f'; g]` is redundant with that of `[f]` and `[f']` and thus will not be reported (`#11258 `_, by Kazuhiko Sakaguchi).

- **Changed:** Several commands (:cmd:`Search`, :cmd:`About`, ...) now print the implicit arguments in brackets when printing types (`#11795 `_, by Simon Boulier).
- **Changed:** The warning when using :cmd:`Require` inside a section moved from the ``deprecated`` category to the ``fragile`` category, because there is no plan to remove the functionality at this time (`#11972 `_, by Gaëtan Gilbert).
- **Changed:** :cmd:`Redirect` now obeys the :opt:`Printing Width` and :opt:`Printing Depth` options (`#12358 `_, by Emilio Jesus Gallego Arias).
- **Removed:** Recursive OCaml loadpaths are not supported anymore; the command ``Add Rec ML Path`` has been removed; :cmd:`Add ML Path` is now the preferred one. We have also dropped support for the non-qualified version of the :cmd:`Add LoadPath` command, that is to say, the ``Add LoadPath dir`` version; you must now always specify a prefix using ``Add Loadpath dir as Prefix`` (`#11618 `_, by Emilio Jesus Gallego Arias).
- **Removed:** undocumented ``Chapter`` command. Use :cmd:`Section` instead (`#11746 `_, by Théo Zimmermann).
- **Removed:** ``SearchAbout`` command that was deprecated since 8.5. Use :cmd:`Search` instead (`#11944 `_, by Jim Fehrle).
- **Deprecated:** Declaration of arbitrary terms as hints. Global references are now preferred (`#7791 `_, by Pierre-Marie Pédrot).
- **Deprecated:** `SearchHead` in favor of the new `headconcl:` clause of :cmd:`Search` (part of `#8855 `_, by Théo Zimmermann).
- **Added:** :cmd:`Print Canonical Projections` can now take constants as arguments and prints only the unification rules that involve or are synthesized from the given constants (`#10747 `_, by Kazuhiko Sakaguchi).
- **Added:** A section variable introduced with :cmd:`Let` can be declared as a :cmd:`Canonical Structure` (`#11164 `_, by Enrico Tassi).
- **Added:** Support for universe bindings and universe constraints in :cmd:`Let` definitions (`#11534 `_, by Théo Zimmermann).

.. _812Search:

- **Added:** Support for new clauses `hyp:`, `headhyp:`, `concl:`, `headconcl:`, `head:` and `is:` in :cmd:`Search`. Support for complex search queries combining disjunctions, conjunctions and negations; a small example appears below (`#8855 `_, by Hugo Herbelin, with ideas from Cyril Cohen and help from Théo Zimmermann).
- **Fixed:** A printing bug in the presence of elimination principles with local definitions (`#12295 `_, by Hugo Herbelin; fixes `#12233 `_).
- **Fixed:** Anomalies with :cmd:`Show Proof` (`#12296 `_, by Hugo Herbelin; fixes `#12234 `_).

Tools
^^^^^

- **Changed:** Internal options and behavior of ``coqdep``. ``coqdep`` no longer works as a replacement for ``ocamldep``, thus ``.ml`` files are not supported as input. Also, several deprecated options have been removed: ``-w``, ``-D``, ``-mldep``, ``-prefix``, ``-slash``, and ``-dumpbox``. Passing ``-boot`` to ``coqdep`` will not load any path by default now, ``-R/-Q`` should be used instead (`#11523 `_ and `#11589 `_, by Emilio Jesus Gallego Arias).
- **Changed:** The order in which the require flags `-ri`, `-re`, `-rfrom`, etc. and the option flags `-set`, `-unset` are given now matters.
In particular, it is now possible to interleave the loading of plugins and the setting of options by choosing the right order for these flags. The load flags `-l` and `-lv` are still processed afterward for now (`#11851 `_ and `#12097 `_, by Lasse Blaauwbroek). - **Changed:** The ``cleanall`` target of a makefile generated by ``coq_makefile`` now erases ``.lia.cache`` and ``.nia.cache`` (`#12006 `_, by Olivier Laurent). - **Changed:** The output of ``make TIMED=1`` (and therefore the timing targets such as ``print-pretty-timed`` and ``print-pretty-timed-diff``) now displays the full name of the output file being built, rather than the stem of the rule (which was usually the filename without the extension, but in general could be anything for user-defined rules involving ``%``) (`#12126 `_, by Jason Gross). - **Changed:** When passing ``TIMED=1`` to ``make`` with either Coq's own makefile or a ``coq_makefile``\-made makefile, timing information is now printed for OCaml files as well (`#12211 `_, by Jason Gross). - **Changed:** The pretty-timed scripts and targets now print a newline at the end of their tables, rather than creating text with no trailing newline (`#12368 `_, by Jason Gross). - **Removed:** The `-load-ml-source` and `-load-ml-object` command-line options have been removed; their use was very limited, you can achieve the same adding additional object files in the linking step or using a plugin (`#11409 `_, by Emilio Jesus Gallego Arias). - **Removed:** The confusingly-named `-require` command-line option, which was deprecated since 8.11. Use the equivalent `-require-import` / `-ri` options instead (`#12005 `_, by Théo Zimmermann). - **Deprecated:** ``-cumulative-sprop`` command-line flag in favor of the new :flag:`Cumulative StrictProp` flag (`#12034 `_, by Gaëtan Gilbert). - **Added:** A new documentation environment ``details`` to make certain portion of a Coq document foldable. See :ref:`coqdoc-hide-show` (`#10592 `_, by Thomas Letan). - **Added:** The ``make-both-single-timing-files.py`` script now accepts a ``--fuzz=N`` parameter on the command line which determines how many characters two lines may be offset in the "before" and "after" timing logs while still being considered the same line. When invoking this script via the ``print-pretty-single-time-diff`` target in a ``Makefile`` made by ``coq_makefile``, you can set this argument by passing ``TIMING_FUZZ=N`` to ``make`` (`#11302 `_, by Jason Gross). - **Added:** The ``make-one-time-file.py`` and ``make-both-time-files.py`` scripts now accept a ``--real`` parameter on the command line to print real times rather than user times in the tables. The ``make-both-single-timing-files.py`` script accepts a ``--user`` parameter to use user times. When invoking these scripts via the ``print-pretty-timed`` or ``print-pretty-timed-diff`` or ``print-pretty-single-time-diff`` targets in a ``Makefile`` made by ``coq_makefile``, you can set this argument by passing ``TIMING_REAL=1`` (to pass ``--real``) or ``TIMING_REAL=0`` (to pass ``--user``) to ``make`` (`#11302 `_, by Jason Gross). - **Added:** Coq's build system now supports both ``TIMING_FUZZ``, ``TIMING_SORT_BY``, and ``TIMING_REAL`` just like a ``Makefile`` made by ``coq_makefile`` (`#11302 `_, by Jason Gross). 
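
The new :cmd:`Search` clauses described in the "Commands" entries above can be freely combined; a couple of sample queries (a sketch, output not shown)::

   (* lemmas whose conclusion is headed by le *)
   Search headconcl:le.

   (* lemmas mentioning an addition in their conclusion
      and containing "comm" in their name *)
   Search "comm" concl:(_ + _).
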
- **Added:** The ``make-one-time-file.py`` and ``make-both-time-files.py`` scripts now include peak memory usage information in the tables (can be turned off by the ``--no-include-mem`` command-line parameter), and a ``--sort-by-mem`` parameter to sort the tables by memory rather than time. When invoking these scripts via the ``print-pretty-timed`` or ``print-pretty-timed-diff`` targets in a ``Makefile`` made by ``coq_makefile``, you can set this argument by passing ``TIMING_INCLUDE_MEM=0`` (to pass ``--no-include-mem``) and ``TIMING_SORT_BY_MEM=1`` (to pass ``--sort-by-mem``) to ``make`` (`#11606 `_, by Jason Gross). - **Added:** Coq's build system now supports both ``TIMING_INCLUDE_MEM`` and ``TIMING_SORT_BY_MEM`` just like a ``Makefile`` made by ``coq_makefile`` (`#11606 `_, by Jason Gross). - **Added:** New ``coqc`` / ``coqtop`` option ``-boot`` that will not bind the `Coq` library prefix by default (`#11617 `_, by Emilio Jesus Gallego Arias). - **Added:** Definitions in coqdoc link to themselves, giving access in html to their own url (`#12026 `_, by Hugo Herbelin; granting `#7093 `_). - **Added:** Hyperlinks on bound variables in coqdoc (`#12033 `_, by Hugo Herbelin; it incidentally fixes `#7697 `_). - **Added:** Highlighting of link targets in coqdoc (`#12091 `_, by Hugo Herbelin). - **Fixed:** The various timing targets for Coq's standard library now correctly display and label the "before" and "after" columns, rather than mixing them up (`#11302 `_ fixes `#11301 `_, by Jason Gross). - **Fixed:** The sorting order of the timing script ``make-both-time-files.py`` and the target ``print-pretty-timed-diff`` is now deterministic even when the sorting order is ``absolute`` or ``diff``; previously the relative ordering of two files with identical times was non-deterministic (`#11606 `_, by Jason Gross). - **Fixed:** Fields of a record tuple now link in coqdoc to their definition (`#12027 `_, fixes `#3415 `_, by Hugo Herbelin). - **Fixed:** ``coqdoc`` now reports the location of a mismatched opening ``[[`` instead of throwing an uninformative exception (`#12037 `_, fixes `#9670 `_, by Xia Li-yao). - **Fixed:** coqchk incorrectly reporting names from opaque modules as axioms (`#12076 `_, by Pierre Roux; fixes `#5030 `_). - **Fixed:** coq_makefile-generated ``Makefile``\s ``pretty-timed-diff`` target no longer raises Python exceptions in the rare corner case where the log of times contains no files (`#12388 `_, fixes `#12387 `_, by Jason Gross). CoqIDE ^^^^^^^^ - **Removed:** "Tactic" menu from CoqIDE which had been unmaintained for a number of years (`#11414 `_, by Pierre-Marie Pédrot). - **Removed:** "Revert all buffers" command from CoqIDE which had been broken for a long time (`#11415 `_, by Pierre-Marie Pédrot). .. _812Stdlib: Standard library ^^^^^^^^^^^^^^^^ - **Changed:** Notations :n:`[|@term|]` and :n:`[||@term||]` for morphisms from 63-bit integers to :g:`Z` and :g:`zn2z int` have been removed in favor of :n:`φ(@term)` and :n:`Φ(@term)` respectively. These notations were breaking Ltac parsing (`#11686 `_, by Maxime Dénès). - **Changed:** The names of ``Sorted_sort`` and ``LocallySorted_sort`` in ``Coq.Sorting.MergeSort`` have been swapped to appropriately reflect their meanings (`#11885 `_, by Lysxia). - **Changed:** Notations :g:`<=?` and :g:``_, `#11891 `_, by Jason Gross). - **Changed:** The level of :g:`≡` in ``Coq.Numbers.Cyclic.Int63.Int63`` is now 70, no associativity, in line with :g:`=`. 
Note that this is a minor incompatibility with developments that declare their own :g:`≡` notation and import ``Int63`` (fixes `#11905 `_, `#11909 `_, by Jason Gross). - **Changed:** No longer re-export ``ListNotations`` from ``Program`` (``Program.Syntax``) (`#11992 `_, by Antonio Nikishaev). - **Changed:** It is now possible to import the :g:`nsatz` machinery without transitively depending on the axioms of the real numbers nor of classical logic by loading ``Coq.nsatz.NsatzTactic`` rather than ``Coq.nsatz.Nsatz``. Note that some constants have changed kernel names, living in ``Coq.nsatz.NsatzTactic`` rather than ``Coq.nsatz.Nsatz``; this might cause minor incompatibilities that can be fixed by actually running :g:`Import Nsatz` rather than relying on absolute names (`#12073 `_, by Jason Gross; fixes `#5445 `_). - **Changed:** new lemma ``NoDup_incl_NoDup`` in ``List.v`` to remove useless hypothesis `NoDup l'` in ``Sorting.Permutation.NoDup_Permutation_bis`` (`#12120 `_, by Olivier Laurent). - **Changed:** :cmd:`Fixpoints ` of the standard library without a recursive call turned into ordinary :cmd:`Definitions ` (`#12121 `_, by Hugo Herbelin; fixes `#11903 `_). - **Deprecated:** ``Bool.leb`` in favor of ``Bool.le``. The definition of ``Bool.le`` is made local to avoid conflicts with ``Nat.le``. As a consequence, previous calls to ``leb`` based on importing ``Bool`` should now be qualified into ``Bool.le`` even if ``Bool`` is imported (`#12162 `_, by Olivier Laurent). - **Added:** Theorem :g:`bezout_comm` for natural numbers (`#11127 `_, by Daniel de Rauglaudre). - **Added** :g:`rew dependent` notations for the dependent version of :g:`rew` in :g:`Coq.Init.Logic.EqNotations` to improve the display and parsing of :g:`match` statements on :g:`Logic.eq` (`#11240 `_, by Jason Gross). - **Added:** Lemmas about lists: - properties of ``In``: ``in_elt``, ``in_elt_inv`` - properties of ``nth``: ``app_nth2_plus``, ``nth_middle``, ``nth_ext`` - properties of ``last``: ``last_last``, ``removelast_last`` - properties of ``remove``: ``remove_cons``, ``remove_app``, ``notin_remove``, ``in_remove``, ``in_in_remove``, ``remove_remove_comm``, ``remove_remove_eq``, ``remove_length_le``, ``remove_length_lt`` - properties of ``concat``: ``in_concat``, ``remove_concat`` - properties of ``map`` and ``flat_map``: ``map_last``, ``map_eq_cons``, ``map_eq_app``, ``flat_map_app``, ``flat_map_ext``, ``nth_nth_nth_map`` - properties of ``incl``: ``incl_nil_l``, ``incl_l_nil``, ``incl_cons_inv``, ``incl_app_app``, ``incl_app_inv``, ``remove_incl``, ``incl_map``, ``incl_filter``, ``incl_Forall_in_iff`` - properties of ``NoDup`` and ``nodup``: ``NoDup_rev``, ``NoDup_filter``, ``nodup_incl`` - properties of ``Exists`` and ``Forall``: ``Exists_nth``, ``Exists_app``, ``Exists_rev``, ``Exists_fold_right``, ``incl_Exists``, ``Forall_nth``, ``Forall_app``, ``Forall_elt``, ``Forall_rev``, ``Forall_fold_right``, ``incl_Forall``, ``map_ext_Forall``, ``Exists_or``, ``Exists_or_inv``, ``Forall_and``, ``Forall_and_inv``, ``exists_Forall``, ``Forall_image``, ``concat_nil_Forall``, ``in_flat_map_Exists``, ``notin_flat_map_Forall`` - properties of ``repeat``: ``repeat_cons``, ``repeat_to_concat`` - definitions and properties of ``list_sum`` and ``list_max``: ``list_sum_app``, ``list_max_app``, ``list_max_le``, ``list_max_lt`` - misc: ``elt_eq_unit``, ``last_length``, ``rev_eq_app``, ``removelast_firstn_len``, ``cons_seq``, ``seq_S`` (`#11249 `_, `#12237 `_, by Olivier Laurent). 
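
A few of the new ``List`` lemmas listed above can be browsed directly; a minimal sketch (use :cmd:`About` or :cmd:`Check` to see the exact statements)::

   Require Import List.

   Check in_elt.       (* membership of an element placed between two lists *)
   Check last_length.  (* length of a list extended on the right *)
   Check repeat_app.   (* relates repeat on a sum to an append of repeats *)
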
- **Added:** Well-founded induction principles for `nat`: ``lt_wf_rect1``, ``lt_wf_rect``, ``gt_wf_rect``, ``lt_wf_double_rect`` (`#11335 `_, by Olivier Laurent). - **Added:** ``remove'`` and ``count_occ'`` over lists, alternatives to ``remove`` and ``count_occ`` based on ``filter`` (`#11350 `_, by Yishuai Li). - **Added:** Facts about ``N.iter`` and ``Pos.iter``: - ``N.iter_swap_gen``, ``N.iter_swap``, ``N.iter_succ``, ``N.iter_succ_r``, ``N.iter_add``, ``N.iter_ind``, ``N.iter_invariant``; - ``Pos.iter_succ_r``, ``Pos.iter_ind``. (`#11880 `_, by Lysxia). - **Added:** Facts about ``Permutation``: - structure: ``Permutation_refl'``, ``Permutation_morph_transp`` - compatibilities: ``Permutation_app_rot``, ``Permutation_app_swap_app``, ``Permutation_app_middle``, ``Permutation_middle2``, ``Permutation_elt``, ``Permutation_Forall``, ``Permutation_Exists``, ``Permutation_Forall2``, ``Permutation_flat_map``, ``Permutation_list_sum``, ``Permutation_list_max`` - inversions: ``Permutation_app_inv_m``, ``Permutation_vs_elt_inv``, ``Permutation_vs_cons_inv``, ``Permutation_vs_cons_cons_inv``, ``Permutation_map_inv``, ``Permutation_image``, ``Permutation_elt_map_inv`` - length-preserving definition by means of transpositions ``Permutation_transp`` with associated properties: ``Permutation_transp_sym``, ``Permutation_transp_equiv``, ``Permutation_transp_cons``, ``Permutation_Permutation_transp``, ``Permutation_ind_transp`` (`#11946 `_, by Olivier Laurent). - **Added:** Notations for sigma types: ``{ x & P & Q }``, ``{ ' pat & P }``, ``{ ' pat & P & Q }`` (`#11957 `_, by Olivier Laurent). - **Added:** Order relations ``lt`` and ``compare`` added in ``Bool.Bool``. Order properties for ``bool`` added in ``Bool.BoolOrder`` as well as two modules ``Bool_as_OT`` and ``Bool_as_DT`` in ``Structures.OrdersEx`` (`#12008 `_, by Olivier Laurent). - **Added:** Properties of some operations on vectors: - ``nth_order``: ``nth_order_hd``, ``nth_order_tl``, ``nth_order_ext`` - ``replace``: ``nth_order_replace_eq``, ``nth_order_replace_neq``, ``replace_id``, ``replace_replace_eq``, ``replace_replace_neq`` - ``map``: ``map_id``, ``map_map``, ``map_ext_in``, ``map_ext`` - ``Forall`` and ``Forall2``: ``Forall_impl``, ``Forall_forall``, ``Forall_nth_order``, ``Forall2_nth_order`` (`#12014 `_, by Olivier Laurent). - **Added:** Lemmas :g:`orb_negb_l`, :g:`andb_negb_l`, :g:`implb_true_iff`, :g:`implb_false_iff`, :g:`implb_true_r`, :g:`implb_false_r`, :g:`implb_true_l`, :g:`implb_false_l`, :g:`implb_same`, :g:`implb_contrapositive`, :g:`implb_negb`, :g:`implb_curry`, :g:`implb_andb_distrib_r`, :g:`implb_orb_distrib_r`, :g:`implb_orb_distrib_l` in library :g:`Bool` (`#12018 `_, by Hugo Herbelin). - **Added:** Definition and properties of cyclic permutations / circular shifts: ``CPermutation`` (`#12031 `_, by Olivier Laurent). - **Added:** ``Structures.OrderedTypeEx.Ascii_as_OT`` (`#12044 `_, by formalize.eth (formalize@protonmail.com)). - **Fixed:** Rewrote ``Structures.OrderedTypeEx.String_as_OT.compare`` to avoid huge proof terms (`#12044 `_, by formalize.eth (formalize@protonmail.com); fixes `#12015 `_). Reals library ^^^^^^^^^^^^^ - **Changed:** Cleanup of names in the Reals theory: replaced `tan_is_inj` with `tan_inj` and replaced `atan_right_inv` with `tan_atan` - compatibility notations are provided. Moved various auxiliary lemmas from `Ratan.v` to more appropriate places (`#9803 `_, by Laurent Théry and Michael Soegtrop). - **Changed:** Replace `CRzero` and `CRone` by `CR_of_Q 0` and `CR_of_Q 1` in `ConstructiveReals`. 
  Use implicit arguments for `ConstructiveReals`. Move `ConstructiveReals` into the new directory `Abstract`. Remove imports of implementations inside those `Abstract` files. Move the implementation by means of Cauchy sequences into the new directory `Cauchy`. Split files `ConstructiveMinMax` and `ConstructivePower`.

  .. warning:: The constructive reals modules are marked as experimental.

  (`#11725 `_, `#12287 `_ and `#12288 `_, by Vincent Semeria).

- **Removed:** Type `RList` has been removed. All uses have been replaced by `list R`. Functions from `RList` named `In`, `Rlength`, `cons_Rlist`, `app_Rlist` have also been removed as they are essentially the same as `In`, `length`, `app`, and `map` from `List`, modulo the following changes:

  - `RList.In x (RList.cons a l)` used to be convertible to `(x = a) \/ RList.In x l`, but `List.In x (a :: l)` is convertible to `(a = x) \/ List.In x l`. The equality is reversed.

  - `app_Rlist` and `List.map` take arguments in a different order.

  (`#11404 `_, by Yves Bertot).

- **Added:** Inverse trigonometric functions `asin` and `acos` with lemmas for the derivatives, bounds and special values of these functions; an extensive set of identities between trigonometric functions and their inverse functions; lemmas for the injectivity of sine and cosine; lemmas on the derivative of the inverse of decreasing functions and on the derivative of horizontally mirrored functions; various generic auxiliary lemmas and definitions for `Rsqr`, `sqrt`, `posreal` and others (`#9803 `_, by Laurent Théry and Michael Soegtrop).

Extraction
^^^^^^^^^^

- **Added:** Support for better extraction of strings in OCaml and Haskell: `ExtrOcamlNativeString` provides bindings from the Coq `String` type to the OCaml `string` type, and string literals can be extracted to literals, both in OCaml and Haskell (`#10486 `_, by Xavier Leroy, with help from Maxime Dénès, review by Hugo Herbelin).

- **Fixed:** In Haskell extraction with ``ExtrHaskellString``, equality comparisons on strings and characters are now guaranteed to be uniquely well-typed, even in very polymorphic contexts under ``unsafeCoerce``; this is achieved by adding type annotations to the extracted code, and by making ``ExtrHaskellString`` export ``ExtrHaskellBasic`` (`#12263 `_, by Jason Gross, fixes `#12257 `_ and `#12258 `_).

.. _812Refman:

Reference manual
^^^^^^^^^^^^^^^^

- **Changed:** The reference manual has been restructured to get a more logical organization. In the new version, there are fewer top-level chapters, and, in the HTML format, chapters are split into smaller pages. This is still a work in progress and further restructuring is expected in the next versions of Coq (`CEP#43 `_, implemented in `#11601 `_, `#11871 `_, `#11914 `_, `#12148 `_, `#12172 `_, `#12239 `_ and `#12330 `_, effort inspired by Matthieu Sozeau, led by Théo Zimmermann, with help and reviews from Jim Fehrle, Clément Pit-Claudel and others).

- **Changed:** Most of the grammar is now presented using the notation mechanism that has been used to present commands and tactics since Coq 8.8 and which is documented in :ref:`syntax-conventions` (`#11183 `_, `#11314 `_, `#11423 `_, `#11705 `_, `#11718 `_, `#11720 `_, `#11961 `_ and `#12103 `_, by Jim Fehrle, reviewed by Théo Zimmermann).
- **Added:** A glossary of terms and an index of attributes (`#11869 `_, `#12150 `_ and `#12224 `_, by Jim Fehrle and Théo Zimmermann, reviewed by Clément Pit-Claudel) - **Added:** A selector that allows switching between versions of the reference manual (`#12286 `_, by Clément Pit-Claudel). - **Fixed:** Most of the documented syntax has been thoroughly updated to make it accurate and easily understood. This was done using a semi-automated `doc_grammar` tool introduced for this purpose and through significant revisions to the text (`#9884 `_, `#10614 `_, `#11314 `_, `#11423 `_, `#11705 `_, `#11718 `_, `#11720 `_ `#11797 `_, `#11913 `_, `#11958 `_, `#11960 `_, `#11961 `_ and `#12103 `_, by Jim Fehrle, reviewed by Théo Zimmermann and Jason Gross). Infrastructure and dependencies ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - **Changed:** Minimal versions of dependencies for building the reference manual: now requires Sphinx >= 2.3.1 & < 3.0.0, sphinx_rtd_theme 0.4.3+ and sphinxcontrib-bibtex 0.4.2+. .. warning:: The reference manual is known not to build properly with Sphinx 3. (`#12224 `_, by Jim Fehrle and Théo Zimmermann). - **Removed:** Python 2 is no longer required in any part of the codebase (`#11245 `_, by Emilio Jesus Gallego Arias). Changes in 8.12.0 ~~~~~~~~~~~~~~~~~~~~~ **Notations** - **Added:** Simultaneous definition of terms and notations now support custom entries. Fixes `#11121 `_. (`#12523 `_, by Maxime Dénès). - **Fixed:** Printing bug with notations for n-ary applications used with applied references. (`#12683 `_, fixes `#12682 `_, by Hugo Herbelin). **Tactics** - **Fixed:** :tacn:`typeclasses eauto` (and discriminated hint bases) now correctly classify local variables as being unfoldable (`#12572 `_, fixes `#12571 `_, by Pierre-Marie Pédrot). **Tactic language** - **Fixed:** Excluding occurrences was causing an anomaly in tactics (e.g., :g:`pattern _ at L` where :g:`L` is :g:`-2`). (`#12541 `_, fixes `#12228 `_, by Pierre Roux). - **Fixed:** Parsing of multi-parameters Ltac2 types (`#12594 `_, fixes `#12595 `_, by Pierre-Marie Pédrot). **SSReflect** - **Fixed:** Do not store the full environment inside ssr ast_closure_term (`#12708 `_, fixes `#12707 `_, by Pierre-Marie Pédrot). **Commands and options** - **Fixed:** Properly report the mismatched magic number of vo files (`#12677 `_, fixes `#12513 `_, by Pierre-Marie Pédrot). - **Changed:** Arbitrary hints have been undeprecated, and their definition now triggers a standard warning instead (`#12678 `_, fixes `#11970 `_, by Pierre-Marie Pédrot). **CoqIDE** - **Fixed:** CoqIDE no longer exits when trying to open a file whose name is not a valid identifier (`#12562 `_, fixes `#10988 `_, by Vincent Laporte). **Infrastructure and dependencies** - **Fixed:** Running ``make`` in ``test-suite/`` twice (or more) in a row will no longer rebuild the ``modules/`` tests on subsequent runs, if they have not been modified in the meantime (`#12583 `_, fixes `#12582 `_, by Jason Gross). Changes in 8.12.1 ~~~~~~~~~~~~~~~~~~~~~ **Kernel** - **Fixed:** Incompleteness of conversion checking on problems involving :ref:`eta-expansion-sect` and :ref:`cumulative universe polymorphic inductive types ` (`#12738 `_, fixes `#7015 `_, by Gaëtan Gilbert). - **Fixed:** Polymorphic side-effects inside monomorphic definitions were incorrectly handled as not inlined. This allowed deriving an inconsistency (`#13331 `_, fixes `#13330 `_, by Pierre-Marie Pédrot). 
**Notations**

- **Fixed:** Undetected collision between a lonely notation and a notation in scope at printing time (`#12946 `_, fixes the first part of `#12908 `_, by Hugo Herbelin).

- **Fixed:** Printing of notations in custom entries with variables not mentioning an explicit level (`#13026 `_, fixes `#12775 `_ and `#13018 `_, by Hugo Herbelin).

**Tactics**

- **Added:** :tacn:`replace` and :tacn:`inversion` support registration of a :g:`core.identity`\-like equality in :g:`Type`, such as HoTT's :g:`path` (`#12847 `_, partially fixes `#12846 `_, by Hugo Herbelin).

- **Fixed:** Anomaly with :tacn:`injection` involving artificial dependencies disappearing by reduction (`#12816 `_, fixes `#12787 `_, by Hugo Herbelin).

**Tactic language**

- **Fixed:** Miscellaneous issues with locating tactic errors (`#13247 `_, fixes `#12773 `_ and `#12992 `_, by Hugo Herbelin).

**SSReflect**

- **Fixed:** Regression in error reporting after :tacn:`case `. A generic error message "Could not fill dependent hole in apply" was reported for any error following :tacn:`case ` or :tacn:`elim ` (`#12857 `_, fixes `#12837 `_, by Enrico Tassi).

**Commands and options**

- **Fixed:** Failures of :cmd:`Search` in the presence of primitive projections (`#13301 `_, fixes `#13298 `_, by Hugo Herbelin).

- **Fixed:** :cmd:`Search` supports filtering on parts of identifiers which are not proper identifiers themselves, such as :n:`"1"` (`#13351 `_, fixes `#13349 `_, by Hugo Herbelin).

**Tools**

- **Fixed:** Special symbols are now escaped in the index produced by coqdoc, avoiding collision with the syntax of the output format (`#12754 `_, fixes `#12752 `_, by Hugo Herbelin).

- **Fixed:** The `details` environment added in the 8.12 release can now be used as advertised in the reference manual (`#12772 `_, by Thomas Letan).

- **Fixed:** Targets such as ``print-pretty-timed`` in ``coq_makefile``\-made ``Makefile``\s no longer error in rare cases where ``--output-sync`` is not passed to make and the timing output gets interleaved in just the wrong way (`#13063 `_, fixes `#13062 `_, by Jason Gross).

**CoqIDE**

- **Fixed:** View menu "Display parentheses" (`#12794 `_ and `#13067 `_, fixes `#12793 `_, by Jean-Christophe Léchenet and Hugo Herbelin).

**Infrastructure and dependencies**

- **Added:** Coq is now tested against OCaml 4.11.1 (`#12972 `_, by Emilio Jesus Gallego Arias).

- **Fixed:** The reference manual can now build with Sphinx 3 (`#13011 `_, fixes `#12332 `_, by Théo Zimmermann and Jim Fehrle).

Changes in 8.12.2
~~~~~~~~~~~~~~~~~

**Notations**

- **Fixed:** 8.12 regression causing notations mentioning a coercion to be ignored (`#13436 `_, fixes `#13432 `_, by Hugo Herbelin).

**Tactics**

- **Fixed:** 8.12 regression: incomplete inference of implicit arguments in :tacn:`exists` (`#13468 `_, fixes `#13456 `_, by Hugo Herbelin).

Version 8.11
------------

Summary of changes
~~~~~~~~~~~~~~~~~~

The main changes brought by Coq version 8.11 are:

- :ref:`Ltac2<811Ltac2>`, a new tactic language for writing more robust, larger-scale tactics, with built-in support for datatypes and the multi-goal tactic monad.

- :ref:`Primitive floats<811PrimitiveFloats>` are integrated in terms and follow the binary64 format of the IEEE 754 standard, as specified in the `Coq.Floats.Floats` library.

- :ref:`Cleanups<811Sections>` of the section mechanism, delayed proofs and further restrictions of template polymorphism to fix soundness issues related to universes.
- New :ref:`unsafe flags<811UnsafeFlags>` to locally disable guard, positivity and universe checking. Reliance on these flags is always printed by :g:`Print Assumptions`.

- :ref:`Fixed bugs<811ExportBug>` of :g:`Export` and :g:`Import` that can have a significant impact on user developments (**common source of incompatibility!**).

- New interactive development method based on `vos` :ref:`interface files<811vos>`, allowing one to work on a file without recompiling the proof parts of its dependencies.

- New :g:`Arguments` annotation for :ref:`bidirectional type inference<811BidirArguments>` configuration for applications of references (e.g. constants, inductives).

- New :ref:`refine attribute<811RefineInstance>` for :cmd:`Instance`, which can be used instead of the removed ``Refine Instance Mode``.

- Generalization of the :g:`under` and :g:`over` :ref:`tactics<811SSRUnderOver>` of SSReflect to arbitrary relations.

- :ref:`Revision<811Reals>` of the :g:`Coq.Reals` library, its axiomatisation and instances of the constructive and classical real numbers.

Additionally, while the `omega` tactic is not yet deprecated in this version of Coq, it soon will be, and we already recommend that users switch to :tacn:`lia` in new proof scripts.

The ``dev/doc/critical-bugs`` file documents the known critical bugs of Coq and affected releases.

See the `Changes in 8.11+beta1`_ section and following sections for the detailed list of changes, including potentially breaking changes marked with **Changed**.

Coq's documentation is available at https://coq.github.io/doc/v8.11/api (documentation of the ML API), https://coq.github.io/doc/v8.11/refman (reference manual), and https://coq.github.io/doc/v8.11/stdlib (documentation of the standard library).

Maxime Dénès, Emilio Jesús Gallego Arias, Gaëtan Gilbert, Michael Soegtrop and Théo Zimmermann worked on maintaining and improving the continuous integration system and package building infrastructure.

The OPAM repository for Coq packages has been maintained by Guillaume Claret, Karl Palmskog, Matthieu Sozeau and Enrico Tassi with contributions from many users. A list of packages is available at https://coq.inria.fr/opam/www/.

The 61 contributors to this version are Michael D. Adams, Guillaume Allais, Helge Bahmann, Langston Barrett, Guillaume Bertholon, Frédéric Besson, Simon Boulier, Michele Caci, Tej Chajed, Arthur Charguéraud, Cyril Cohen, Frédéric Dabrowski, Arthur Azevedo de Amorim, Maxime Dénès, Nikita Eshkeev, Jim Fehrle, Emilio Jesús Gallego Arias, Paolo G. Giarrusso, Gaëtan Gilbert, Georges Gonthier, Jason Gross, Samuel Gruetter, Armaël Guéneau, Hugo Herbelin, Florent Hivert, Jasper Hugunin, Shachar Itzhaky, Jan-Oliver Kaiser, Robbert Krebbers, Vincent Laporte, Olivier Laurent, Samuel Lelièvre, Nicholas Lewycky, Yishuai Li, Jose Fernando Lopez Fernandez, Andreas Lynge, Kenji Maillard, Erik Martin-Dorel, Guillaume Melquiond, Alexandre Moine, Oliver Nash, Wojciech Nawrocki, Antonio Nikishaev, Pierre-Marie Pédrot, Clément Pit-Claudel, Lars Rasmusson, Robert Rand, Talia Ringer, JP Rodi, Pierre Roux, Kazuhiko Sakaguchi, Vincent Semeria, Michael Soegtrop, Matthieu Sozeau, spanjel, Claude Stolze, Enrico Tassi, Laurent Théry, James R. Wilcox, Xia Li-yao and Théo Zimmermann.

Many power users helped to improve the design of the new features via the issue and pull request system, the Coq development mailing list, the coq-club@inria.fr mailing list or the `Discourse forum `_.
It would be impossible to mention exhaustively the names of everybody who to some extent influenced the development.

Version 8.11 is the sixth release of Coq developed on a time-based development cycle. Its development spanned 3 months from the release of Coq 8.10. Pierre-Marie Pédrot is the release manager and maintainer of this release, assisted by Matthieu Sozeau. This release is the result of 2000+ commits and 300+ PRs merged, closing 75+ issues.

| Paris, November 2019,
| Matthieu Sozeau for the Coq development team
|

Changes in 8.11+beta1
~~~~~~~~~~~~~~~~~~~~~

**Kernel**

.. _811PrimitiveFloats:

- **Added:** Built-in support for floating-point arithmetic, allowing one to devise efficient reflection tactics involving numerical computation. Primitive floats are added in the language of terms, following the binary64 format of the IEEE 754 standard, and the related operations are implemented for the different reduction engines of Coq by using the corresponding processor operators in rounding-to-nearest-even. The properties of these operators are axiomatized in the theory :g:`Coq.Floats.FloatAxioms` which is part of the library :g:`Coq.Floats.Floats`. See Section :ref:`primitive-floats` (`#9867 `_, closes `#8276 `_, by Guillaume Bertholon, Erik Martin-Dorel, Pierre Roux).

- **Changed:** Internal definitions generated by :tacn:`abstract`\-like tactics are now inlined inside :cmd:`Qed`\-terminated universe polymorphic definitions, similarly to what happens for their monomorphic counterparts (`#10439 `_, by Pierre-Marie Pédrot).

.. _811Sections:

- **Fixed:** Section data is now part of the kernel. Solves a soundness issue in interactive mode where global monomorphic universe constraints would be dropped when forcing a delayed opaque proof inside a polymorphic section. Also relaxes the nesting criterion for sections, as polymorphic sections can now appear inside a monomorphic one (`#10664 `_, by Pierre-Marie Pédrot).

- **Changed:** Using ``SProp`` is now allowed by default, without needing to pass ``-allow-sprop`` or use :flag:`Allow StrictProp` (`#10811 `_, by Gaëtan Gilbert).

**Specification language, type inference**

.. _811BidirArguments:

- **Added:** Annotation in `Arguments` for bidirectionality hints: it is now possible to tell type inference to use type information from the context once the `n` first arguments of an application are known. The syntax is: `Arguments foo x y & z`. See :ref:`bidirectionality_hints` (`#10049 `_, by Maxime Dénès with help from Enrico Tassi). A small illustrative sketch appears a few entries below.

- **Added:** Record fields can be annotated to prevent them from being used as canonical projections; see :ref:`canonicalstructures` for details (`#10076 `_, by Vincent Laporte).

- **Changed:** Require parentheses around nested disjunctive patterns, so that pattern and term syntax are consistent; match branch patterns no longer require parentheses for notation at level 100 or more.

  .. warning:: Incompatibilities

     + In :g:`match p with (_, (0|1)) => ...` parentheses may no longer be omitted around :n:`0|1`.
     + Notation :g:`(p | q)` now potentially clashes with core pattern syntax, and should be avoided. ``-w disj-pattern-notation`` flags such :cmd:`Notation`.

  See :ref:`extendedpatternmatching` for details (`#10167 `_, by Georges Gonthier).

- **Changed:** :cmd:`Function` always opens a proof when used with a ``measure`` or ``wf`` annotation, see :ref:`advanced-recursive-functions` for the updated documentation (`#10215 `_, by Enrico Tassi).
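
As a minimal, artificial sketch of the bidirectionality hint syntax described above (the function ``foo`` below is hypothetical and not part of the changelog entry):

.. coqtop:: in

   Definition foo (x y z : nat) : nat := x + y + z.

   Arguments foo x y & z.

Here the ``&`` mark tells type inference that, once ``x`` and ``y`` are known, the expected type of the application may be used when elaborating ``z``.
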
- **Changed:** The legacy command :cmd:`Add Morphism` always opens a proof and cannot be used inside a module type. In order to declare a module type parameter that happens to be a morphism, use :cmd:`Declare Morphism`. See :ref:`deprecated_syntax_for_generalized_rewriting` for the updated documentation (`#10215 `_, by Enrico Tassi).

- **Changed:** The universe polymorphism setting now applies from the opening of a section. In particular, it is no longer possible to mix polymorphic and monomorphic definitions in a section when there are no variables nor universe constraints defined in this section. This makes the behavior consistent with the documentation (`#10441 `_, by Pierre-Marie Pédrot).

- **Added:** The :cmd:`Section` command now accepts the "universes" attribute. In addition to setting the section universe polymorphism, it also locally sets the universe polymorphic option inside the section (`#10441 `_, by Pierre-Marie Pédrot).

- **Fixed:** ``Program Fixpoint`` now uses ``ex`` and ``sig`` to make telescopes involving ``Prop`` types (`#10758 `_, by Gaëtan Gilbert, fixing `#10757 `_ reported by Xavier Leroy).

- **Changed:** Output of the :cmd:`Print` and :cmd:`About` commands. Arguments meta-data is now displayed as the corresponding :cmd:`Arguments` command instead of the human-targeted prose used in previous Coq versions (`#10985 `_, by Gaëtan Gilbert).

.. _811RefineInstance:

- **Added:** :attr:`refine` attribute for :cmd:`Instance`, a more predictable version of the old ``Refine Instance Mode`` which unconditionally opens a proof (`#10996 `_, by Gaëtan Gilbert).

- **Changed:** The unsupported attribute error is now an error-by-default warning, meaning it can be disabled (`#10997 `_, by Gaëtan Gilbert).

- **Fixed:** Bugs that sometimes prevented defining valid (co)fixpoints with implicit arguments in the presence of local definitions, see `#3282 `_ (`#11132 `_, by Hugo Herbelin).

  .. example::

     The following features an implicit argument after a local definition. It was wrongly rejected.

     .. coqtop:: in

        Definition f := fix f (o := true) {n : nat} m {struct m} := match m with 0 => 0 | S m' => f (n:=n+1) m' end.

**Notations**

- **Added:** Numeral Notations now support sorts in the input to printing functions (e.g., numeral notations can be defined for terms containing things like `@cons Set nat nil`) (`#9883 `_, by Jason Gross).

- **Added:** The :cmd:`Notation` and :cmd:`Infix` commands now support the `deprecated` attribute (`#10180 `_, by Maxime Dénès).

- **Deprecated:** The former `compat` annotation for notations is deprecated, and its semantics changed. It is now made equivalent to using a `deprecated` attribute, and is no longer connected with the `-compat` command-line flag (`#10180 `_, by Maxime Dénès).

- **Changed:** A simplification of parsing rules could cause a slight change of parsing precedences for the very rare users who defined notations with `constr` at level strictly between 100 and 200 and used these notations on the right-hand side of a cast operator (`:`, `<:`, `<<:`) (`#10963 `_, by Théo Zimmermann, simplification initially noticed by Jim Fehrle).

**Tactics**

- **Added:** Syntax :n:`injection @term as [= {+ @intropattern} ]` as an alternative to :n:`injection @term as {+ @simple_intropattern}` using the standard injection intropattern syntax (`#9288 `_, by Hugo Herbelin).

- **Changed:** Reimplementation of the :tacn:`zify` tactic. The tactic is more efficient and copes with dependent hypotheses. It can also be extended by redefining the tactic ``zify_post_hook``.
(`#9856 `_, fixes `#8898 `_, `#7886 `_, `#9848 `_ and `#5155 `_, by Frédéric Besson). - **Changed:** The goal selector tactical ``only`` now checks that the goal range it is given is valid instead of ignoring goals out of the focus range (`#10318 `_, by Gaëtan Gilbert). - **Added:** Flags :flag:`Lia Cache`, :flag:`Nia Cache` and :flag:`Nra Cache`. (`#10765 `_, by Frédéric Besson, see `#10772 `_ for use case). - **Added:** The :tacn:`zify` tactic is now aware of `Z.to_N`. (`#10774 `_, grants `#9162 `_, by Kazuhiko Sakaguchi). - **Changed:** The :tacn:`assert_succeeds` and :tacn:`assert_fails` tactics now only run their tactic argument once, even if it has multiple successes. This prevents blow-up and looping from using multisuccess tactics with :tacn:`assert_succeeds`. (`#10966 `_ fixes `#10965 `_, by Jason Gross). - **Fixed:** The :tacn:`assert_succeeds` and :tacn:`assert_fails` tactics now behave correctly when their tactic fully solves the goal. (`#10966 `_ fixes `#9114 `_, by Jason Gross). **Tactic language** .. _811Ltac2: - **Added:** Ltac2, a new version of the tactic language Ltac, that doesn't preserve backward compatibility, has been integrated in the main Coq distribution. It is still experimental, but we already recommend users of advanced Ltac to start using it and report bugs or request enhancements. See its documentation in the :ref:`dedicated chapter ` (`#10002 `_, plugin authored by Pierre-Marie Pédrot, with contributions by various users, integration by Maxime Dénès, help on integrating / improving the documentation by Théo Zimmermann and Jim Fehrle). - **Added:** Ltac2 tactic notations with “constr” arguments can specify the notation scope for these arguments; see :ref:`ltac2_notations` for details (`#10289 `_, by Vincent Laporte). - **Changed:** White spaces are forbidden in the :n:`&@ident` syntax for ltac2 references that are described in :ref:`ltac2_built-in-quotations` (`#10324 `_, fixes `#10088 `_, authored by Pierre-Marie Pédrot). **SSReflect** .. _811SSRUnderOver: - **Added:** Generalize tactics :tacn:`under` and :tacn:`over` for any registered relation. More precisely, assume the given context lemma has type `forall f1 f2, .. -> (forall i, R1 (f1 i) (f2 i)) -> R2 f1 f2`. The first step performed by :tacn:`under` (since Coq 8.10) amounts to calling the tactic :tacn:`rewrite `, which itself relies on :tacn:`setoid_rewrite` if need be. So this step was already compatible with a double implication or setoid equality for the conclusion head symbol `R2`. But a further step consists in tagging the generated subgoal `R1 (f1 i) (?f2 i)` to protect it from unwanted evar instantiation, and get `Under_rel _ R1 (f1 i) (?f2 i)` that is displayed as ``'Under[ f1 i ]``. In Coq 8.10, this second (convenience) step was only performed when `R1` was Leibniz' `eq` or `iff`. Now, it is also performed for any relation `R1` which has a ``RewriteRelation`` instance (a `RelationClasses.Reflexive` instance being also needed so :tacn:`over` can discharge the ``'Under[ _ ]`` goal by instantiating the hidden evar.) This feature generalizing support for setoid-like relations is enabled as soon as we do both ``Require Import ssreflect.`` and ``Require Setoid.`` Finally, a rewrite rule ``UnderE`` has been added if one wants to "unprotect" the evar, and instantiate it manually with another rule than reflexivity (i.e., without using the :tacn:`over` tactic nor the ``over`` rewrite rule). 
See also Section :ref:`under_ssr` (`#10022 `_, by Erik Martin-Dorel, with suggestions and review by Enrico Tassi and Cyril Cohen). - **Added:** A :g:`void` notation for the standard library empty type (:g:`Empty_set`) (`#10932 `_, by Arthur Azevedo de Amorim). - **Added:** Lemma :g:`inj_compr` to :g:`ssr.ssrfun` (`#11136 `_, by Cyril Cohen). **Commands and options** - **Removed:** Deprecated flag `Refine Instance Mode` (`#9530 `_, fixes `#3632 `_, `#3890 `_ and `#4638 `_ by Maxime Dénès, review by Gaëtan Gilbert). - **Changed:** :cmd:`Fail` does not catch critical errors (including "stack overflow") anymore (`#10173 `_, by Gaëtan Gilbert). - **Removed:** Undocumented :n:`Instance : !@type` syntax (`#10185 `_, by Gaëtan Gilbert). - **Removed:** Deprecated ``Show Script`` command (`#10277 `_, by Gaëtan Gilbert). .. _811UnsafeFlags: - **Added:** Unsafe commands to enable/disable guard checking, positivity checking and universes checking (providing a local `-type-in-type`). See :ref:`controlling-typing-flags` (`#10291 `_ by Simon Boulier). .. _811ExportBug: - **Fixed:** Two bugs in :cmd:`Export`. This can have an impact on the behavior of the :cmd:`Import` command on libraries. `Import A` when `A` imports `B` which exports `C` was importing `C`, whereas :cmd:`Import` is not transitive. Also, after `Import A B`, the import of `B` was sometimes incomplete (`#10476 `_, by Maxime Dénès). .. warning:: This is a common source of incompatibilities in projects migrating to Coq 8.11. - **Changed:** Output generated by :flag:`Printing Dependent Evars Line` flag used by the Prooftree tool in Proof General. (`#10489 `_, closes `#4504 `_, `#10399 `_ and `#10400 `_, by Jim Fehrle). - **Added:** Optionally highlight the differences between successive proof steps in the :cmd:`Show Proof` command. Experimental; only available in coqtop and Proof General for now, may be supported in other IDEs in the future. (`#10494 `_, by Jim Fehrle). - **Removed:** Legacy commands ``AddPath``, ``AddRecPath``, and ``DelPath`` which were undocumented, broken variants of :cmd:`Add LoadPath`, :cmd:`Add Rec LoadPath`, and :cmd:`Remove LoadPath` (`#11187 `_, by Maxime Dénès and Théo Zimmermann). **Tools** .. _811vos: - **Added:** `coqc` now provides the ability to generate compiled interfaces. Use `coqc -vos foo.v` to skip all opaque proofs during the compilation of `foo.v`, and output a file called `foo.vos`. This feature is experimental. It enables working on a Coq file without the need to first compile the proofs contained in its dependencies (`#8642 `_ by Arthur Charguéraud, review by Maxime Dénès and Emilio Gallego). - **Added:** Command-line options `-require-import`, `-require-export`, `-require-import-from` and `-require-export-from`, as well as their shorthand, `-ri`, `-re`, `-refrom` and -`rifrom`. Deprecate confusing command line option `-require` (`#10245 `_ by Hugo Herbelin, review by Emilio Gallego). - **Changed:** Renamed `VDFILE` from `.coqdeps.d` to `..d` in the `coq_makefile` utility, where `` is the name of the output file given by the `-o` option. In this way two generated makefiles can coexist in the same directory. (`#10947 `_, by Kazuhiko Sakaguchi). - **Fixed:** ``coq_makefile`` now supports environment variable ``COQBIN`` with no ending ``/`` character (`#11068 `_, by Gaëtan Gilbert). **Standard library** - **Changed:** Moved the :tacn:`auto` hints of the `OrderedType` module into a new `ordered_type` database (`#9772 `_, by Vincent Laporte). 
- **Removed:** Deprecated modules `Coq.ZArith.Zlogarithm` and `Coq.ZArith.Zsqrt_compat` (`#9811 `_, by Vincent Laporte).

.. _811Reals:

- **Added:** Module `Reals.Cauchy.ConstructiveCauchyReals` defines constructive real numbers by Cauchy sequences of rational numbers (`#10445 `_, by Vincent Semeria, with the help and review of Guillaume Melquiond and Bas Spitters). This module is not meant to be imported directly; please import `Reals.Abstract.ConstructiveReals` instead.

- **Added:** New module `Reals.ClassicalDedekindReals` defines Dedekind real numbers as boolean-valued functions along with 3 logical axioms: limited principle of omniscience, excluded middle of negations, and functional extensionality. The exposed type :g:`R` in module :g:`Reals.Rdefinitions` now corresponds to these Dedekind reals, hidden behind an opaque module, which significantly reduces the number of axioms needed (see `Reals.Rdefinitions` and `Reals.Raxioms`), while preserving backward compatibility. Classical Dedekind reals are a quotient of constructive reals, which allows many constructive proofs to be transported to the classical case (`#10827 `_, by Vincent Semeria, based on discussions with Guillaume Melquiond, Bas Spitters and Hugo Herbelin, code review by Hugo Herbelin).

- **Added:** New lemmas on :g:`combine`, :g:`filter`, :g:`nodup`, :g:`nth`, and :g:`nth_error` functions on lists (`#10651 `_, and `#10731 `_, by Oliver Nash).

- **Changed:** The lemma :g:`filter_app` was moved to the :g:`List` module (`#10651 `_, by Oliver Nash).

- **Added:** Standard equivalence between weak excluded-middle and the classical instance of De Morgan's law, in module :g:`ClassicalFacts` (`#10895 `_, by Hugo Herbelin).

**Infrastructure and dependencies**

- **Changed:** Coq now officially supports OCaml 4.08. See the `INSTALL` file for details (`#10471 `_, by Emilio Jesús Gallego Arias).

Changes in 8.11.0
~~~~~~~~~~~~~~~~~

**Kernel**

- **Changed:** The native compilation (:tacn:`native_compute`) now creates a directory to contain temporary files instead of putting them in the root of the system temporary directory (`#11081 `_, by Gaëtan Gilbert).

- **Fixed:** `#11360 `_. Broken section closing when a template polymorphic inductive type depends on a section variable through its parameters (`#11361 `_, by Gaëtan Gilbert).

- **Fixed:** The type of :g:`Set+1` would be computed to be itself, leading to a proof of False (`#11422 `_, by Gaëtan Gilbert).

**Specification language, type inference**

- **Changed:** Heuristics for universe minimization to :g:`Set`: only minimize flexible universes (`#10657 `_, by Gaëtan Gilbert with help from Maxime Dénès and Matthieu Sozeau).

- **Fixed:** A dependency was missing when looking for default clauses in the algorithm for printing pattern matching clauses (`#11233 `_, by Hugo Herbelin, fixing `#11231 `_, reported by Barry Jay).

**Notations**

- **Fixed:** :cmd:`Print Visibility` was failing in the presence of only-printing notations (`#11276 `_, by Hugo Herbelin, fixing `#10750 `_).

- **Fixed:** Recursive notations with custom entries were incorrectly parsing `constr` instead of custom grammars (`#11311 `_, by Maxime Dénès, fixes `#9532 `_ and `#9490 `_).

**Tactics**

- **Changed:** The tactics :tacn:`eapply`, :tacn:`refine` and variants no longer allow shelved goals to be solved by typeclass resolution (`#10762 `_, by Matthieu Sozeau).
- **Fixed:** The optional string argument to :tacn:`time` is now properly quoted under :cmd:`Print Ltac` (`#11203 `_, fixes `#10971 `_, by Jason Gross).

- **Fixed:** Efficiency regression of :tacn:`lia` introduced in 8.10 by PR `#9725 `_ (`#11263 `_, fixes `#11063 `_, `#11242 `_ and `#11270 `_, by Frédéric Besson).

- **Deprecated:** The undocumented ``omega with`` tactic variant has been deprecated. Using :tacn:`lia` is the recommended replacement, though the old semantics of ``omega with *`` can be recovered with ``zify; omega`` (`#11337 `_, by Emilio Jesus Gallego Arias).

- **Fixed:** For compatibility reasons, in 8.11, :tacn:`zify` does not support :g:`Z.pow_pos` by default. It can be enabled by explicitly loading the module :g:`ZifyPow` (`#11430 `_, by Frédéric Besson, fixes `#11191 `_).

**Tactic language**

- **Fixed:** Syntax of tactic `cofix ... with ...` was broken since Coq 8.10 (`#11241 `_, by Hugo Herbelin).

**Commands and options**

- **Deprecated:** The `-load-ml-source` and `-load-ml-object` command line options have been deprecated; their use was very limited, and you can achieve the same by adding object files in the linking step or by using a plugin (`#11428 `_, by Emilio Jesus Gallego Arias).

**Tools**

- **Fixed:** ``coqtop --version`` was broken when called in the middle of an installation process (`#11255 `_, by Hugo Herbelin, fixing `#11254 `_).

- **Deprecated:** The ``-quick`` command is renamed to ``-vio``, for consistency with the new ``-vos`` and ``-vok`` flags. Usage of ``-quick`` is now deprecated (`#11280 `_, by Arthur Charguéraud).

- **Fixed:** ``coq_makefile`` does not break when using the ``CAMLPKGS`` variable together with an unpacked (``mllib``) plugin (`#11357 `_, by Gaëtan Gilbert).

- **Fixed:** ``coqdoc`` with option ``-g`` (Gallina only) now correctly prints commands with attributes (`#11394 `_, fixes `#11353 `_, by Karl Palmskog).

**CoqIDE**

- **Changed:** CoqIDE now uses the GtkSourceView native implementation of the autocomplete mechanism (`#11400 `_, by Pierre-Marie Pédrot).

**Standard library**

- **Removed:** Export of module :g:`RList` in :g:`Ranalysis` and :g:`Ranalysis_reg`. Module :g:`RList` is still there but must be imported explicitly where required (`#11396 `_, by Michael Soegtrop).

**Infrastructure and dependencies**

- **Added:** Build date can now be overridden by setting the `SOURCE_DATE_EPOCH` environment variable (`#11227 `_, by Bernhard M. Wiedemann).

Changes in 8.11.1
~~~~~~~~~~~~~~~~~

**Kernel**

- **Fixed:** Allow more inductive types in `Unset Positivity Checking` mode (`#11811 `_, by Simon Boulier).

**Notations**

- **Fixed:** Bugs in dealing with precedences of notations in custom entries (`#11530 `_, by Hugo Herbelin, fixing in particular `#9517 `_, `#9519 `_, `#9521 `_, `#11331 `_).

- **Added:** In primitive floats, print a warning when parsing a decimal value that is not exactly a binary64 floating-point number. For instance, parsing 0.1 will print a warning whereas parsing 0.5 won't (`#11859 `_, by Pierre Roux). A small illustrative sketch appears at the end of this section.

**CoqIDE**

- **Fixed:** Compiling file paths containing spaces (`#10008 `_, by snyke7, fixing `#11595 `_).

**Infrastructure and dependencies**

- **Added:** Bump official OCaml support and CI testing to 4.10.0 (`#11131 `_, `#11123 `_, `#11102 `_, by Emilio Jesus Gallego Arias, Jacques-Henri Jourdan, Guillaume Melquiond, and Guillaume Munch-Maccagnoni).

**Miscellaneous**

- **Fixed:** :cmd:`Extraction Implicit` on the constructor of a record was leading to an anomaly (`#11329 `_, by Hugo Herbelin, fixes `#11114 `_).
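
As a small sketch of the parsing warning described in the Notations entry above (a hypothetical snippet, assuming only the standard ``Floats`` library):

.. coqtop:: in

   From Coq Require Import Floats.

   Open Scope float_scope.

   Check 0.5. (* exactly representable in binary64: no warning *)

   Check 0.1. (* not exactly representable in binary64: a warning is printed *)
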
Changes in 8.11.2 ~~~~~~~~~~~~~~~~~ **Kernel** - **Fixed:** Using :cmd:`Require` inside a section caused an anomaly when closing the section. (`#11972 `_, by Gaëtan Gilbert, fixing `#11783 `_, reported by Attila Boros). **Tactics** - **Fixed:** Anomaly with induction schemes whose conclusion is not normalized (`#12116 `_, by Hugo Herbelin; fixes `#12045 `_) - **Fixed:** Loss of location of some tactic errors (`#12223 `_, by Hugo Herbelin; fixes `#12152 `_ and `#12255 `_). **Commands and options** - **Changed:** Ignore -native-compiler option when built without native compute support. (`#12070 `_, by Pierre Roux). **CoqIDE** - **Changed:** CoqIDE now uses native window frames by default on Windows. The GTK window frames can be restored by setting the `GTK_CSD` environment variable to `1` (`#12060 `_, fixes `#11080 `_, by Attila Gáspár). - **Fixed:** New patch presumably fixing the random Coq 8.11 segfault issue with CoqIDE completion (`#12068 `_, by Hugo Herbelin, presumably fixing `#11943 `_). - **Fixed:** Highlighting style consistently applied to all three buffers of CoqIDE (`#12106 `_, by Hugo Herbelin; fixes `#11506 `_). Version 8.10 ------------ Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.10 contains two major new features: support for a native fixed-precision integer type and a new sort :math:`\SProp` of strict propositions. It is also the result of refinements and stabilization of previous features, deprecations or removals of deprecated features, cleanups of the internals of the system and API, and many documentation improvements. This release includes many user-visible changes, including deprecations that are documented in the next subsection, and new features that are documented in the reference manual. Here are the most important user-visible changes: - Kernel: - A notion of primitive object was added to the calculus. Its first instance is primitive cyclic unsigned integers, axiomatized in module :g:`UInt63`. See Section :ref:`primitive-integers`. The `Coq.Numbers.Cyclic.Int31` library is deprecated (`#6914 `_, by Maxime Dénès, Benjamin Grégoire and Vincent Laporte, with help and reviews from many others). - The :math:`\SProp` sort of definitionally proof-irrelevant propositions was introduced. :math:`\SProp` allows to mark proof terms as irrelevant for conversion, and is treated like :math:`\Prop` during extraction. It is enabled using the `-allow-sprop` command-line flag or the :flag:`Allow StrictProp` flag. See Chapter :ref:`sprop` (`#8817 `_, by Gaëtan Gilbert). - The unfolding heuristic in termination checking was made more complete, allowing more constants to be unfolded to discover valid recursive calls. Performance regression may occur in Fixpoint declarations without an explicit ``{struct}`` annotation, since guessing the decreasing argument can now be more expensive (`#9602 `_, by Enrico Tassi). - Universes: - Added Subgraph variant to :cmd:`Print Universes`. Try for instance :g:`Print Universes Subgraph(sigT2.u1 sigT_of_sigT2.u1 projT3_eq.u1).` (`#8451 `_, by Gaëtan Gilbert). - Added private universes for opaque polymorphic constants, see the documentation for the :flag:`Private Polymorphic Universes` flag, and unset it to get the previous behavior (`#8850 `_, by Gaëtan Gilbert). - Notations: - New command :cmd:`String Notation` to register string syntax for custom inductive types (`#8965 `_, by Jason Gross). - Experimental: :ref:`Number Notations ` now parse decimal constants such as ``1.02e+01`` or ``10.2``. Parsers added for :g:`Q` and :g:`R`. 
  In the rare case when such numeral notations were used in a development along with :g:`Q` or :g:`R`, they may have to be removed or disambiguated through explicit scope annotations (`#8764 `_, by Pierre Roux). A small sketch of the new decimal constants appears after this summary.

- Ltac backtraces can be turned on using the :flag:`Ltac Backtrace` flag, which is off by default (`#9142 `_, fixes `#7769 `_ and `#7385 `_, by Pierre-Marie Pédrot).

- The tactics :tacn:`lia`, :tacn:`nia`, :tacn:`lra`, :tacn:`nra` are now using a novel Simplex-based proof engine. In case of regression, unset :flag:`Simplex` to get the venerable Fourier-based engine (`#8457 `_, by Frédéric Besson).

- SSReflect:

  - New intro patterns:

    - temporary introduction: `=> +`
    - block introduction: `=> [^ prefix ] [^~ suffix ]`
    - fast introduction: `=> >`
    - tactics as views: `=> /ltac:mytac`
    - replace hypothesis: `=> {}H`

    See Section :ref:`introduction_ssr` (`#6705 `_, by Enrico Tassi, with help from Maxime Dénès, ideas coming from various users).

  - New tactic :tacn:`under` to rewrite under binders, given an extensionality lemma:

    - interactive mode: :n:`under @term`, associated terminator: :tacn:`over`
    - one-liner mode: :n:`under @term do [@tactic | ...]`

    It can take occurrence switches, contextual patterns, and intro patterns: :g:`under {2}[in RHS]eq_big => [i|i ?]` (`#9651 `_, by Erik Martin-Dorel and Enrico Tassi).

- :cmd:`Combined Scheme` now works when inductive schemes are generated in sort :math:`\Type`. It used to be limited to sort `Prop` (`#7634 `_, by Théo Winterhalter).

- A new registration mechanism for references from ML code to Coq constructs has been added (`#186 `_, by Emilio Jesús Gallego Arias, Maxime Dénès and Vincent Laporte).

- CoqIDE:

  - CoqIDE now depends on gtk+3 and lablgtk3 instead of gtk+2 and lablgtk2. The INSTALL file available in the Coq sources has been updated to list the new dependencies (`#9279 `_, by Hugo Herbelin, with help from Jacques Garrigue, Emilio Jesús Gallego Arias, Michael Soegtrop and Vincent Laporte).

  - Smart input for Unicode characters. For example, typing ``\alpha`` then ``Shift+Space`` will insert the Greek letter alpha. A larger number of default bindings are provided, following the LaTeX naming convention. Bindings can be customized, either globally, or on a per-project basis. See Section :ref:`coqide-unicode` for details (`#8560 `_, by Arthur Charguéraud).

- Infrastructure and dependencies:

  - Coq 8.10 requires OCaml >= 4.05.0, bumped from 4.02.3. See the `INSTALL` file for more information on dependencies (`#7522 `_, by Emilio Jesús Gallego Arias).

  - Coq 8.10 doesn't need Camlp5 to build anymore. It now includes a fork of the core parsing library that Coq uses, which is a small subset of the whole Camlp5 distribution. In particular, this subset doesn't depend on the OCaml AST, allowing easier compilation and testing on experimental OCaml versions. Coq also ships a new parser `coqpp` that plugin authors must switch to (`#7902 `_, `#7979 `_, `#8161 `_, `#8667 `_, and `#8945 `_, by Pierre-Marie Pédrot and Emilio Jesús Gallego Arias). The Coq developers would like to thank Daniel de Rauglaudre for many years of continued support.

  - Coq now supports building with Dune, in addition to the traditional Makefile which is scheduled for deprecation (`#6857 `_, by Emilio Jesús Gallego Arias, with help from Rudi Grinberg). Experimental support for building Coq projects has been integrated in Dune at the same time, providing an `improved experience `_ for plugin developers. We thank the Dune team for their work supporting Coq.
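
As a small sketch of the new decimal constants for :g:`Q` mentioned in the summary above (a hypothetical snippet, assuming only the standard `QArith` library):

.. coqtop:: in

   From Coq Require Import QArith.

   Open Scope Q_scope.

   Check 10.2. (* parsed as a rational constant *)

   Check 1.02. (* likewise *)
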
Version 8.10 also comes with a bunch of smaller-scale changes and improvements regarding the different components of the system, including many additions to the standard library (see the next subsection for details). On the implementation side, the ``dev/doc/changes.md`` file documents the numerous changes to the implementation and improvements of interfaces. The file provides guidelines on porting a plugin to the new version and a plugin development tutorial originally made by Yves Bertot is now in `doc/plugin_tutorial`. The ``dev/doc/critical-bugs`` file documents the known critical bugs of Coq and affected releases. The efficiency of the whole system has seen improvements thanks to contributions from Gaëtan Gilbert, Pierre-Marie Pédrot, and Maxime Dénès. Maxime Dénès, Emilio Jesús Gallego Arias, Gaëtan Gilbert, Michael Soegtrop, Théo Zimmermann worked on maintaining and improving the continuous integration system and package building infrastructure. Coq is now continuously tested against the OCaml trunk, in addition to the oldest supported and latest OCaml releases. Coq's documentation for the development branch is now deployed continuously at https://coq.github.io/doc/master/api (documentation of the ML API), https://coq.github.io/doc/master/refman (reference manual), and https://coq.github.io/doc/master/stdlib (documentation of the standard library). Similar links exist for the `v8.10` branch. The OPAM repository for Coq packages has been maintained by Guillaume Melquiond, Matthieu Sozeau, Enrico Tassi (who migrated it to opam 2) with contributions from many users. A list of packages is available at https://coq.inria.fr/opam/www/. The 61 contributors to this version are Tanaka Akira, Benjamin Barenblat, Yves Bertot, Frédéric Besson, Lasse Blaauwbroek, Martin Bodin, Joachim Breitner, Tej Chajed, Frédéric Chapoton, Arthur Charguéraud, Cyril Cohen, Lukasz Czajka, David A. Dalrymple, Christian Doczkal, Maxime Dénès, Andres Erbsen, Jim Fehrle, Emilio Jesus Gallego Arias, Gaëtan Gilbert, Matěj Grabovský, Simon Gregersen, Jason Gross, Samuel Gruetter, Hugo Herbelin, Jasper Hugunin, Mirai Ikebuchi, Chantal Keller, Matej Košík, Sam Pablo Kuper, Vincent Laporte, Olivier Laurent, Larry Darryl Lee Jr, Nick Lewycky, Yao Li, Yishuai Li, Assia Mahboubi, Simon Marechal, Erik Martin-Dorel, Thierry Martinez, Guillaume Melquiond, Kayla Ngan, Karl Palmskog, Pierre-Marie Pédrot, Clément Pit-Claudel, Pierre Roux, Kazuhiko Sakaguchi, Ryan Scott, Vincent Semeria, Gan Shen, Michael Soegtrop, Matthieu Sozeau, Enrico Tassi, Laurent Théry, Kamil Trzciński, whitequark, Théo Winterhalter, Xia Li-yao, Beta Ziliani and Théo Zimmermann. Many power users helped to improve the design of the new features via the issue and pull request system, the Coq development mailing list, the coq-club@inria.fr mailing list or the new Discourse forum. It would be impossible to mention exhaustively the names of everybody who to some extent influenced the development. Version 8.10 is the fifth release of Coq developed on a time-based development cycle. Its development spanned 6 months from the release of Coq 8.9. Vincent Laporte is the release manager and maintainer of this release. This release is the result of ~2500 commits and ~650 PRs merged, closing 150+ issues. | Santiago de Chile, April 2019, | Matthieu Sozeau for the Coq development team | Other changes in 8.10+beta1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Command-line tools and options: - The use of `coqtop` as a compiler has been deprecated, in favor of `coqc`. 
Consequently option `-compile` will stop to be accepted in the next release. `coqtop` is now reserved to interactive use (`#9095 `_, by Emilio Jesús Gallego Arias). - New option ``-topfile filename``, which will set the current module name (*à la* ``-top``) based on the filename passed, taking into account the proper ``-R``/``-Q`` options. For example, given ``-R Foo foolib`` using ``-topfile foolib/bar.v`` will set the module name to ``Foo.Bar``. CoqIDE now properly sets the module name for a given file based on its path (`#8991 `_, closes `#8989 `_, by Gaëtan Gilbert). - Experimental: Coq flags and options can now be set on the command-line, e.g. ``-set "Universe Polymorphism=true"`` (`#9876 `_, by Gaëtan Gilbert). - The `-native-compiler` flag of `coqc` and `coqtop` now takes an argument which can have three values: - `no` disables native_compute - `yes` enables native_compute and precompiles `.v` files to native code - `ondemand` enables native_compute but compiles code only when `native_compute` is called The default value is `ondemand`. Note that this flag now has priority over the configure flag of the same name. A new `-bytecode-compiler` flag for `coqc` and `coqtop` controls whether conversion can use the VM. The default value is `yes`. (`#8870 `_, by Maxime Dénès) - The pretty timing diff scripts (flag `TIMING=1` to a `coq_makefile`\-made `Makefile`, also `tools/make-both-single-timing-files.py`, `tools/make-both-time-files.py`, and `tools/make-one-time-file.py`) now correctly support non-UTF-8 characters in the output of `coqc` / `make` as well as printing to stdout, on both python2 and python3 (`#9872 `_, closes `#9767 `_ and `#9705 `_, by Jason Gross) - coq_makefile's install target now errors if any file to install is missing (`#9906 `_, by Gaëtan Gilbert). - Preferences from ``coqide.keys`` are no longer overridden by modifiers preferences in ``coqiderc`` (`#10014 `_, by Hugo Herbelin). - Specification language, type inference: - Fixing a missing check in interpreting instances of existential variables that are bound to local definitions. Might exceptionally induce an overhead if the cost of checking the conversion of the corresponding definitions is additionally high (`#8217 `_, closes `#8215 `_, by Hugo Herbelin). - A few improvements in inference of the return clause of `match` that can exceptionally introduce incompatibilities. This can be solved by writing an explicit `return` clause, sometimes even simply an explicit `return _` clause (`#262 `_, by Hugo Herbelin). - Using non-projection values with the projection syntax is not allowed. For instance :g:`0.(S)` is not a valid way to write :g:`S 0`. Projections from non-primitive (emulated) records are allowed with warning "nonprimitive-projection-syntax" (`#8829 `_, by Gaëtan Gilbert). - An option and attributes to control the automatic decision to declare an inductive type as template polymorphic were added. Warning "auto-template" (off by default) can trigger when an inductive is automatically declared template polymorphic without the attribute. Inductive types declared by Funind will never be template polymorphic. (`#8488 `_, by Gaëtan Gilbert) - Notations: - New command :cmd:`Declare Scope` to explicitly declare a scope name before any use of it. Implicit declaration of a scope at the time of :cmd:`Bind Scope`, :cmd:`Delimit Scope`, :cmd:`Undelimit Scope`, or :cmd:`Notation` is deprecated (`#7135 `_, by Hugo Herbelin). - Various bugs have been fixed (e.g. 
`#9214 `_ on removing spurious parentheses on abbreviations shortening a strict prefix of an application, by Hugo Herbelin). - :cmd:`Number Notation` now support inductive types in the input to printing functions (e.g., numeral notations can be defined for terms containing things like :g:`@cons nat O O`), and parsing functions now fully normalize terms including parameters of constructors (so that, e.g., a numeral notation whose parsing function outputs a proof of :g:`Nat.gcd x y = 1` will no longer fail to parse due to containing the constant :g:`Nat.gcd` in the parameter-argument of :g:`eq_refl`) (`#9874 `_, closes `#9840 `_ and `#9844 `_, by Jason Gross). - Deprecated compatibility notations have actually been removed. Uses of these notations are generally easy to fix thanks to the hint contained in the deprecation warning emitted by Coq 8.8 and 8.9. For projects that require more than a handful of such fixes, there is `a script `_ that will do it automatically, using the output of ``coqc`` (`#8638 `_, by Jason Gross). - Allow inspecting custom grammar entries by :cmd:`Print Custom Grammar` (`#10061 `_, fixes `#9681 `_, by Jasper Hugunin, review by Pierre-Marie Pédrot and Hugo Herbelin). - The `quote plugin `_ was removed. If some users are interested in maintaining this plugin externally, the Coq development team can provide assistance for extracting the plugin and setting up a new repository (`#7894 `_, by Maxime Dénès). - Ltac: - Tactic names are no longer allowed to clash, even if they are not defined in the same section. For example, the following is no longer accepted: :g:`Ltac foo := idtac. Section S. Ltac foo := fail. End S.` (`#8555 `_, by Maxime Dénès). - Names of existential variables occurring in Ltac functions (e.g. :g:`?[n]` or :g:`?n` in terms - not in patterns) are now interpreted the same way as other variable names occurring in Ltac functions (`#7309 `_, by Hugo Herbelin). - Tactics: - Removed the deprecated `romega` tactic (`#8419 `_, by Maxime Dénès and Vincent Laporte). - Hint declaration and removal should now specify a database (e.g. `Hint Resolve foo : database`). When the database name is omitted, the hint is added to the `core` database (as previously), but a deprecation warning is emitted (`#8987 `_, by Maxime Dénès). - There are now tactics in `PreOmega.v` called `Z.div_mod_to_equations`, `Z.quot_rem_to_equations`, and `Z.to_euclidean_division_equations` (which combines the `div_mod` and `quot_rem` variants) which allow :tacn:`lia`, :tacn:`nia`, etc to support `Z.div` and `Z.modulo` (`Z.quot` and `Z.rem`, respectively), by posing the specifying equation for `Z.div` and `Z.modulo` before replacing them with atoms (`#8062 `_, by Jason Gross). - The syntax of the :tacn:`autoapply` tactic was fixed to conform with preexisting documentation: it now takes a `with` clause instead of a `using` clause (`#9524 `_, closes `#7632 `_, by Théo Zimmermann). - Modes are now taken into account by :tacn:`typeclasses eauto` for local hypotheses (`#9996 `_, fixes `#5752 `_, by Maxime Dénès, review by Pierre-Marie Pédrot). - New variant :tacn:`change_no_check` of :tacn:`change`, usable as a documented replacement of `convert_concl_no_check` (`#10012 `_, `#10017 `_, `#10053 `_, and `#10059 `_, by Hugo Herbelin and Paolo G. Giarrusso). - The simplified value returned by :tacn:`field_simplify` is not always a fraction anymore. When the denominator is :g:`1`, it returns :g:`x` while previously it was returning :g:`x/1`. 
  This change could break code that was post-processing applications of :tacn:`field_simplify` to get rid of these :g:`x/1` (`#9854 `_, by Laurent Théry, with help from Michael Soegtrop, Maxime Dénès, and Vincent Laporte).

- SSReflect:

  - Clear discipline made consistent across the entire proof language. Whenever a clear switch `{x..}` comes immediately before an existing proof context entry (used as a view, as a rewrite rule or as a name for a new context entry) then such an entry is cleared too. E.g., the following sentences are elaborated as follows (when H is an existing proof context entry):

    - `=> {x..} H` -> `=> {x..H} H`
    - `=> {x..} /H` -> `=> /v {x..H}`
    - `rewrite {x..} H` -> `rewrite E {x..H}`

    (`#9341 `_, by Enrico Tassi).

  - `inE` now expands `y \in r x` when `r` is a `simpl_rel`. New `{pred T}` notation for a `pred T` alias in the `pred_sort` coercion class, simplified `predType` interface: `pred_class` and `mkPredType` deprecated, `{pred T}` and `PredType` should be used instead. `if c return t then ...` now expects `c` to be a variable bound in `t`. New `nonPropType` interface matching types that do _not_ have sort `Prop`. New `relpre R f` definition for the preimage of a relation R under f (`#9995 `_, by Georges Gonthier).

- Commands:

  - Binders for an :cmd:`Instance` now act more like binders for a :cmd:`Theorem`. Names may not be repeated, and may not overlap with section variable names (`#8820 `_, closes `#8791 `_, by Jasper Hugunin).

  - Removed the deprecated `Implicit Tactic` family of commands (`#8779 `_, by Pierre-Marie Pédrot).

  - The `Automatic Introduction` option has been removed and is now the default (`#9001 `_, by Emilio Jesús Gallego Arias).

  - `Arguments` now accepts names for arguments provided with `extra_scopes` (`#9117 `_, by Maxime Dénès).

  - The naming scheme for anonymous binders in a `Theorem` has changed to avoid conflicts with explicitly named binders (`#9160 `_, closes `#8819 `_, by Jasper Hugunin).

  - Computation of implicit arguments now properly handles local definitions in the binders for an `Instance`, and can be mixed with implicit binders `{x : T}` (`#9307 `_, closes `#9300 `_, by Jasper Hugunin).

  - :cmd:`Declare Instance` now requires an instance name. The flag `Refine Instance Mode` has been turned off by default, meaning that :cmd:`Instance` no longer opens a proof when a body is provided. The flag has been deprecated and will be removed in the next version (`#9270 `_ and `#9825 `_, by Maxime Dénès).

  - Command :cmd:`Instance`, when no body is provided, now always opens a proof. This is a breaking change, as instances of :n:`Instance @ident__1 : @ident__2.` where :n:`@ident__2` is a trivial class will have to be changed into :n:`Instance @ident__1 : @ident__2 := %{%}.` or :n:`Instance @ident__1 : @ident__2. Proof. Qed.` (`#9274 `_, by Maxime Dénès). A minimal illustrative sketch appears a few entries below.

  - The flag :flag:`Program Mode` now means that the `Program` attribute is enabled for all commands that support it. In particular, it does not have any effect on tactics anymore. May cause some incompatibilities (`#9410 `_, by Maxime Dénès).

  - The algorithm computing implicit arguments now behaves uniformly for primitive projection and application nodes (`#9509 `_, closes `#9508 `_, by Pierre-Marie Pédrot).

  - :cmd:`Hypotheses` and :cmd:`Variables` can now take implicit binders inside sections (`#9364 `_, closes `#9363 `_, by Jasper Hugunin).

  - Removed deprecated option `Automatic Coercions Import` (`#8094 `_, by Maxime Dénès).

  - The ``Show Script`` command has been deprecated (`#9829 `_, by Vincent Laporte).
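
A minimal sketch of the new :cmd:`Instance` behavior described above, using a hypothetical ``Default`` class (not part of the changelog entry): when a body is given, no proof is opened; when no body is given, a proof is opened and must be completed explicitly.

.. coqtop:: in

   Class Default (A : Type) := default : A.

   Instance nat_default : Default nat := 0.

   Instance bool_default : Default bool.
   Proof. exact true. Defined.
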
- :cmd:`Coercion` does not warn ambiguous paths which are obviously convertible with existing ones. The ambiguous paths messages have been turned to warnings, thus now they could appear in the output of ``coqc``. The convertibility checking procedure for coercion paths is complete for paths consisting of coercions satisfying the uniform inheritance condition, but some coercion paths could be reported as ambiguous even if they are convertible with existing ones when they have coercions that don't satisfy the uniform inheritance condition (`#9743 `_, closes `#3219 `_, by Kazuhiko Sakaguchi). - A new flag :flag:`Fast Name Printing` has been introduced. It changes the algorithm used for allocating bound variable names for a faster but less clever one (`#9078 `_, by Pierre-Marie Pédrot). - Option ``Typeclasses Axioms Are Instances`` (compatibility option introduced in the previous version) is deprecated. Use :cmd:`Declare Instance` for axioms which should be instances (`#8920 `_, by Gaëtan Gilbert). - Removed option `Printing Primitive Projection Compatibility` (`#9306 `_, by Gaëtan Gilbert). - Standard Library: - Added `Bvector.BVeq` that decides whether two `Bvector`\s are equal. Added notations for `BVxor`, `BVand`, `BVor`, `BVeq` and `BVneg` (`#8171 `_, by Yishuai Li). - Added `ByteVector` type that can convert to and from `string` (`#8365 `_, by Yishuai Li). - Added lemmas about monotonicity of `N.double` and `N.succ_double`, and about the upper bound of number represented by a vector. Allowed implicit vector length argument in `Ndigits.Bv2N` (`#8815 `_, by Yishuai Li). - The prelude used to be automatically Exported and is now only Imported. This should be relevant only when importing files which don't use `-noinit` into files which do (`#9013 `_, by Gaëtan Gilbert). - Added `Coq.Structures.OrderedTypeEx.String_as_OT` to make strings an ordered type, using lexical order (`#7221 `_, by Li Yao). - Added lemmas about `Z.testbit`, `Z.ones`, and `Z.modulo` (`#9425 `_, by Andres Erbsen). - Moved the `auto` hints of the `FSet` library into a new `fset` database (`#9725 `_, by Frédéric Besson). - Added :g:`Coq.Structures.EqualitiesFacts.PairUsualDecidableTypeFull` (`#9984 `_, by Jean-Christophe Léchenet and Oliver Nash). - Some error messages that show problems with a pair of non-matching values will now highlight the differences (`#8669 `_, by Jim Fehrle). - Changelog has been moved from a specific file `CHANGES.md` to the reference manual; former Credits chapter of the reference manual has been split in two parts: a History chapter which was enriched with additional historical information about Coq versions 1 to 5, and a Changes chapter which was enriched with the content formerly in `CHANGES.md` and `COMPATIBILITY` (`#9133 `_, `#9668 `_, `#9939 `_, `#9964 `_, and `#10085 `_, by Théo Zimmermann, with help and ideas from Emilio Jesús Gallego Arias, Gaëtan Gilbert, Clément Pit-Claudel, Matthieu Sozeau, and Enrico Tassi). Changes in 8.10+beta2 ~~~~~~~~~~~~~~~~~~~~~ Many bug fixes and documentation improvements, in particular: **Tactics** - Make the :tacn:`discriminate` tactic work together with :flag:`Universe Polymorphism` and equality in :g:`Type`. This, in particular, makes :tacn:`discriminate` compatible with the HoTT library https://github.com/HoTT/HoTT (`#10205 `_, by Andreas Lynge, review by Pierre-Marie Pédrot and Matthieu Sozeau). **SSReflect** - Make the ``case E: t`` tactic work together with :flag:`Universe Polymorphism` and equality in :g:`Type`. 
This makes :tacn:`case ` compatible with the HoTT library https://github.com/HoTT/HoTT (`#10302 `_, fixes `#10301 `_, by Andreas Lynge, review by Enrico Tassi) - Make the ``rewrite /t`` tactic work together with :flag:`Universe Polymorphism`. This makes :tacn:`rewrite ` compatible with the HoTT library https://github.com/HoTT/HoTT (`#10305 `_, fixes `#9336 `_, by Andreas Lynge, review by Enrico Tassi) **CoqIDE** - Fix CoqIDE instability on Windows after the update to gtk3 (`#10360 `_, by Michael Soegtrop, closes `#9885 `_). **Miscellaneous** - Proof General can now display Coq-generated diffs between proof steps in color (`#10019 `_ and (in Proof General) `#421 `_, by Jim Fehrle). Changes in 8.10+beta3 ~~~~~~~~~~~~~~~~~~~~~ **Kernel** - Fix soundness issue with template polymorphism (`#9294 `_). Declarations of template-polymorphic inductive types ignored the provenance of the universes they were abstracting on and did not detect if they should be greater or equal to :math:`\Set` in general. Previous universes and universes introduced by the inductive definition could have constraints that prevented their instantiation with e.g. :math:`\Prop`, resulting in unsound instantiations later. The implemented fix only allows abstraction over universes introduced by the inductive declaration, and properly records all their constraints by making them by default only :math:`>= \Prop`. It is also checked that a template polymorphic inductive actually is polymorphic on at least one universe. This prevents inductive declarations in sections to be universe polymorphic over section parameters. For a backward compatible fix, simply hoist the inductive definition out of the section. An alternative is to declare the inductive as universe-polymorphic and cumulative in a universe-polymorphic section: all universes and constraints will be properly gathered in this case. See :ref:`Template-polymorphism` for a detailed exposition of the rules governing template-polymorphic types. To help users incrementally fix this issue, a command line option `-no-template-check` and a global flag ``Template Check`` are available to selectively disable the new check. Use at your own risk. (`#9918 `_, by Matthieu Sozeau and Maxime Dénès). **User messages** - Improve the ambiguous paths warning to indicate which path is ambiguous with new one (`#10336 `_, closes `#3219 `_, by Kazuhiko Sakaguchi). **Extraction** - Fix extraction to OCaml of primitive machine integers; see :ref:`primitive-integers` (`#10430 `_, fixes `#10361 `_, by Vincent Laporte). - Fix a printing bug of OCaml extraction on dependent record projections, which produced improper `assert false`. This change makes the OCaml extractor internally inline record projections by default; thus the monolithic OCaml extraction (:cmd:`Extraction` and :cmd:`Recursive Extraction`) does not produce record projection constants anymore except for record projections explicitly instructed to extract, and records declared in opaque modules (`#10577 `_, fixes `#7348 `_, by Kazuhiko Sakaguchi). **Standard library** - Added ``splitat`` function and lemmas about ``splitat`` and ``uncons`` (`#9379 `_, by Yishuai Li, with help of Konstantinos Kallas, follow-up of `#8365 `_, which added ``uncons`` in 8.10+beta1). Changes in 8.10.0 ~~~~~~~~~~~~~~~~~ - Micromega tactics (:tacn:`lia`, :tacn:`nia`, etc) are no longer confused by primitive projections (`#10806 `_, fixes `#9512 `_ by Vincent Laporte). 
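As a minimal sketch of the micromega fix above (the record ``wrapped`` and its projection ``unwrap`` are invented for the example)::

   Require Import Lia.
   Set Primitive Projections.

   Record wrapped := Wrap { unwrap : nat }.

   (* The primitive projection `unwrap w` is now treated as an ordinary
      atom by lia. *)
   Goal forall w : wrapped, unwrap w + 0 = unwrap w.
   Proof. intros w. lia. Qed.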
Changes in 8.10.1 ~~~~~~~~~~~~~~~~~ A few bug fixes and documentation improvements, in particular: **Kernel** - Fix proof of False when using |SProp| (incorrect De Bruijn handling when inferring the relevance mark of a function) (`#10904 `_, by Pierre-Marie Pédrot). **Tactics** - Fix an anomaly when unsolved evar in :cmd:`Add Ring` (`#10891 `_, fixes `#9851 `_, by Gaëtan Gilbert). **Tactic language** - Fix Ltac regression in binding free names in uconstr (`#10899 `_, fixes `#10894 `_, by Hugo Herbelin). **CoqIDE** - Fix handling of unicode input before space (`#10852 `_, fixes `#10842 `_, by Arthur Charguéraud). **Extraction** - Fix custom extraction of inductives to JSON (`#10897 `_, fixes `#4741 `_, by Helge Bahmann). Changes in 8.10.2 ~~~~~~~~~~~~~~~~~ **Kernel** - Fixed a critical bug of template polymorphism and nonlinear universes (`#11128 `_, fixes `#11039 `_, by Gaëtan Gilbert). - Fixed an anomaly “Uncaught exception Constr.DestKO” on :g:`Inductive` (`#11052 `_, fixes `#11048 `_, by Gaëtan Gilbert). - Fixed an anomaly “not enough abstractions in fix body” (`#11014 `_, fixes `#8459 `_, by Gaëtan Gilbert). **Notations** - Fixed an 8.10 regression related to the printing of coercions associated with notations (`#11090 `_, fixes `#11033 `_, by Hugo Herbelin). **CoqIDE** - Fixed uneven dimensions of CoqIDE panels when window has been resized (`#11070 `_, fixes 8.10-regression `#10956 `_, by Guillaume Melquiond). - Do not include final stops in queries (`#11069 `_, fixes 8.10-regression `#11058 `_, by Guillaume Melquiond). **Infrastructure and dependencies** - Enable building of executables when they are running (`#11000 `_, fixes 8.9-regression `#10728 `_, by Gaëtan Gilbert). Version 8.9 ----------- Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.9 contains the result of refinements and stabilization of features and deprecations or removals of deprecated features, cleanups of the internals of the system and API along with a few new features. This release includes many user-visible changes, including deprecations that are documented in the next subsection and new features that are documented in the reference manual. Here are the most important changes: - Kernel: mutually recursive records are now supported, by Pierre-Marie Pédrot. - Notations: - Support for autonomous grammars of terms called “custom entries”, by Hugo Herbelin (see Section :ref:`custom-entries` of the reference manual). - Deprecated notations of the standard library will be removed in the next version of Coq, see the next subsection for a script to ease porting, by Jason Gross and Jean-Christophe Léchenet. - Added the :cmd:`Number Notation` command for registering decimal numeral notations for custom types, by Daniel de Rauglaudre, Pierre Letouzey and Jason Gross. - Tactics: Introduction tactics :tacn:`intro`/:tacn:`intros` on a goal that is an existential variable now force a refinement of the goal into a dependent product rather than failing, by Hugo Herbelin. - Decision procedures: deprecation of tactic ``romega`` in favor of :tacn:`lia` and removal of ``fourier``, replaced by :tacn:`lra` which subsumes it, by Frédéric Besson, Maxime Dénès, Vincent Laporte and Laurent Théry. - Proof language: focusing bracket ``{`` now supports named :ref:`goals `, e.g. ``[x]:{`` will focus on a goal (existential variable) named ``x``, by Théo Zimmermann. 
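For instance, goals named through the ``?[x]`` syntax can be focused by name with the new bracket (a minimal sketch; the names ``x`` and ``y`` are arbitrary)::

   Goal nat * nat.
   Proof.
     refine (?[x], ?[y]).
     [x]: {
       (* only the goal named x is in focus here *)
       exact 1.
     }
     exact 2.
   Qed.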
- SSReflect: the implementation of delayed clear was simplified by Enrico Tassi: the variables are always renamed using inaccessible names when the clear switch is processed and finally cleared at the end of the intro pattern. In addition to that, the use-and-discard flag ``{}`` typical of rewrite rules can now be also applied to views, e.g. ``=> {}/v`` applies ``v`` and then clears ``v``. See Section :ref:`introduction_ssr`. - Vernacular: - Experimental support for :term:`attributes ` on commands, by Vincent Laporte, as in ``#[local] Lemma foo : bar.`` Tactics and tactic notations now support the ``deprecated`` attribute. - Removed deprecated commands ``Arguments Scope`` and ``Implicit Arguments`` in favor of :cmd:`Arguments`, with the help of Jasper Hugunin. - New flag :flag:`Uniform Inductive Parameters` by Jasper Hugunin to avoid repeating uniform parameters in constructor declarations. - New commands :cmd:`Hint Variables` and :cmd:`Hint Constants`, by Matthieu Sozeau, for controlling the opacity status of variables and constants in hint databases. It is recommended to always use these commands after creating a hint database with :cmd:`Create HintDb`. - Multiple sections with the same name are now allowed, by Jasper Hugunin. - Library: additions and changes in the ``VectorDef``, ``Ascii``, and ``String`` libraries. Syntax notations are now available only when using ``Import`` of libraries and not merely ``Require``, by various contributors (source of incompatibility, see the next subsection for details). - Toplevels: ``coqtop`` and ``coqide`` can now display diffs between proof steps in color, using the :opt:`Diffs` option, by Jim Fehrle. - Documentation: we integrated a large number of fixes to the new Sphinx documentation by various contributors, coordinated by Clément Pit-Claudel and Théo Zimmermann. - Tools: removed the ``gallina`` utility and the homebrewed ``Emacs`` mode. - Packaging: as in Coq 8.8.2, the Windows installer now includes many more external packages that can be individually selected for installation, by Michael Soegtrop. Version 8.9 also comes with a bunch of smaller-scale changes and improvements regarding the different components of the system. Most important ones are documented in the next subsection file. On the implementation side, the ``dev/doc/changes.md`` file documents the numerous changes to the implementation and improvements of interfaces. The file provides guidelines on porting a plugin to the new version and a plugin development tutorial kept in sync with Coq was introduced by Yves Bertot http://github.com/ybertot/plugin_tutorials. The new ``dev/doc/critical-bugs`` file documents the known critical bugs of Coq and affected releases. The efficiency of the whole system has seen improvements thanks to contributions from Gaëtan Gilbert, Pierre-Marie Pédrot, and Maxime Dénès. Maxime Dénès, Emilio Jesús Gallego Arias, Gaëtan Gilbert, Michael Soegtrop, Théo Zimmermann worked on maintaining and improving the continuous integration system. The OPAM repository for Coq packages has been maintained by Guillaume Melquiond, Matthieu Sozeau, Enrico Tassi with contributions from many users. A list of packages is available at https://coq.inria.fr/opam/www/. 
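As a minimal sketch of the :flag:`Uniform Inductive Parameters` flag mentioned above (wrapped in a module only to avoid shadowing the standard ``list``)::

   Set Uniform Inductive Parameters.

   Module UIP_example.
     Inductive list (A : Type) :=
     | nil : list
     | cons : A -> list -> list.
   End UIP_example.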
The 54 contributors for this version are Léo Andrès, Rin Arakaki, Benjamin Barenblat, Langston Barrett, Siddharth Bhat, Martin Bodin, Simon Boulier, Timothy Bourke, Joachim Breitner, Tej Chajed, Arthur Charguéraud, Pierre Courtieu, Maxime Dénès, Andres Erbsen, Jim Fehrle, Julien Forest, Emilio Jesus Gallego Arias, Gaëtan Gilbert, Matěj Grabovský, Jason Gross, Samuel Gruetter, Armaël Guéneau, Hugo Herbelin, Jasper Hugunin, Ralf Jung, Sam Pablo Kuper, Ambroise Lafont, Leonidas Lampropoulos, Vincent Laporte, Peter LeFanu Lumsdaine, Pierre Letouzey, Jean-Christophe Léchenet, Nick Lewycky, Yishuai Li, Sven M. Hallberg, Assia Mahboubi, Cyprien Mangin, Guillaume Melquiond, Perry E. Metzger, Clément Pit-Claudel, Pierre-Marie Pédrot, Daniel R. Grayson, Kazuhiko Sakaguchi, Michael Soegtrop, Matthieu Sozeau, Paul Steckler, Enrico Tassi, Laurent Théry, Anton Trunov, whitequark, Théo Winterhalter, Zeimer, Beta Ziliani, Théo Zimmermann. Many power users helped to improve the design of the new features via the issue and pull request system, the Coq development mailing list or the coq-club@inria.fr mailing list. It would be impossible to mention exhaustively the names of everybody who to some extent influenced the development. Version 8.9 is the fourth release of Coq developed on a time-based development cycle. Its development spanned 7 months from the release of Coq 8.8. The development moved to a decentralized merging process during this cycle. Guillaume Melquiond was in charge of the release process and is the maintainer of this release. This release is the result of ~2,000 commits and ~500 PRs merged, closing 75+ issues. The Coq development team welcomed Vincent Laporte, a new Coq engineer working with Maxime Dénès in the Coq consortium. | Paris, November 2018, | Matthieu Sozeau for the Coq development team | Details of changes in 8.9+beta1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Kernel - Mutually defined records are now supported. Notations - New support for autonomous grammars of terms, called "custom entries" (see chapter "Syntax extensions" of the reference manual). - Deprecated compatibility notations will actually be removed in the next version of Coq. Uses of these notations are generally easy to fix thanks to the hint contained in the deprecation warnings. For projects that require more than a handful of such fixes, there is `a script `_ that will do it automatically, using the output of ``coqc``. The script contains documentation on its usage in a comment at the top. Tactics - Added toplevel goal selector `!` which expects a single focused goal. Use with `Set Default Goal Selector` to force focusing before tactics are called. - The undocumented "nameless" forms `fix N`, `cofix` that were deprecated in 8.8 have been removed from Ltac's syntax; please use `fix ident N/cofix ident` to explicitly name the (co)fixpoint hypothesis to be introduced. - Introduction tactics `intro`/`intros` on a goal that is an existential variable now force a refinement of the goal into a dependent product rather than failing. - Support for `fix`/`cofix` added in Ltac `match` and `lazymatch`. - Ltac backtraces now include trace information about tactics called by OCaml-defined tactics. - Option `Ltac Debug` now applies also to terms built using Ltac functions. - Deprecated the `Implicit Tactic` family of commands. - The default program obligation tactic uses a bounded proof search instead of an unbounded and potentially non-terminating one now (source of incompatibility). 
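As a minimal sketch of the toplevel goal selector ``!`` described above, combined with ``Set Default Goal Selector`` to force explicit focusing::

   Set Default Goal Selector "!".

   Goal True /\ True.
   Proof.
     split.
     (* With two goals in focus, an unselected tactic is now rejected,
        so each subgoal must be focused explicitly. *)
     { exact I. }
     { exact I. }
   Qed.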
- The `simple apply` tactic now respects the `Opaque` flag when called from Ltac (`auto` still does not respect it). - Tactic `constr_eq` now adds universe constraints needed for the identity to the context (it used to ignore them). New tactic `constr_eq_strict` checks that the required constraints already hold without adding new ones. Preexisting tactic `constr_eq_nounivs` can still be used if you really want to ignore universe constraints. - Tactics and tactic notations now understand the `deprecated` attribute. - The `fourier` tactic has been removed. Please now use `lra` instead. You may need to add `Require Import Lra` to your developments. For compatibility, we now define `fourier` as a deprecated alias of `lra`. - The `romega` tactics have been deprecated; please use `lia` instead. Focusing - Focusing bracket `{` now supports named goal selectors, e.g. `[x]: {` will focus on a goal (existential variable) named `x`. As usual, unfocus with `}` once the subgoal is fully solved. Specification language - A fix to unification (which was sensitive to the ascii name of variables) may occasionally change type inference in incompatible ways, especially regarding the inference of the return clause of `match`. Standard Library - Added `Ascii.eqb` and `String.eqb` and the `=?` notation for them, and proved some lemmas about them. Note that this might cause incompatibilities if you have, e.g., `string_scope` and `Z_scope` both open with `string_scope` on top, and expect `=?` to refer to `Z.eqb`. Solution: wrap `_ =? _` in `(_ =? _)%Z` (or whichever scope you want). - Added `Ndigits.N2Bv_sized`, and proved some lemmas about it. Deprecated `Ndigits.N2Bv_gen`. - The scopes `int_scope` and `uint_scope` have been renamed to `dec_int_scope` and `dec_uint_scope`, to clash less with ssreflect and other packages. They are still delimited by `%int` and `%uint`. - Syntax notations for `string`, `ascii`, `Z`, `positive`, `N`, `R`, and `int31` are no longer available merely by :cmd:`Require`\ing the files that define the inductives. You must :cmd:`Import` `Coq.Strings.String.StringSyntax` (after `Require` `Coq.Strings.String`), `Coq.Strings.Ascii.AsciiSyntax` (after `Require` `Coq.Strings.Ascii`), `Coq.ZArith.BinIntDef`, `Coq.PArith.BinPosDef`, `Coq.NArith.BinNatDef`, `Coq.Reals.Rdefinitions`, and `Coq.Numbers.Cyclic.Int31.Int31`, respectively, to be able to use these notations. Note that passing `-compat 8.8` or issuing `Require Import Coq.Compat.Coq88` will make these notations available. Users wishing to port their developments automatically may download `fix.py` from https://gist.github.com/JasonGross/5d4558edf8f5c2c548a3d96c17820169 and run a command like `while true; do make -Okj 2>&1 | /path/to/fix.py; done` and get a cup of coffee. (This command must be manually interrupted once the build finishes all the way though. Note also that this method is not fail-proof; you may have to adjust some scopes if you were relying on string notations not being available even when `string_scope` was open.) - Numeral syntax for `nat` is no longer available without loading the entire prelude (`Require Import Coq.Init.Prelude`). This only impacts users running Coq without the init library (`-nois` or `-noinit`) and also issuing `Require Import Coq.Init.Datatypes`. Tools - Coq_makefile lets one override or extend the following variables from the command line: `COQFLAGS`, `COQCHKFLAGS`, `COQDOCFLAGS`. 
`COQFLAGS` is now entirely separate from `COQLIBS`, so in custom Makefiles `$(COQFLAGS)` should be replaced by `$(COQFLAGS) $(COQLIBS)`. - Removed the `gallina` utility (extracts specification from Coq vernacular files). If you would like to maintain this tool externally, please contact us. - Removed the Emacs modes distributed with Coq. You are advised to use `Proof-General `_ (and optionally `Company-Coq `_) instead. If your use case is not covered by these alternative Emacs modes, please open an issue. We can help set up external maintenance as part of Proof-General, or independently as part of coq-community. Commands - Removed deprecated commands `Arguments Scope` and `Implicit Arguments` (not the option). Use the `Arguments` command instead. - Nested proofs may be enabled through the option `Nested Proofs Allowed`. By default, they are disabled and produce an error. The deprecation warning which used to occur when using nested proofs has been removed. - Added option `Uniform Inductive Parameters` which abstracts over parameters before typechecking constructors, allowing to write for example `Inductive list (A : Type) := nil : list | cons : A -> list -> list.` - New `Set Hint Variables/Constants Opaque/Transparent` commands for setting globally the opacity flag of variables and constants in hint databases, overriding the opacity setting of the hint database. - Added generic syntax for "attributes", as in: `#[local] Lemma foo : bar.` - Added the `Numeral Notation` command for registering decimal numeral notations for custom types - The `Set SsrHave NoTCResolution` command no longer has special global scope. If you want the previous behavior, use `Global Set SsrHave NoTCResolution`. - Multiple sections with the same name are allowed. Coq binaries and process model - Before 8.9, Coq distributed a single `coqtop` binary and a set of dynamically loadable plugins that used to take over the main loop for tasks such as IDE language server or parallel proof checking. These plugins have been turned into full-fledged binaries so each different process has associated a particular binary now, in particular `coqidetop` is the CoqIDE language server, and `coq{proof,tactic,query}worker` are in charge of task-specific and parallel proof checking. SSReflect - The implementation of delayed clear switches in intro patterns is now simpler to explain: 1. The immediate effect of a clear switch like `{x}` is to rename the variable `x` to `_x_` (i.e. a reserved identifier that cannot be mentioned explicitly) 2. The delayed effect of `{x}` is that `_x_` is cleared at the end of the intro pattern 3. A clear switch immediately before a view application like `{x}/v` is translated to `/v{x}`. In particular, the third rule lets one write `{x}/v` even if `v` uses the variable `x`: indeed the view is executed before the renaming. - An empty clear switch is now accepted in intro patterns before a view application whenever the view is a variable. One can now write `{}/v` to mean `{v}/v`. Remark that `{}/x` is very similar to the idiom `{}e` for the rewrite tactic (the equation `e` is used for rewriting and then discarded). Standard Library - There are now conversions between `string` and `positive`, `Z`, `nat`, and `N` in binary, octal, and hex. Display diffs between proof steps - `coqtop` and `coqide` can now highlight the differences between proof steps in color. This can be enabled from the command line or the `Set Diffs "on"/"off"/"removed"` command. Please see the documentation for details. 
Showing diffs in Proof General requires small changes to PG (under discussion). Notations - Added `++` infix for `VectorDef.append`. Note that this might cause incompatibilities if you have, e.g., `list_scope` and `vector_scope` both open with `vector_scope` on top, and expect `++` to refer to `app`. Solution: wrap `_ ++ _` in `(_ ++ _)%list` (or whichever scope you want). Changes in 8.8.0 ~~~~~~~~~~~~~~~~ Various bug fixes. Changes in 8.8.1 ~~~~~~~~~~~~~~~~ - Some quality-of-life fixes. - Numerous improvements to the documentation. - Fix a critical bug related to primitive projections and :tacn:`native_compute`. - Ship several additional Coq libraries with the Windows installer. Version 8.8 ----------- Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.8 contains the result of refinements and stabilization of features and deprecations, cleanups of the internals of the system along with a few new features. The main user visible changes are: - Kernel: fix a subject reduction failure due to allowing fixpoints on non-recursive values, by Matthieu Sozeau. Handling of evars in the VM (the kernel still does not accept evars) by Pierre-Marie Pédrot. - Notations: many improvements on recursive notations and support for destructuring patterns in the syntax of notations by Hugo Herbelin. - Proof language: tacticals for profiling, timing and checking success or failure of tactics by Jason Gross. The focusing bracket ``{`` supports single-numbered goal selectors, e.g. ``2:{``, by Théo Zimmermann. - Vernacular: deprecation of commands and more uniform handling of the ``Local`` flag, by Vincent Laporte and Maxime Dénès, part of a larger attribute system overhaul. Experimental ``Show Extraction`` command by Pierre Letouzey. Coercion now accepts ``Prop`` or ``Type`` as a source by Arthur Charguéraud. ``Export`` modifier for options allowing to export the option to modules that ``Import`` and not only ``Require`` a module, by Pierre-Marie Pédrot. - Universes: many user-level and API level enhancements: qualified naming and printing, variance annotations for cumulative inductive types, more general constraints and enhancements of the minimization heuristics, interaction with modules by Gaëtan Gilbert, Pierre-Marie Pédrot and Matthieu Sozeau. - Library: Decimal Numbers library by Pierre Letouzey and various small improvements. - Documentation: a large community effort resulted in the migration of the reference manual to the Sphinx documentation tool. The result is this manual. The new documentation infrastructure (based on Sphinx) is by Clément Pit-Claudel. The migration was coordinated by Maxime Dénès and Paul Steckler, with some help of Théo Zimmermann during the final integration phase. The 14 people who ported the manual are Calvin Beck, Heiko Becker, Yves Bertot, Maxime Dénès, Richard Ford, Pierre Letouzey, Assia Mahboubi, Clément Pit-Claudel, Laurence Rideau, Matthieu Sozeau, Paul Steckler, Enrico Tassi, Laurent Théry, Nikita Zyuzin. - Tools: experimental ``-mangle-names`` option to ``coqtop``/``coqc`` for linting proof scripts, by Jasper Hugunin. On the implementation side, the ``dev/doc/changes.md`` file documents the numerous changes to the implementation and improvements of interfaces. The file provides guidelines on porting a plugin to the new version. Version 8.8 also comes with a bunch of smaller-scale changes and improvements regarding the different components of the system. Most important ones are documented in the next subsection file. 
The efficiency of the whole system has seen improvements thanks to contributions from Gaëtan Gilbert, Pierre-Marie Pédrot, Maxime Dénès and Matthieu Sozeau and performance issue tracking by Jason Gross and Paul Steckler. The official wiki and the bugtracker of Coq migrated to the GitHub platform, thanks to the work of Pierre Letouzey and Théo Zimmermann. Gaëtan Gilbert, Emilio Jesús Gallego Arias worked on maintaining and improving the continuous integration system. The OPAM repository for Coq packages has been maintained by Guillaume Melquiond, Matthieu Sozeau, Enrico Tassi with contributions from many users. A list of packages is available at https://coq.inria.fr/opam/www/. The 44 contributors for this version are Yves Bertot, Joachim Breitner, Tej Chajed, Arthur Charguéraud, Jacques-Pascal Deplaix, Maxime Dénès, Jim Fehrle, Julien Forest, Yannick Forster, Gaëtan Gilbert, Jason Gross, Samuel Gruetter, Thomas Hebb, Hugo Herbelin, Jasper Hugunin, Emilio Jesus Gallego Arias, Ralf Jung, Johannes Kloos, Matej Košík, Robbert Krebbers, Tony Beta Lambda, Vincent Laporte, Peter LeFanu Lumsdaine, Pierre Letouzey, Farzon Lotfi, Cyprien Mangin, Guillaume Melquiond, Raphaël Monat, Carl Patenaude Poulin, Pierre-Marie Pédrot, Clément Pit-Claudel, Matthew Ryan, Matt Quinn, Sigurd Schneider, Bernhard Schommer, Michael Soegtrop, Matthieu Sozeau, Arnaud Spiwack, Paul Steckler, Enrico Tassi, Anton Trunov, Martin Vassor, Vadim Zaliva and Théo Zimmermann. Version 8.8 is the third release of Coq developed on a time-based development cycle. Its development spanned 6 months from the release of Coq 8.7 and was based on a public roadmap. The development process was coordinated by Matthieu Sozeau. Maxime Dénès was in charge of the release process. Théo Zimmermann is the maintainer of this release. Many power users helped to improve the design of the new features via the bug tracker, the pull request system, the Coq development mailing list or the coq-club@inria.fr mailing list. Special thanks to the users who contributed patches and intensive brain-storming and code reviews, starting with Jason Gross, Ralf Jung, Robbert Krebbers and Amin Timany. It would however be impossible to mention exhaustively the names of everybody who to some extent influenced the development. The Coq consortium, an organization directed towards users and supporters of the system, is now running and employs Maxime Dénès. The contacts of the Coq Consortium are Yves Bertot and Maxime Dénès. | Santiago de Chile, March 2018, | Matthieu Sozeau for the Coq development team | Details of changes in 8.8+beta1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Kernel - Support for template polymorphism for definitions was removed. May trigger more "universe inconsistency" errors in rare occasions. - Fixpoints are no longer allowed on non-recursive inductive types. Notations - Recursive notations with the recursive pattern repeating on the right (e.g. "( x ; .. ; y ; z )") now supported. - Notations with a specific level for the leftmost nonterminal, when printing-only, are supported. - Notations can now refer to the syntactic category of patterns (as in "fun 'pat =>" or "match p with pat => ... end"). Two variants are available, depending on whether a single variable is considered as a pattern or not. - Recursive notations now support ".." patterns with several occurrences of the recursive term or binder, possibly mixing terms and binders, possibly in reverse left-to-right order. - "Locate" now working also on notations of the form "x + y" (rather than "_ + _"). 
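For instance, both the placeholder form and the variable form of a notation are now accepted by ``Locate``::

   Locate "_ + _".
   Locate "x + y".   (* now also accepted; prints the interpretations of + *)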
Specification language - When printing clauses of a "match", clauses with same right-hand side are factorized and the last most factorized clause with no variables, if it exists, is turned into a default clause. Use "Unset Printing Allow Default Clause" do deactivate printing of a default clause. Use "Unset Printing Factorizable Match Patterns" to deactivate factorization of clauses with same right-hand side. Tactics - On Linux, "native_compute" calls can be profiled using the "perf" utility. The command "Set NativeCompute Profiling" enables profiling, and "Set NativeCompute Profile Filename" customizes the profile filename. - The tactic "omega" is now aware of the bodies of context variables such as "x := 5 : Z" (see #1362). This could be disabled via Unset Omega UseLocalDefs. - The tactic "romega" is also aware now of the bodies of context variables. - The tactic "zify" resp. "omega with N" is now aware of N.pred. - Tactic "decide equality" now able to manage constructors which contain proofs. - Added tactics reset ltac profile, show ltac profile (and variants) - Added tactics restart_timer, finish_timing, and time_constr as an experimental way of timing Ltac's evaluation phase - Added tactic optimize_heap, analogous to the Vernacular Optimize Heap, which performs a major garbage collection and heap compaction in the OCaml run-time system. - The tactics "dtauto", "dintuition", "firstorder" now handle inductive types with let bindings in the parameters. - The tactic ``dtauto`` now handles some inductives such as ``@sigT A (fun _ => B)`` as non-dependent conjunctions. - A bug fixed in ``rewrite H in *`` and ``rewrite H in * |-`` may cause a few rare incompatibilities (it was unintendedly recursively rewriting in the side conditions generated by H). - Added tactics "assert_succeeds tac" and "assert_fails tac" to ensure properties of the execution of a tactic without keeping the effect of the execution. - `vm_compute` now supports existential variables. - Calls to `shelve` and `give_up` within calls to tactic `refine` now working. - Deprecated tactic `appcontext` was removed. Focusing - Focusing bracket `{` now supports single-numbered goal selector, e.g. `2: {` will focus on the second subgoal. As usual, unfocus with `}` once the subgoal is fully solved. The `Focus` and `Unfocus` commands are now deprecated. Commands - Proofs ending in "Qed exporting ident, .., ident" are not supported anymore. Constants generated during `abstract` are kept private to the local environment. - The deprecated Coercion Local, Open Local Scope, Notation Local syntax was removed. Use Local as a prefix instead. - For the Extraction Language command, "OCaml" is spelled correctly. The older "Ocaml" is still accepted, but deprecated. - Using “Require” inside a section is deprecated. - An experimental command "Show Extraction" allows to extract the content of the current ongoing proof (grant wish #4129). - Coercion now accepts the type of its argument to be "Prop" or "Type". - The "Export" modifier can now be used when setting and unsetting options, and will result in performing the same change when the module corresponding the command is imported. - The `Axiom` command does not automatically declare axioms as instances when their type is a class. Previous behavior can be restored using `Set Typeclasses Axioms Are Instances`. Universes - Qualified naming of global universes now works like other namespaced objects (e.g. constants), with a separate namespace, inside and across module and library boundaries. 
Global universe names introduced in an inductive / constant / Let declaration get qualified with the name of the declaration. - Universe cumulativity for inductive types is now specified as a variance for each polymorphic universe. See the reference manual for more information. - Inference of universe constraints with cumulative inductive types produces more general constraints. Unsetting new option Cumulativity Weak Constraints produces even more general constraints (but may produce too many universes to be practical). - Fix #5726: Notations that start with `Type` now support universe instances with `@{u}`. - `with Definition` now understands universe declarations (like `@{u| Set < u}`). Tools - Coq can now be run with the option -mangle-names to change the auto-generated name scheme. This is intended to function as a linter for developments that want to be robust to changes in auto-generated names. This feature is experimental, and may change or disappear without warning. - GeoProof support was removed. Checker - The checker now accepts filenames in addition to logical paths. CoqIDE - Find and Replace All report the number of occurrences found; Find indicates when it wraps. coqdep - Learned to read -I, -Q, -R and filenames from _CoqProject files. This is used by coq_makefile when generating dependencies for .v files (but not other files). Documentation - The Coq FAQ, formerly located at https://coq.inria.fr/faq, has been moved to the GitHub wiki section of this repository; the main entry page is https://github.com/coq/coq/wiki/The-Coq-FAQ. - Documentation: a large community effort resulted in the migration of the reference manual to the Sphinx documentation tool. The result is partially integrated in this version. Standard Library - New libraries Coq.Init.Decimal, Coq.Numbers.DecimalFacts, Coq.Numbers.DecimalNat, Coq.Numbers.DecimalPos, Coq.Numbers.DecimalN, Coq.Numbers.DecimalZ, Coq.Numbers.DecimalString providing a type of decimal numbers, some facts about them, and conversions between decimal numbers and nat, positive, N, Z, and string. - Added [Coq.Strings.String.concat] to concatenate a list of strings inserting a separator between each item - Notation `'` for Zpos in QArith was removed. - Some deprecated aliases are now emitting warnings when used. Compatibility support - Support for compatibility with versions before 8.6 was dropped. Options - The following deprecated options have been removed: + `Refolding Reduction` + `Standard Proposition Elimination` + `Dependent Propositions Elimination` + `Discriminate Introduction` + `Shrink Abstract` + `Tactic Pattern Unification` + `Intuition Iff Unfolding` + `Injection L2R Pattern Order` + `Record Elimination Schemes` + `Match Strict` + `Tactic Compat Context` + `Typeclasses Legacy Resolution` + `Typeclasses Module Eta` + `Typeclass Resolution After Apply` Details of changes in 8.8.0 ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tools - Asynchronous proof delegation policy was fixed. Since version 8.7 Coq was ignoring previous runs and the `-async-proofs-delegation-threshold` option did not have the expected behavior. Tactic language - The undocumented "nameless" forms `fix N`, `cofix` have been deprecated; please use `fix ident N /cofix ident` to explicitly name the (co)fixpoint hypothesis to be introduced. Documentation - The reference manual is now fully ported to Sphinx. Other small deprecations and bug fixes. Details of changes in 8.8.1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Kernel - Fix a critical bug with cofixpoints and `vm_compute`/`native_compute` (#7333). 
- Fix a critical bug with modules and algebraic universes (#7695) - Fix a critical bug with inlining of polymorphic constants (#7615). - Fix a critical bug with universe polymorphism and `vm_compute` (#7723). Was present since 8.5. Notations - Fixed unexpected collision between only-parsing and only-printing notations (issue #7462). Windows installer - The Windows installer now includes external packages Ltac2 and Equations (it included the Bignums package since 8.8+beta1). Many other bug fixes, documentation improvements (including fixes of regressions due to the Sphinx migration), and user message improvements (for details, see the 8.8.1 milestone at https://github.com/coq/coq/milestone/13?closed=1). Details of changes in 8.8.2 ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Documentation - A PDF version of the reference manual is available once again. Tools - The coq-makefile targets `print-pretty-timed`, `print-pretty-timed-diff`, and `print-pretty-single-time-diff` now correctly label the "before" and "after" columns, rather than swapping them. Kernel - The kernel does not tolerate capture of global universes by polymorphic universe binders, fixing a soundness break (triggered only through custom plugins) Windows installer - The Windows installer now includes many more external packages that can be individually selected for installation. Many other bug fixes and lots of documentation improvements (for details, see the 8.8.2 milestone at https://github.com/coq/coq/milestone/15?closed=1). Version 8.7 ----------- Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.7 contains the result of refinements, stabilization of features and cleanups of the internals of the system along with a few new features. The main user visible changes are: - New tactics: variants of tactics supporting existential variables :tacn:`eassert`, :tacn:`eenough`, etc... by Hugo Herbelin. Tactics ``extensionality in H`` and :tacn:`inversion_sigma` by Jason Gross, ``specialize with ...`` accepting partial bindings by Pierre Courtieu. - ``Cumulative Polymorphic Inductive`` types, allowing cumulativity of universes to go through applied inductive types, by Amin Timany and Matthieu Sozeau. - Integration of the SSReflect plugin and its documentation in the reference manual, by Enrico Tassi, Assia Mahboubi and Maxime Dénès. - The ``coq_makefile`` tool was completely redesigned to improve its maintainability and the extensibility of generated Makefiles, and to make ``_CoqProject`` files more palatable to IDEs by Enrico Tassi. Coq 8.7 involved a large amount of work on cleaning and speeding up the code base, notably the work of Pierre-Marie Pédrot on making the tactic-level system insensitive to existential variable expansion, providing a safer API to plugin writers and making the code more robust. The ``dev/doc/changes.txt`` file documents the numerous changes to the implementation and improvements of interfaces. An effort to provide an official, streamlined API to plugin writers is in progress, thanks to the work of Matej Košík. Version 8.7 also comes with a bunch of smaller-scale changes and improvements regarding the different components of the system. We shall only list a few of them. The efficiency of the whole system has been significantly improved thanks to contributions from Pierre-Marie Pédrot, Maxime Dénès and Matthieu Sozeau and performance issue tracking by Jason Gross and Paul Steckler. Thomas Sibut-Pinote and Hugo Herbelin added support for side effect hooks in cbv, cbn and simpl. 
The side effects are provided via a plugin available at https://github.com/herbelin/reduction-effects/. The BigN, BigZ, BigQ libraries are no longer part of the Coq standard library, they are now provided by a separate repository https://github.com/coq/bignums, maintained by Pierre Letouzey. In the Reals library, ``IZR`` has been changed to produce a compact representation of integers and real constants are now represented using ``IZR`` (work by Guillaume Melquiond). Standard library additions and improvements by Jason Gross, Pierre Letouzey and others, documented in the next subsection file. The mathematical proof language/declarative mode plugin was removed from the archive. The OPAM repository for Coq packages has been maintained by Guillaume Melquiond, Matthieu Sozeau, Enrico Tassi with contributions from many users. A list of packages is available at https://coq.inria.fr/opam/www/. Packaging tools and software development kits were prepared by Michael Soegtrop with the help of Maxime Dénès and Enrico Tassi for Windows, and Maxime Dénès for MacOS X. Packages are regularly built on the Travis continuous integration server. The contributors for this version are Abhishek Anand, C.J. Bell, Yves Bertot, Frédéric Besson, Tej Chajed, Pierre Courtieu, Maxime Dénès, Julien Forest, Gaëtan Gilbert, Jason Gross, Hugo Herbelin, Emilio Jesús Gallego Arias, Ralf Jung, Matej Košík, Xavier Leroy, Pierre Letouzey, Assia Mahboubi, Cyprien Mangin, Erik Martin-Dorel, Olivier Marty, Guillaume Melquiond, Sam Pablo Kuper, Benjamin Pierce, Pierre-Marie Pédrot, Lars Rasmusson, Lionel Rieg, Valentin Robert, Yann Régis-Gianas, Thomas Sibut-Pinote, Michael Soegtrop, Matthieu Sozeau, Arnaud Spiwack, Paul Steckler, George Stelle, Pierre-Yves Strub, Enrico Tassi, Hendrik Tews, Amin Timany, Laurent Théry, Vadim Zaliva and Théo Zimmermann. The development process was coordinated by Matthieu Sozeau with the help of Maxime Dénès, who was also in charge of the release process. Théo Zimmermann is the maintainer of this release. Many power users helped to improve the design of the new features via the bug tracker, the pull request system, the Coq development mailing list or the Coq-Club mailing list. Special thanks to the users who contributed patches and intensive brain-storming and code reviews, starting with Jason Gross, Ralf Jung, Robbert Krebbers, Xavier Leroy, Clément Pit–Claudel and Gabriel Scherer. It would however be impossible to mention exhaustively the names of everybody who to some extent influenced the development. Version 8.7 is the second release of Coq developed on a time-based development cycle. Its development spanned 9 months from the release of Coq 8.6 and was based on a public road-map. It attracted many external contributions. Code reviews and continuous integration testing were systematically used before integration of new features, with an important focus given to compatibility and performance issues, resulting in a hopefully more robust release than Coq 8.6 while maintaining compatibility. Coq Enhancement Proposals (CEPs for short) and open pull request discussions were used to discuss publicly the new features. The Coq consortium, an organization directed towards users and supporters of the system, is now upcoming and will rely on Inria’s newly created Foundation. 
| Paris, August 2017, | Matthieu Sozeau and the Coq development team | Potential compatibility issues ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Extra superfluous names in introduction patterns may now raise an error rather than a warning when the superfluous name is already in use. The easy fix is to remove the superfluous name. Details of changes in 8.7+beta1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tactics - New tactic "extensionality in H" which applies (possibly dependent) functional extensionality in H supposed to be a quantified equality until giving a bare equality. - New tactic ``inversion_sigma`` which turns equalities of dependent pairs (e.g., ``existT P x p = existT P y q``, frequently left over by ``inversion`` on a dependent type family) into pairs of equalities (e.g., a hypothesis ``H : x = y`` and a hypothesis of type ``rew H in p = q``); these hypotheses can subsequently be simplified using ``subst``, without ever invoking any kind of axiom asserting uniqueness of identity proofs. If you want to explicitly specify the hypothesis to be inverted, or name the generated hypotheses, you can invoke ``induction H as [H1 H2] using eq_sigT_rect``. The tactic also works for ``sig``, ``sigT2``, and ``sig2``, and there are similar ``eq_sig*_rect`` induction lemmas. - Tactic "specialize with ..." now accepts any partial bindings. Missing bindings are either solved by unification or left quantified in the hypothesis. - New representation of terms that statically ensure stability by evar-expansion. This has several consequences. * In terms of performance, this adds a cost to every term destructuration, but at the same time most eager evar normalizations were removed, which couterbalances this drawback and even sometimes outperforms the old implementation. For instance, many operations that would require O(n) normalization of the term are now O(1) in tactics. YMMV. * This triggers small changes in unification, which was not evar-insensitive. Most notably, the new implementation recognizes Miller patterns that were missed before because of a missing normalization step. Hopefully this should be fairly uncommon. - Tactic "auto with real" can now discharge comparisons of literals. - The types of variables in patterns of "match" are now beta-iota-reduced after type checking. This has an impact on the type of the variables that the tactic "refine" introduces in the context, producing types that should be closer to the expectations. - In "Tactic Notation" or "TACTIC EXTEND", entry "constr_with_bindings" now uses type classes and rejects terms with unresolved holes, like entry "constr" does. To get the former behavior use "open_constr_with_bindings" (possible source of incompatibility). - New e-variants eassert, eenough, epose proof, eset, eremember, epose which behave like the corresponding variants with no "e" but turn unresolved implicit arguments into existential variables, on the shelf, rather than failing. - Tactic injection has become more powerful (closes bug #4890) and its documentation has been updated. - New variants of the `first` and `solve` tacticals that do not rely on parsing rules, meant to define tactic notations. - Added support for side effects hooks in `cbv`, `cbn` and `simpl`. The side effects are provided via a plugin: https://github.com/herbelin/reduction-effects/ - It is now possible to take hint database names as parameters in a Ltac definition or a Tactic Notation. - New option `Set Ltac Batch Debug` on top of `Set Ltac Debug` for non-interactive Ltac debug output. 
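As a minimal sketch of the ``extensionality in H`` tactic described above::

   Require Import Coq.Logic.FunctionalExtensionality.

   Goal forall f g : nat -> nat, (forall x, f x = g x) -> f = g.
   Proof.
     intros f g H.
     extensionality in H.   (* H now has type f = g *)
     exact H.
   Qed.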
Gallina - Now supporting all kinds of binders, including 'pat, in syntax of record fields. Commands - Goals context can be printed in a more compact way when `Set Printing Compact Contexts` is activated. - Unfocused goals can be printed with the `Set Printing Unfocused` option. - `Print` now shows the types of let-bindings. - The compatibility options for printing primitive projections (`Set Printing Primitive Projection Parameters` and `Set Printing Primitive Projection Compatibility`) are now off by default. - Possibility to unset the printing of notations in a more fine grained fashion than `Unset Printing Notations` is provided without any user-syntax. The goal is that someone creates a plugin to experiment such a user-syntax, to be later integrated in Coq when stabilized. - `About` now tells if a reference is a coercion. - The deprecated `Save` vernacular and its form `Save Theorem id` to close proofs have been removed from the syntax. Please use `Qed`. - `Search` now sorts results by relevance (the relevance metric is a weighted sum of number of distinct symbols and size of the term). Standard Library - New file PropExtensionality.v to explicitly work in the axiomatic context of propositional extensionality. - New file SetoidChoice.v axiomatically providing choice over setoids, and, consequently, choice of representatives in equivalence classes. Various proof-theoretic characterizations of choice over setoids in file ChoiceFacts.v. - New lemmas about iff and about orders on positive and Z. - New lemmas on powerRZ. - Strengthened statement of JMeq_eq_dep (closes bug #4912). - The BigN, BigZ, BigZ libraries are no longer part of the Coq standard library, they are now provided by a separate repository https://github.com/coq/bignums The split has been done just after the Int31 library. - IZR (Reals) has been changed to produce a compact representation of integers. As a consequence, IZR is no longer convertible to INR and lemmas such as INR_IZR_INZ should be used instead. - Real constants are now represented using IZR rather than R0 and R1; this might cause rewriting rules to fail to apply to constants. - Added new notation {x & P} for sigT (without a type for x) Plugins - The Ssreflect plugin is now distributed with Coq. Its documentation has been integrated as a chapter of the reference manual. This chapter is work in progress so feedback is welcome. - The mathematical proof language (also known as declarative mode) was removed. - A new command Extraction TestCompile has been introduced, not meant for the general user but instead for Coq's test-suite. - The extraction plugin is no longer loaded by default. It must be explicitly loaded with [Require Extraction], which is backwards compatible. - The functional induction plugin (which provides the [Function] vernacular) is no longer loaded by default. It must be explicitly loaded with [Require FunInd], which is backwards compatible. Dependencies - Support for camlp4 has been removed. Tools - coq_makefile was completely redesigned to improve its maintainability and the extensibility of generated Makefiles, and to make _CoqProject files more palatable to IDEs. Overview: * _CoqProject files contain only Coq specific data (i.e. the list of files, -R options, ...) 
* coq_makefile translates _CoqProject to Makefile.conf and copies in the desired location a standard Makefile (that reads Makefile.conf) * Makefile extensions can be implemented in a Makefile.local file (read by the main Makefile) by installing a hook in the extension points provided by the standard Makefile The current version contains code for retro compatibility that prints warnings when a deprecated feature is used. Please upgrade your _CoqProject accordingly. * Additionally, coq_makefile-made Makefiles now support experimental timing targets `pretty-timed`, `pretty-timed-before`, `pretty-timed-after`, `print-pretty-timed-diff`, `print-pretty-single-time-diff`, `all.timing.diff`, and the variable `TIMING=1` (or `TIMING=before` or `TIMING=after`); see the documentation for more details. Build Infrastructure - Note that 'make world' does not build the bytecode binaries anymore. For that, you can use 'make byte' (and 'make install-byte' afterwards). Warning: native and byte compilations should *not* be mixed in the same instance of 'make -j', otherwise both ocamlc and ocamlopt might race for access to the same .cmi files. In short, use "make -j && make -j byte" instead of "make -j world byte". Universes - Cumulative inductive types. see prefixes "Cumulative", "NonCumulative" for inductive definitions and the option "Set Polymorphic Inductive Cumulativity" in the reference manual. - New syntax `foo@{_}` to instantiate a polymorphic definition with anonymous universes (can also be used with `Type`). XML Protocol and internal changes See dev/doc/changes.txt Many bugfixes including #1859, #2884, #3613, #3943, #3994, #4250, #4709, #4720, #4824, #4844, #4911, #5026, #5233, #5275, #5315, #5336, #5360, #5390, #5414, #5417, #5420, #5439, #5449, #5475, #5476, #5482, #5501, #5507, #5520, #5523, #5524, #5553, #5577, #5578, #5589, #5597, #5598, #5607, #5618, #5619, #5620, #5641, #5648, #5651, #5671. Many bugfixes on OS X and Windows (now the test-suite passes on these platforms too). Many optimizations. Many documentation improvements. Details of changes in 8.7+beta2 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tools - In CoqIDE, the "Compile Buffer" command takes account of flags in _CoqProject or other project file. Improvements around some error messages. Many bug fixes including two important ones: - Bug #5730: CoqIDE becomes unresponsive on file open. - coq_makefile: make sure compile flags for Coq and coq_makefile are in sync (in particular, make sure the `-safe-string` option is used to compile plugins). Details of changes in 8.7.0 ~~~~~~~~~~~~~~~~~~~~~~~~~~~ OCaml - Users can pass specific flags to the OCaml optimizing compiler by -using the flambda-opts configure-time option. Beware that compiling Coq with a flambda-enabled compiler is experimental and may require large amounts of RAM and CPU, see INSTALL for more details. Details of changes in 8.7.1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Compatibility with OCaml 4.06.0. Many bug fixes, documentation improvements, and user message improvements (for details see the 8.7.1 milestone at https://github.com/coq/coq/milestone/10?closed=1). Details of changes in 8.7.2 ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Fixed a critical bug in the VM handling of universes (#6677). This bug affected all releases since 8.5. Improved support for building with OCaml 4.06.0 and external num package. Many other bug fixes, documentation improvements, and user message improvements (for details, see the 8.7.2 milestone at https://github.com/coq/coq/milestone/11?closed=1). 
Version 8.6 ----------- Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.6 contains the result of refinements, stabilization of 8.5’s features and cleanups of the internals of the system. Over the year of (now time-based) development, about 450 bugs were resolved and over 100 contributions integrated. The main user visible changes are: - A new, faster state-of-the-art universe constraint checker, by Jacques-Henri Jourdan. - In CoqIDE and other asynchronous interfaces, more fine-grained asynchronous processing and error reporting by Enrico Tassi, making Coq capable of recovering from errors and continue processing the document. - More access to the proof engine features from Ltac: goal management primitives, range selectors and a :tacn:`typeclasses eauto` engine handling multiple goals and multiple successes, by Cyprien Mangin, Matthieu Sozeau and Arnaud Spiwack. - Tactic behavior uniformization and specification, generalization of intro-patterns by Hugo Herbelin and others. - A brand new warning system allowing to control warnings, turn them into errors or ignore them selectively by Maxime Dénès, Guillaume Melquiond, Pierre-Marie Pédrot and others. - Irrefutable patterns in abstractions, by Daniel de Rauglaudre. - The ssreflect subterm selection algorithm by Georges Gonthier and Enrico Tassi is now accessible to tactic writers through the ssrmatching plugin. - Integration of LtacProf, a profiler for Ltac by Jason Gross, Paul Steckler, Enrico Tassi and Tobias Tebbi. Coq 8.6 also comes with a bunch of smaller-scale changes and improvements regarding the different components of the system. We shall only list a few of them. The iota reduction flag is now a shorthand for match, fix and cofix flags controlling the corresponding reduction rules (by Hugo Herbelin and Maxime Dénès). Maxime Dénès maintained the native compilation machinery. Pierre-Marie Pédrot separated the Ltac code from general purpose tactics, and generalized and rationalized the handling of generic arguments, allowing to create new versions of Ltac more easily in the future. In patterns and terms, @, abbreviations and notations are now interpreted the same way, by Hugo Herbelin. Name handling for universes has been improved by Pierre-Marie Pédrot and Matthieu Sozeau. The minimization algorithm has been improved by Matthieu Sozeau. The unifier has been improved by Hugo Herbelin and Matthieu Sozeau, fixing some incompatibilities introduced in Coq 8.5. Unification constraints can now be left floating around and be seen by the user thanks to a new option. The Keyed Unification mode has been improved by Matthieu Sozeau. The typeclass resolution engine and associated proof search tactic have been reimplemented on top of the proof-engine monad, providing better integration in tactics, and new options have been introduced to control it, by Matthieu Sozeau with help from Théo Zimmermann. The efficiency of the whole system has been significantly improved thanks to contributions from Pierre-Marie Pédrot, Maxime Dénès and Matthieu Sozeau and performance issue tracking by Jason Gross and Paul Steckler. Standard library improvements by Jason Gross, Sébastien Hinderer, Pierre Letouzey and others. Emilio Jesús Gallego Arias contributed many cleanups and refactorings of the pretty-printing and user interface communication components. Frédéric Besson maintained the micromega tactic. The OPAM repository for Coq packages has been maintained by Guillaume Claret, Guillaume Melquiond, Matthieu Sozeau, Enrico Tassi and others. 
A list of packages is now available at https://coq.inria.fr/opam/www/. Packaging tools and software development kits were prepared by Michael Soegtrop with the help of Maxime Dénès and Enrico Tassi for Windows, and Maxime Dénès and Matthieu Sozeau for MacOS X. Packages are now regularly built on the continuous integration server. Coq now comes with a META file usable with ocamlfind, contributed by Emilio Jesús Gallego Arias, Gregory Malecha, and Matthieu Sozeau. Matej Košík maintained and greatly improved the continuous integration setup and the testing of Coq contributions. He also contributed many API improvements and code cleanups throughout the system. The contributors for this version are Bruno Barras, C.J. Bell, Yves Bertot, Frédéric Besson, Pierre Boutillier, Tej Chajed, Guillaume Claret, Xavier Clerc, Pierre Corbineau, Pierre Courtieu, Maxime Dénès, Ricky Elrod, Emilio Jesús Gallego Arias, Jason Gross, Hugo Herbelin, Sébastien Hinderer, Jacques-Henri Jourdan, Matej Košík, Xavier Leroy, Pierre Letouzey, Gregory Malecha, Cyprien Mangin, Erik Martin-Dorel, Guillaume Melquiond, Clément Pit–Claudel, Pierre-Marie Pédrot, Daniel de Rauglaudre, Lionel Rieg, Gabriel Scherer, Thomas Sibut-Pinote, Matthieu Sozeau, Arnaud Spiwack, Paul Steckler, Enrico Tassi, Laurent Théry, Nickolai Zeldovich and Théo Zimmermann. The development process was coordinated by Hugo Herbelin and Matthieu Sozeau with the help of Maxime Dénès, who was also in charge of the release process. Many power users helped to improve the design of the new features via the bug tracker, the pull request system, the Coq development mailing list or the Coq-Club mailing list. Special thanks to the users who contributed patches and intensive brain-storming and code reviews, starting with Cyril Cohen, Jason Gross, Robbert Krebbers, Jonathan Leivent, Xavier Leroy, Gregory Malecha, Clément Pit–Claudel, Gabriel Scherer and Beta Ziliani. It would however be impossible to mention exhaustively the names of everybody who to some extent influenced the development. Version 8.6 is the first release of Coq developed on a time-based development cycle. Its development spanned 10 months from the release of Coq 8.5 and was based on a public roadmap. To date, it contains more external contributions than any previous Coq system. Code reviews were systematically done before integration of new features, with an important focus given to compatibility and performance issues, resulting in a hopefully more robust release than Coq 8.5. Coq Enhancement Proposals (CEPs for short) were introduced by Enrico Tassi to provide more visibility and a discussion period on new features, they are publicly available https://github.com/coq/ceps. Started during this period, an effort is led by Yves Bertot and Maxime Dénès to put together a Coq consortium. | Paris, November 2016, | Matthieu Sozeau and the Coq development team | Potential sources of incompatibilities ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Symptom: An obligation generated by Program or an abstracted subproof has different arguments. Cause: Set Shrink Abstract and Set Shrink Obligations are on by default and the subproof does not use the argument. Remedy: + Adapt the script. + Write an explicit lemma to prove the obligation/subproof and use it instead (compatible with 8.4). + Unset the option for the program/proof the obligation/subproof originates from. - Symptom: In a goal, order of hypotheses, or absence of an equality of the form "x = t" or "t = x", or no unfolding of a local definition. 
Cause: This might be connected to a number of fixes in the tactic "subst". The former behavior can be reactivated by issuing "Unset Regular Subst Tactic".

Details of changes in 8.6beta1
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Kernel

- A new, faster state-of-the-art universe constraint checker.

Specification language

- Giving implicit arguments explicitly to a constant with multiple choices of implicit arguments no longer breaks the insertion of further maximal implicit arguments.
- Ability to put any pattern in binders, prefixed by a quote, e.g. "fun '(a,b) => ...", "λ '(a,(b,c)), ...", "Definition foo '(x,y) := ...". It expands into a "let 'pattern := ..."

Tactics

- Flag "Bracketing Last Introduction Pattern" is now on by default.
- Flag "Regular Subst Tactic" is now on by default: it respects the initial order of hypotheses, it contracts cycles, it unfolds no local definitions (common source of incompatibilities, fixable by "Unset Regular Subst Tactic").
- New flag "Refolding Reduction", now disabled by default, which turns on refolding of constants/fixpoints (as in cbn) during the reductions done during type inference and tactic retyping. Can be extremely expensive. When set off, this recovers the 8.4 behavior of unification and type inference. Potential source of incompatibility with 8.5 developments (the option is set on in Compat/Coq85.v).
- New flag "Shrink Abstract" that minimizes proofs generated by the abstract tactical w.r.t. variables appearing in the body of the proof. On by default and deprecated. Minor source of incompatibility for code relying on the precise arguments of abstracted proofs.
- Serious bugs are fixed in tactic "double induction" (source of incompatibilities as soon as the inductive types have dependencies in the type of their constructors; "double induction" remains however deprecated).
- In introduction patterns of the form (pat1,...,patn), n should match the exact number of hypotheses introduced (except for local definitions, for which the pattern can be omitted, as in regular pattern-matching).
- Tactic scopes in Ltac like constr: and ltac: now require parentheses around their argument.
- Every generic argument type declares a tactic scope of the form "name:(...)" where name is the name of the argument. This generalizes the constr: and ltac: instances.
- When in strict mode (i.e. in an Ltac definition), if the "intro" tactic is given a free identifier, it is no longer bound in subsequent tactics. In order to introduce a binding, use e.g. the "fresh" primitive instead (potential source of incompatibilities).
- New tactics is_ind, is_const, is_proj, is_constructor for use in Ltac.
- New goal selectors. Sets of goals can be selected by listing integer ranges. Example: "1,4-7,24: tac" focuses "tac" on goals 1,4,5,6,7,24.
- For uniformity with "destruct"/"induction" and for a more natural behavior, "injection" can now work in place by activating option "Structural Injection". In this case, hypotheses are also put in the context in the natural left-to-right order and the hypothesis on which injection applies is cleared.
- Tactic "contradiction" (hence "easy") now also solves goals with hypotheses of the form "~True" or "t<>t" (possible source of incompatibilities because of more successes in automation, but generally a more intuitive strategy).
- Option "Injection On Proofs" was renamed "Keep Proof Equalities". When enabled, injection and inversion do not drop equalities between objects in Prop. Still disabled by default.
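
  A minimal sketch of the intended use, on a hypothetical single-constructor type whose argument lives in Prop (the type ``wrap`` and the names below are illustrative, not from the changelog)::

     Set Keep Proof Equalities.

     Inductive wrap : Type := Wrap : True -> wrap.

     Goal forall p q : True, Wrap p = Wrap q -> p = q.
     Proof.
       intros p q H.
       (* with the option set, the equality between the Prop arguments
          p and q is expected to be kept instead of being dropped *)
       injection H as e.
       exact e.
     Qed.
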
- New tactics "notypeclasses refine" and "simple notypeclasses refine" that disallow typeclass resolution when typechecking their argument, for use in typeclass hints. - Integration of LtacProf, a profiler for Ltac. - Reduction tactics now accept more fine-grained flags: iota is now a shorthand for the new flags match, fix and cofix. - The ssreflect subterm selection algorithm is now accessible to tactic writers through the ssrmatching plugin. - When used as an argument of an ltac function, "auto" without "with" nor "using" clause now correctly uses only the core hint database by default. Hints - Revised the syntax of [Hint Cut] to follow standard notation for regexps. - Hint Mode now accepts "!" which means that the mode matches only if the argument's head is not an evar (it goes under applications, casts, and scrutinees of matches and projections). - Hints can now take an optional user-given pattern, used only by [typeclasses eauto] with the [Filtered Unification] option on. Typeclasses - Many new options and new engine based on the proof monad. The [typeclasses eauto] tactic is now a multi-goal, multi-success tactic. See reference manual for more information. It is planned to replace auto and eauto in the following version. The 8.5 resolution engine is still available to help solve compatibility issues. Program - The "Shrink Obligations" flag now applies to all obligations, not only those solved by the automatic tactic. - "Shrink Obligations" is on by default and deprecated. Minor source of incompatibility for code relying on the precise arguments of obligations. Notations - "Bind Scope" can once again bind "Funclass" and "Sortclass". General infrastructure - New configurable warning system which can be controlled with the vernacular command "Set Warnings", or, under coqc/coqtop, with the flag "-w". In particular, the default is now that warnings are printed by coqc. - In asynchronous mode, Coq is now capable of recovering from errors and continue processing the document. Tools - coqc accepts a -o option to specify the output file name - coqtop accepts --print-version to print Coq and OCaml versions in easy to parse format - Setting [Printing Dependent Evars Line] can be unset to disable the computation associated with printing the "dependent evars: " line in -emacs mode - Removed the -verbose-compat-notations flag and the corresponding Set Verbose Compat vernacular, since these warnings can now be silenced or turned into errors using "-w". XML protocol - message format has changed, see dev/doc/changes.txt for more details. Many bug fixes, minor changes and documentation improvements are not mentioned here. Details of changes in 8.6 ~~~~~~~~~~~~~~~~~~~~~~~~~ Kernel - Fixed critical bug #5248 in VM long multiplication on 32-bit architectures. Was there only since 8.6beta1, so no stable release impacted. Other bug fixes in universes, type class shelving,... Details of changes in 8.6.1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Fix #5380: Default colors for CoqIDE are actually applied. - Fix plugin warnings - Document named evars (including Show ident) - Fix Bug #5574, document function scope - Adding a test case as requested in bug 5205. - Fix Bug #5568, no dup notation warnings on repeated module imports - Fix documentation of Typeclasses eauto := - Refactor documentation of records. 
- Protecting from warnings while compiling 8.6 - Fixing an inconsistency between configure and configure.ml - Add test-suite checks for coqchk with constraints - Fix bug #5019 (looping zify on dependent types) - Fix bug 5550: "typeclasses eauto with" does not work with section variables. - Bug 5546, qualify datatype constructors when needed in Show Match - Bug #5535, test for Show with -emacs - Fix bug #5486, don't reverse ids in tuples - Fixing #5522 (anomaly with free vars of pat) - Fix bug #5526, don't check for nonlinearity in notation if printing only - Fix bug #5255 - Fix bug #3659: -time should understand multibyte encodings. - FIx bug #5300: Anomaly: Uncaught exception Not_found" in "Print Assumptions". - Fix outdated description in RefMan. - Repairing `Set Rewriting Schemes` - Fixing #5487 (v8.5 regression on ltac-matching expressions with evars). - Fix description of command-line arguments for Add (Rec) LoadPath - Fix bug #5377: @? patterns broken. - add XML protocol doc - Fix anomaly when doing [all:Check _.] during a proof. - Correction of bug #4306 - Fix #5435: [Eval native_compute in] raises anomaly. - Instances should obey universe binders even when defined by tactics. - Intern names bound in match patterns - funind: Ignore missing info for current function - Do not typecheck twice the type of opaque constants. - show unused intro pattern warning - [future] Be eager when "chaining" already resolved future values. - Opaque side effects - Fix #5132: coq_makefile generates incorrect install goal - Run non-tactic comands without resilient_command - Univs: fix bug #5365, generation of u+k <= v constraints - make ``emit`` tail recursive - Don't require printing-only notation to be productive - Fix the way setoid_rewrite handles bindings. - Fix for bug 5244 - set printing width ignored when given enough space - Fix bug 4969, autoapply was not tagging shelved subgoals correctly Version 8.5 ----------- Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.5 contains the result of five specific long-term projects: - A new asynchronous evaluation and compilation mode by Enrico Tassi with help from Bruno Barras and Carst Tankink. - Full integration of the new proof engine by Arnaud Spiwack helped by Pierre-Marie Pédrot, - Addition of conversion and reduction based on native compilation by Maxime Dénès and Benjamin Grégoire. - Full universe polymorphism for definitions and inductive types by Matthieu Sozeau. - An implementation of primitive projections with η-conversion bringing significant performance improvements when using records by Matthieu Sozeau. The full integration of the proof engine, by Arnaud Spiwack and Pierre-Marie Pédrot, brings to primitive tactics and the user level Ltac language dependent subgoals, deep backtracking and multiple goal handling, along with miscellaneous features and an improved potential for future modifications. Dependent subgoals allow statements in a goal to mention the proof of another. Proofs of unsolved subgoals appear as existential variables. Primitive backtracking makes it possible to write a tactic with several possible outcomes which are tried successively when subsequent tactics fail. Primitives are also available to control the backtracking behavior of tactics. Multiple goal handling paves the way for smarter automation tactics. It is currently used for simple goal manipulation such as goal reordering. The way Coq processes a document in batch and interactive mode has been redesigned by Enrico Tassi with help from Bruno Barras. 
Opaque proofs, the text between Proof and Qed, can be processed asynchronously, decoupling the checking of definitions and statements from the checking of proofs. It improves the responsiveness of interactive development, since proofs can be processed in the background. Similarly, compilation of a file can be split into two phases: the first one checking only definitions and statements and the second one checking proofs. A file resulting from the first phase – with the .vio extension – can be already Required. All .vio files can be turned into complete .vo files in parallel. The same infrastructure also allows terminating tactics to be run in parallel on a set of goals via the ``par:`` goal selector. CoqIDE was modified to cope with asynchronous checking of the document. Its source code was also made separate from that of Coq, so that CoqIDE no longer has a special status among user interfaces, paving the way for decoupling its release cycle from that of Coq in the future. Carst Tankink developed a Coq back-end for user interfaces built on Makarius Wenzel’s Prover IDE framework (PIDE), like PIDE/jEdit (with help from Makarius Wenzel) or PIDE/Coqoon (with help from Alexander Faithfull and Jesper Bengtson). The development of such features was funded by the Paral-ITP French ANR project. The full universe polymorphism extension was designed by Matthieu Sozeau. It conservatively extends the universes system and core calculus with definitions and inductive declarations parameterized by universes and constraints. It is based on a modification of the kernel architecture to handle constraint checking only, leaving the generation of constraints to the refinement/type inference engine. Accordingly, tactics are now fully universe aware, resulting in more localized error messages in case of inconsistencies and allowing higher-level algorithms like unification to be entirely type safe. The internal representation of universes has been modified but this is invisible to the user. The underlying logic has been extended with η-conversion for records defined with primitive projections by Matthieu Sozeau. This additional form of η-conversion is justified using the same principle than the previously added η-conversion for function types, based on formulations of the Calculus of Inductive Constructions with typed equality. Primitive projections, which do not carry the parameters of the record and are rigid names (not defined as a pattern matching construct), make working with nested records more manageable in terms of time and space consumption. This extension and universe polymorphism were carried out partly while Matthieu Sozeau was working at the IAS in Princeton. The guard condition has been made compliant with extensional equality principles such as propositional extensionality and univalence, thanks to Maxime Dénès and Bruno Barras. To ensure compatibility with the univalence axiom, a new flag ``-indices-matter`` has been implemented, taking into account the universe levels of indices when computing the levels of inductive types. This supports using Coq as a tool to explore the relations between homotopy theory and type theory. Maxime Dénès and Benjamin Grégoire developed an implementation of conversion test and normal form computation using the OCaml native compiler. It complements the virtual machine conversion offering much faster computation for expensive functions. Coq 8.5 also comes with a bunch of many various smaller-scale changes and improvements regarding the different components of the system. 
We shall only list a few of them. Pierre Boutillier developed an improved tactic for simplification of expressions called :tacn:`cbn`. Maxime Dénès maintained the bytecode-based reduction machine. Pierre Letouzey maintained the extraction mechanism. Pierre-Marie Pédrot has extended the syntax of terms to, experimentally, allow holes in terms to be solved by a locally specified tactic. Existential variables are referred to by identifiers rather than mere numbers, thanks to Hugo Herbelin who also improved the tactic language here and there. Error messages for universe inconsistencies have been improved by Matthieu Sozeau. Error messages for unification and type inference failures have been improved by Hugo Herbelin, Pierre-Marie Pédrot and Arnaud Spiwack. Pierre Courtieu contributed new features for using Coq through Proof General and for better interactive experience (bullets, Search, etc). The efficiency of the whole system has been significantly improved thanks to contributions from Pierre-Marie Pédrot. A distribution channel for Coq packages using the OPAM tool has been initiated by Thomas Braibant and developed by Guillaume Claret, with contributions by Enrico Tassi and feedback from Hugo Herbelin. Packaging tools were provided by Pierre Letouzey and Enrico Tassi (Windows), Pierre Boutillier, Matthieu Sozeau and Maxime Dénès (MacOS X). Maxime Dénès improved significantly the testing and benchmarking support. Many power users helped to improve the design of the new features via the bug tracker, the coq development mailing list or the Coq-Club mailing list. Special thanks are going to the users who contributed patches and intensive brain-storming, starting with Jason Gross, Jonathan Leivent, Greg Malecha, Clément Pit-Claudel, Marc Lasson, Lionel Rieg. It would however be impossible to mention with precision all names of people who to some extent influenced the development. Version 8.5 is one of the most important releases of Coq. Its development spanned over about 3 years and a half with about one year of beta-testing. General maintenance during part or whole of this period has been done by Pierre Boutillier, Pierre Courtieu, Maxime Dénès, Hugo Herbelin, Pierre Letouzey, Guillaume Melquiond, Pierre-Marie Pédrot, Matthieu Sozeau, Arnaud Spiwack, Enrico Tassi as well as Bruno Barras, Yves Bertot, Frédéric Besson, Xavier Clerc, Pierre Corbineau, Jean-Christophe Filliâtre, Julien Forest, Sébastien Hinderer, Assia Mahboubi, Jean-Marc Notin, Yann Régis-Gianas, François Ripault, Carst Tankink. Maxime Dénès coordinated the release process. | Paris, January 2015, revised December 2015, | Hugo Herbelin, Matthieu Sozeau and the Coq development team | Potential sources of incompatibilities ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ List of typical changes to be done to adapt files from Coq 8.4 to Coq 8.5 when not using compatibility option ``-compat 8.4``. - Symptom: "The reference omega was not found in the current environment". Cause: "Require Omega" does not import the tactic "omega" any more Possible solutions: + use "Require Import OmegaTactic" (not compatible with 8.4) + use "Require Import Omega" (compatible with 8.4) + add definition "Ltac omega := Coq.omega.Omega.omega." 
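
  For instance, the second remedy above in use (a minimal sketch, assuming a script that only needs linear arithmetic over nat)::

     Require Import Omega.   (* 8.4-compatible way of getting the omega tactic *)

     Goal forall n : nat, n <= n + 0.
     Proof. intros; omega. Qed.
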
- Symptom: "intuition" cannot solve a goal (not working anymore on nonstandard connective) Cause: "intuition" had an accidental non-uniform behavior fixed on nonstandard connectives Possible solutions: + use "dintuition" instead; it is stronger than "intuition" and works uniformly on nonstandard connectives, such as n-ary conjunctions or disjunctions (not compatible with 8.4) + do the script differently - Symptom: The constructor foo (in type bar) expects n arguments. Cause: parameters must now be given in patterns Possible solutions: + use option "Set Asymmetric Patterns" (compatible with 8.4) + add "_" for the parameters (not compatible with 8.4) + turn the parameters into implicit arguments (compatible with 8.4) - Symptom: "NPeano.Nat.foo" not existing anymore\ Possible solutions: + use "Nat.foo" instead Symptom: typing problems with proj1_sig or similar Cause: coercion from sig to sigT and similar coercions have been removed so as to make the initial state easier to understand for beginners Solution: change proj1_sig into projT1 and similarly (compatible with 8.4) Other detailed changes - options for *coq* compilation (see below for ocaml). + [-I foo] is now deprecated and will not add directory foo to the coq load path (only for ocaml, see below). Just replace [-I foo] by [-Q foo ""] in your project file and re-generate makefile. Or perform the same operation directly in your makefile if you edit it by hand. + Option -R Foo bar is the same in v8.5 than in v8.4 concerning coq load path. + Option [-I foo -as bar] is unchanged but discouraged unless you compile ocaml code. Use -Q foo bar instead. for more details: see section "Customization at launch time" of the reference manual. - Command line options for ocaml Compilation of ocaml code (plugins) + [-I foo] is *not* deprecated to add foo to the ocaml load path. + [-I foo -as bar] adds foo to the ocaml load path *and* adds foo to the coq load path with logical name bar (shortcut for -I foo -Q foo bar). for more details: section "Customization at launch time" of the reference manual. - Universe Polymorphism. - Refinement, unification and tactics are now aware of universes, resulting in more localized errors. Universe inconsistencies should no more get raised at Qed time but during the proof. Unification *always* produces well-typed substitutions, hence some rare cases of unifications that succeeded while producing ill-typed terms before will now fail. - The [change p with c] tactic semantics changed, now typechecking [c] at each matching occurrence [t] of the pattern [p], and converting [t] with [c]. - Template polymorphic inductive types: the partial application of a template polymorphic type (e.g. list) is not polymorphic. An explicit parameter application (e.g [fun A => list A]) or [apply (list _)] will result in a polymorphic instance. - The type inference algorithm now takes opacity of constants into account. This may have effects on tactics using type inference (e.g. induction). Extra "Transparent" might have to be added to revert opacity of constants. Type classes. - When writing an ``Instance foo : Class A := {| proj := t |}`` (note the vertical bars), support for typechecking the projections using the type information and switching to proof mode is no longer available. Use ``{ }`` (without the vertical bars) instead. Tactic abstract. - Auxiliary lemmas generated by the abstract tactic are removed from the global environment and inlined in the proof term when a proof is ended with Qed. 
The behavior of 8.4 can be obtained by ending proofs with "Qed exporting" or "Qed exporting ident, .., ident". Details of changes in 8.5beta1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Logic - Primitive projections for records allow for a compact representation of projections, without parameters and avoid the behavior of defined projections that can unfold to a case expression. To turn the use of native projections on, use [Set Primitive Projections]. Record, Class and Structure types defined while this option is set will be defined with primitive projections instead of the usual encoding as a case expression. For compatibility, when p is a primitive projection, @p can be used to refer to the projection with explicit parameters, i.e. [@p] is definitionally equal to [λ params r. r.(p)]. Records with primitive projections have eta-conversion, the canonical form being [mkR pars (p1 t) ... (pn t)]. - New universe polymorphism (see reference manual) - New option -type-in-type to collapse the universe hierarchy (this makes the logic inconsistent). - The guard condition for fixpoints is now a bit stricter. Propagation of subterm value through pattern matching is restricted according to the return predicate. Restores compatibility of Coq's logic with the propositional extensionality axiom. May create incompatibilities in recursive programs heavily using dependent types. - Trivial inductive types are no longer defined in Type but in Prop, which leads to a non-dependent induction principle being generated in place of the dependent one. To recover the old behavior, explicitly define your inductive types in Set. Commands - A command "Variant" allows to define non-recursive variant types. - The command "Record foo ..." does not generate induction principles (foo_rect, foo_rec, foo_ind) anymore by default (feature wish #2693). The command "Variant foo ..." does not either. A flag "Set/Unset Nonrecursive Elimination Schemes" allows changing this. The tactic "induction" on a "Record" or a "Variant" is now actually doing "destruct". - The "Open Scope" command can now be given also a delimiter (e.g. Z). - The "Definition" command now allows the "Local" modifier, allowing for non-importable definitions. The same goes for "Axiom" and "Parameter". - Section-specific commands such as "Let" (resp. "Variable", "Hypothesis") used out of a section now behave like the corresponding "Local" command, i.e. "Local Definition" (resp. "Local Parameter", "Local Axiom"). (potential source of rare incompatibilities). - The "Let" command can now define local (co)fixpoints. - Command "Search" has been renamed into "SearchHead". The command name "Search" now behaves like former "SearchAbout". The latter name is deprecated. - "Search", "About", "SearchHead", "SearchRewrite" and "SearchPattern" now search for hypothesis (of the current goal by default) first. They now also support the goal selector prefix to specify another goal to search: e.g. "n:Search id". This is also true for SearchAbout although it is deprecated. - The coq/user-contrib directory and the XDG directories are no longer recursively added to the load path, so files from installed libraries now need to be fully qualified for the "Require" command to find them. The tools/update-require script can be used to convert a development. - A new Print Strategies command allows visualizing the opacity status of the whole engine. - The "Locate" command now searches through all sorts of qualified namespaces of Coq: terms, modules, tactics, etc. 
The old behavior of the command can be retrieved using the "Locate Term" command. - New "Derive" command to help writing program by derivation. - New "Refine Instance Mode" option that allows to deactivate the generation of obligations in incomplete typeclass instances, raising an error instead. - "Collection" command to name sets of section hypotheses. Named collections can be used in the syntax of "Proof using" to assert which section variables are used in a proof. - The "Optimize Proof" command can be placed in the middle of a proof to force the compaction of the data structure used to represent the ongoing proof (evar map). This may result in a lower memory footprint and speed up the execution of the following tactics. - "Optimize Heap" command to tell the OCaml runtime to perform a major garbage collection step and heap compaction. - ``Instance`` no longer treats the ``{|...|}`` syntax specially; it handles it in the same way as other commands, e.g. "Definition". Use the ``{...}`` syntax (no pipe symbols) to recover the old behavior. Specification Language - Slight changes in unification error messages. - Added a syntax $(...)$ that allows putting tactics in terms (may break user notations using "$(", fixable by inserting a space or rewriting the notation). - Constructors in pattern-matching patterns now respect the same rules regarding implicit arguments as in applicative position. The old behavior can be recovered by the command "Set Asymmetric Patterns". As a side effect, notations for constructors explicitly mentioning non-implicit parameters can now be used in patterns. Considering that the pattern language is already rich enough, binding local definitions is however now forbidden in patterns (source of incompatibilities for local definitions that delta-reduce to a constructor). - Type inference algorithm now granting opacity of constants. This might also affect behavior of tactics (source of incompatibilities, solvable by re-declaring transparent constants which were set opaque). - Existential variables are now referred to by an identifier and the relevant part of their instance is displayed by default. They can be reparsed. The naming policy is yet unstable and subject to changes in future releases. Tactics - New tactic engine allowing dependent subgoals, fully backtracking (also known as multiple success) tactics, as well as tactics which can consider multiple goals together. In the new tactic engine, instantiation information of existential variables is always propagated to tactics, removing the need to manually use the "instantiate" tactics to mark propagation points. * New tactical (a+b) inserts a backtracking point. When (a+b);c fails during the execution of c, it can backtrack and try b instead of a. * New tactical (once a) removes all the backtracking points from a (i.e. it selects the first success of a). * Tactic "constructor" is now fully backtracking. In case of incompatibilities (e.g. combinatoric explosion), the former behavior of "constructor" can be retrieved by using instead "[> once constructor ..]". Thanks to backtracking, undocumented "constructor " syntax is now equivalent to "[> once (constructor; tac) ..]". * New "multimatch" variant of "match" tactic which backtracks to new branches in case of a later failure. The "match" tactic is equivalent to "once multimatch". * New selector "all:" such that "all:tac" applies tactic "tac" to all the focused goals, instead of just the first one as is the default. 
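
    For instance, a minimal sketch of the selector on a two-goal proof::

       Goal True /\ True.
       Proof.
         split.
         (* "all:" applies the tactic to both generated subgoals *)
         all: exact I.
       Qed.
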
  * A corresponding new option Set Default Goal Selector "all" makes the tactics in scripts be applied to all the focused goals by default.
  * New selector "par:" such that "par:tac" applies the (terminating) tactic "tac" to all the focused goals in parallel. The number of workers can be selected with -async-proofs-tac-j and also limited using the coqworkmgr utility.
  * New tactics "revgoals", "cycle" and "swap" to reorder goals.
  * The semantics of recursive tactics (introduced with "Ltac t := ..." or "let rec t := ... in ...") changed slightly as t is now applied to every goal, not each goal independently. In particular it may be applied when no goals are left. This may cause tactics such as "let rec t := constructor;t" to loop indefinitely. The simple fix is to rewrite the recursive calls as follows: "let rec t := constructor;[t..]" which recovers the earlier behavior (source of rare incompatibilities).
  * New tactic language feature "numgoals" to count the number of goals. It is accompanied by a "guard" tactic which fails if a Boolean test over integers does not pass.
  * New tactical "[> ... ]" to apply tactics to individual goals.
  * New tactic "gfail" which works like "fail" except it will also fail if every goal has been solved.
  * The refine tactic is changed not to use an ad hoc typing algorithm to generate subgoals. It also uses the dependent subgoal feature to generate goals to materialize every existential variable which is introduced by the refinement (source of incompatibilities).
  * A tactic shelve is introduced to manage the subgoals which may be solved by unification: shelve removes every goal it is applied to from focus. These goals can later be called back into focus by the Unshelve command.
  * A variant shelve_unifiable only removes those goals which appear as existential variables in other goals. To emulate the old refine, use "refine c;shelve_unifiable". This can still cause incompatibilities on rare occasions.
  * New "give_up" tactic to skip over a goal. A proof containing given up goals cannot be closed with "Qed", but only with "Admitted".

- The implementation of the admit tactic has changed: no axiom is generated for the admitted subproof. "admit" is now an alias for "give_up". Code relying on this specific behavior of "admit" can be made to work by:

  * Adding an "Axiom" for each admitted subproof.
  * Adding a single "Axiom proof_admitted : False." and the Ltac definition "Ltac admit := case proof_admitted.".

- Matching using "lazymatch" was fundamentally modified. It now behaves like "match" (immediate execution of the matching branch) but without the backtracking mechanism in case of failure.
- New "tryif t then u else v" tactical which executes "u" in case of success of "t" and "v" in case of failure.
- New conversion tactic "native_compute": evaluates the goal (or a hypothesis) with a call-by-value strategy, using the OCaml native compiler. Useful on very intensive computations.
- New "cbn" tactic, a well-behaved simpl.
- Repeated identical calls to omega should now produce identical proof terms.
- New tactic btauto, a reflexive Boolean tautology solver.
- Tactic "tauto" was exceptionally able to destruct connectives other than the binary connectives "and", "or", "prod", "sum", "iff". This non-uniform behavior has been fixed (bug #2680) and tauto is slightly weaker (possible source of incompatibilities). On the opposite side, new tactic "dtauto" is able to destruct any record-like inductive type, superseding the old version of "tauto".
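
  A sketch of the kind of goal the new "dtauto" is meant to handle (the ternary conjunction ``and3`` is a hypothetical record-like inductive, not part of the standard library)::

     Inductive and3 (A B C : Prop) : Prop :=
       Conj3 : A -> B -> C -> and3 A B C.

     (* dtauto should destruct the and3 hypothesis and conclude by assumption *)
     Goal forall A B C : Prop, and3 A B C -> B.
     Proof. dtauto. Qed.
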
- Similarly, "intuition" has been made more uniform and, where it now fails, "dintuition" can be used (possible source of incompatibilities). - New option "Unset Intuition Negation Unfolding" for deactivating automatic unfolding of "not" in intuition. - Tactic notations can now be defined locally to a module (use "Local" prefix). - Tactic "red" now reduces head beta-iota redexes (potential source of rare incompatibilities). - Tactic "hnf" now reduces inner beta-iota redexes (potential source of rare incompatibilities). - Tactic "intro H" now reduces beta-iota redexes if these hide a product (potential source of rare incompatibilities). - In Ltac matching on patterns of the form "_ pat1 ... patn" now behaves like if matching on "?X pat1 ... patn", i.e. accepting "_" to be instantiated by an applicative term (experimental at this stage, potential source of incompatibilities). - In Ltac matching on goal, types of hypotheses are now interpreted in the %type scope (possible source of incompatibilities). - "change ... in ..." and "simpl ... in ..." now properly consider nested occurrences (possible source of incompatibilities since this alters the numbering of occurrences), but do not support nested occurrences. - Tactics simpl, vm_compute and native_compute can be given a notation string to a constant as argument. - When given a reference as argument, simpl, vm_compute and native_compute now strictly interpret it as the head of a pattern starting with this reference. - The "change p with c" tactic semantics changed, now type checking "c" at each matching occurrence "t" of the pattern "p", and converting "t" with "c". - Now "appcontext" and "context" behave the same. The old buggy behavior of "context" can be retrieved at parse time by setting the "Tactic Compat Context" flag (possible source of incompatibilities). - New introduction pattern p/c which applies lemma c on the fly on the hypothesis under consideration before continuing with introduction pattern p. - New introduction pattern [= x1 .. xn] applies "injection as [x1 .. xn]" on the fly if injection is applicable to the hypothesis under consideration (idea borrowed from Georges Gonthier). Introduction pattern [=] applies "discriminate" if a discriminable equality. - New introduction patterns * and ** to respectively introduce all forthcoming dependent variables and all variables/hypotheses dependent or not. - Tactic "injection c as ipats" now clears c if c refers to an hypothesis and moves the resulting equations in the hypotheses independently of the number of ipats, which has itself to be less than the number of new hypotheses (possible source of incompatibilities; former behavior obtainable by "Unset Injection L2R Pattern Order"). - Tactic "injection" now automatically simplifies subgoals "existT n p = existT n p'" into "p = p'" when "n" is in an inductive type for which a decidable equality scheme has been generated with "Scheme Equality" (possible source of incompatibilities). - New tactic "rewrite_strat" for generalized rewriting with user-defined strategies, subsuming autorewrite. - Injection can now also deduce equality of arguments of sort Prop, by using the option "Set Injection On Proofs" (disabled by default). Also improved the error messages. - Tactic "subst id" now supports id occurring in dependent local definitions. - Bugs fixed about intro-pattern "*" might lead to some rare incompatibilities. - New tactical "time" to display time spent executing its argument. 
- Tactics referring to or using a constant dependent on a section variable which has been cleared or renamed in the current goal context now fail (possible source of incompatibilities solvable by avoiding clearing the relevant hypotheses).
- New constructs "uconstr:c" and "type_term c" to build untyped terms.
- Binders in terms defined in Ltac (either "constr" or "uconstr") can now take their names from identifiers defined in Ltac. As a consequence, a name cannot be used in a binder "constr:(fun x => ...)" if an Ltac variable of that name already exists and does not contain an identifier. Source of occasional incompatibilities.
- The "refine" tactic now accepts untyped terms built with "uconstr" so that terms with holes can be constructed piecewise in Ltac.
- New bullets --, ++, **, ---, +++, ***, ... made available.
- More informative messages when the wrong bullet is used.
- Bullet suggestion when a subgoal is solved.
- New tactic "enough", symmetric to "assert", but with subgoals swapped, as a more friendly replacement of "cut".
- In destruct/induction, the experimental modifier "!" prefixing the hypothesis name indicates that the hypothesis should not be erased.
- Bug fixes in "inversion as" may occasionally lead to incompatibilities.
- Behavior of introduction patterns -> and <- made more uniform (hypothesis is cleared, rewrite in hypotheses and conclusion and erasing the variable when rewriting a variable).
- New experimental option "Set Standard Proposition Elimination Names" so that case analysis or induction on schemes in Type containing propositions now produces "H"-based names.
- Tactics from plugins are now active only when the corresponding module is imported (source of incompatibilities, solvable by adding an "Import"; in the particular case of Omega, use "Require Import OmegaTactic").
- Semantics of destruct/induction has been made more regular in some edge cases, possibly leading to incompatibilities:

  + new goals are now opened when the term does not match a subterm of the goal and has unresolved holes, while in 8.4 these holes were turned into existential variables
  + when no "at" option is given, the historical semantics which selects all subterms syntactically identical to the first subterm matching the given pattern is used
  + non-dependent destruct/induction on a hypothesis with premises in an inductive type with indices is fixed
  + residual local definitions are now correctly removed.

- The rename tactic may now replace variables in parallel.
- A new "Info" command replaces the "info" tactical discontinued in v8.4. It still gives informative results in many cases.
- The "info_auto" tactic is known to be broken and no longer prints a trace. Use "Info 1 auto" instead. The same goes for "info_trivial". On the other hand "info_eauto" still works fine, while "Info 1 eauto" prints a trivial trace.
- When using a lemma of the prototypical form "forall A, {a:A & P a}", "apply" and "apply in" no longer instantiate "A" with the current goal and use "a" as the proof, as they were sometimes doing, this being now considered too powerful a decision.

Program

- "Solve Obligations using" changed to "Solve Obligations with", consistent with "Proof with".
- Program Lemma, Definition now respect automatic introduction.
- Program Lemma, Definition, etc., now interpret "->" like Lemma and Definition as a non-dependent arrow (potential source of incompatibility).
- Add/document "Set Hide Obligations" (to hide obligations in the final term inside an implicit argument) and "Set Shrink Obligations" (to minimize dependencies of obligations defined by tactics). Notations - The syntax "x -> y" is now declared at level 99. In particular, it has now a lower priority than "<->": "A -> B <-> C" is now "A -> (B <-> C)" (possible source of incompatibilities) - Notations accept term-providing tactics using the $(...)$ syntax. - "Bind Scope" can no longer bind "Funclass" and "Sortclass". - A notation can be given a (compat "8.x") annotation, making it behave like a "only parsing" notation, but the annotation may lead to eventually issue warnings or errors in further versions when this notation is used. - More systematic insertion of spaces as a default for printing notations ("format" still available to override the default). - In notations, a level modifier referring to a non-existent variable is now considered an error rather than silently ignored. Tools - Option -I now only adds directories to the ml path. - Option -Q behaves as -R, except that the logical path of any loaded file has to be fully qualified. - Option -R no longer adds recursively to the ml path; only the root directory is added. (Behavior with respect to the load path is unchanged.) - Option -nois prevents coq/theories and coq/plugins to be recursively added to the load path. (Same behavior as with coq/user-contrib.) - coqdep accepts a -dumpgraph option generating a dot file. - Makefiles generated through coq_makefile have three new targets "quick" "checkproofs" and "vio2vo", allowing respectively to asynchronously compile the files without playing the proof scripts, asynchronously checking that the quickly generated proofs are correct and generating the object files from the quickly generated proofs. - The XML plugin was discontinued and removed from the source. - A new utility called coqworkmgr can be used to limit the number of concurrent workers started by independent processes, like make and CoqIDE. This is of interest for users of the par: goal selector. Interfaces - CoqIDE supports asynchronous edition of the document, ongoing tasks and errors are reported in the bottom right window. The number of workers taking care of processing proofs can be selected with -async-proofs-j. - CoqIDE highlights in yellow "unsafe" commands such as axiom declarations, and tactics like "give_up". - CoqIDE supports Proof General like key bindings; to activate the PG mode go to Edit -> Preferences -> Editor. For the documentation see Help -> Help for PG mode. - CoqIDE automatically retracts the locked area when one edits the locked text. - CoqIDE search and replace got regular expressions power. See the documentation of OCaml's Str module for the supported syntax. - Many CoqIDE windows, including the query one, are now detachable to improve usability on multi screen work stations. - Coqtop/coqc outputs highlighted syntax. Colors can be configured thanks to the COQ_COLORS environment variable, and their current state can be displayed with the -list-tags command line option. - Third party user interfaces can install their main loop in $COQLIB/toploop and call coqtop with the -toploop flag to select it. Internal Infrastructure - Many reorganizations in the ocaml source files. For instance, many internal a.s.t. of Coq are now placed in mli files in a new directory intf/, for instance constrexpr.mli or glob_term.mli. More details in dev/doc/changes. - The file states/initial.coq does not exist anymore. 
Instead, coqtop initially does a "Require" of Prelude.vo (or nothing when given the options -noinit or -nois). - The format of vo files has slightly changed: cf final comments in checker/cic.mli. - The build system does not produce anymore programs named coqtop.opt and a symbolic link to coqtop. Instead, coqtop is now directly an executable compiled with the best OCaml compiler available. The bytecode program coqtop.byte is still produced. Same for other utilities. - Some options of the ./configure script slightly changed: * The -coqrunbyteflags and its blank-separated argument is replaced by option -vmbyteflags which expects a comma-separated argument. * The -coqtoolsbyteflags option is discontinued, see -no-custom instead. Miscellaneous - ML plugins now require a "DECLARE PLUGIN \"foo\"" statement. The "foo" name must be exactly the name of the ML module that will be loaded through a "Declare ML \"foo\"" command. Details of changes in 8.5beta2 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Logic - The VM now supports inductive types with up to 8388851 non-constant constructors and up to 8388607 constant ones. Specification language - Syntax "$(tactic)$" changed to "ltac: tactic". Tactics - A script using the admit tactic can no longer be concluded by either Qed or Defined. In the first case, Admitted can be used instead. In the second case, a subproof should be used. - The easy tactic and the now tactical now have a more predictable behavior, but they might now discharge some previously unsolved goals. Extraction - Definitions extracted to Haskell GHC should no longer randomly segfault when some Coq types cannot be represented by Haskell types. - Definitions can now be extracted to Json for post-processing. Tools - Option -I -as has been removed, and option -R -as has been deprecated. In both cases, option -R can be used instead. - coq_makefile now generates double-colon rules for rules such as clean. API - The interface of [change] has changed to take a [change_arg], which can be built from a [constr] using [make_change_arg]. Details of changes in 8.5beta3 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Commands - New command "Redirect" to redirect the output of a command to a file. - New command "Undelimit Scope" to remove the delimiter of a scope. - New option "Strict Universe Declaration", set by default. It enforces the declaration of all polymorphic universes appearing in a definition when introducing it. - New command "Show id" to show goal named id. - Option "Virtual Machine" removed. Tactics - New flag "Regular Subst Tactic" which fixes "subst" in situations where it failed to substitute all substitutable equations or failed to simplify cycles, or accidentally unfolded local definitions (flag is off by default). - New flag "Loose Hint Behavior" to handle hints loaded but not imported in a special way. It accepts three distinct flags: * "Lax", which is the default one, sets the old behavior, i.e. a non-imported hint behaves the same as an imported one. * "Warn" outputs a warning when a non-imported hint is used. Note that this is an over-approximation, because a hint may be triggered by an eauto run that will eventually fail and backtrack. * "Strict" changes the behavior of an unloaded hint to the one of the fail tactic, allowing to emulate the hopefully future import-scoped hint mechanism. - New compatibility flag "Universal Lemma Under Conjunction" which let tactics working under conjunctions apply sublemmas of the form "forall A, ... -> A". 
- New compatibility flag "Bracketing Last Introduction Pattern" which can be set so that the last disjunctive-conjunctive introduction pattern given to "intros" automatically complete the introduction of its subcomponents, as the the disjunctive-conjunctive introduction patterns in non-terminal position already do. - New flag "Shrink Abstract" that minimalizes proofs generated by the abstract tactical w.r.t. variables appearing in the body of the proof. Program - The "Shrink Obligations" flag now applies to all obligations, not only those solved by the automatic tactic. - Importing Program no longer overrides the "exists" tactic (potential source of incompatibilities). - Hints costs are now correctly taken into account (potential source of incompatibilities). - Documented the Hint Cut command that allows control of the proof search during typeclass resolution (see reference manual). API - Some functions from pretyping/typing.ml and their derivatives were potential source of evarmap leaks, as they dropped their resulting evarmap. The situation was clarified by renaming them according to a ``unsafe_*`` scheme. Their sound variant is likewise renamed to their old name. The following renamings were made. * ``Typing.type_of`` -> ``unsafe_type_of`` * ``Typing.e_type_of`` -> ``type_of`` * A new ``e_type_of`` function that matches the ``e_`` prefix policy * ``Tacmach.pf_type_of`` -> ``pf_unsafe_type_of`` * A new safe ``pf_type_of`` function. All uses of ``unsafe_*`` functions should be eventually eliminated. Tools - Added an option -w to control the output of coqtop warnings. - Configure now takes an optional -native-compiler (yes|no) flag replacing -no-native-compiler. The new flag is set to no by default under Windows. - Flag -no-native-compiler was removed and became the default for coqc. If precompilation of files for native conversion test is desired, use -native-compiler. - The -compile command-line option now takes the full path of the considered file, including the ".v" extension, and outputs a warning if such an extension is lacking. - The -require and -load-vernac-object command-line options now take a logical path of a given library rather than a physical path, thus they behave like Require [Import] path. - The -vm command-line option has been removed. Standard Library - There is now a Coq.Compat.Coq84 library, which sets the various compatibility options and does a few redefinitions to make Coq behave more like Coq v8.4. The standard way of putting Coq in v8.4 compatibility mode is to pass the command line flags "-require Coq.Compat.Coq84 -compat 8.4". Details of changes in 8.5 ~~~~~~~~~~~~~~~~~~~~~~~~~ Tools - Flag "-compat 8.4" now loads Coq.Compat.Coq84. The standard way of putting Coq in v8.4 compatibility mode is to pass the command line flag "-compat 8.4". It can be followed by "-require Coq.Compat.AdmitAxiom" if the 8.4 behavior of admit is needed, in which case it uses an axiom. Specification language - Syntax "$(tactic)$" changed to "ltac:(tactic)". Tactics - Syntax "destruct !hyp" changed to "destruct (hyp)", and similarly for induction (rare source of incompatibilities easily solvable by removing parentheses around "hyp" when not for the purpose of keeping the hypothesis). - Syntax "p/c" for on-the-fly application of a lemma c before introducing along pattern p changed to p%c1..%cn. The feature and syntax are in experimental stage. - "Proof using" does not clear unused section variables. 
- Tactic "refine" has been changed back to the 8.4 behavior of shelving subgoals that occur in other subgoals. The "refine" tactic of 8.5beta3 has been renamed "simple refine"; it does not shelve any subgoal. - New tactical "unshelve tac" which grab existential variables put on the tactic shelve by the execution of "tac". Details of changes in 8.5pl1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Critical bugfix - The subterm relation for the guard condition was incorrectly defined on primitive projections (#4588) Plugin development tools - add a .merlin target to the makefile Various performance improvements (time, space used by .vo files) Other bugfixes - Fix order of arguments to Big.compare_case in ExtrOcamlZBigInt.v - Added compatibility coercions from Specif.v which were present in Coq 8.4. - Fixing a source of inefficiency and an artificial dependency in the printer in the congruence tactic. - Allow to unset the refinement mode of Instance in ML - Fixing an incorrect use of prod_appvect on a term which was not a product in setoid_rewrite. - Add -compat 8.4 econstructor tactics, and tests - Add compatibility Nonrecursive Elimination Schemes - Fixing the "No applicable tactic" uninformative error message regression on apply. - Univs: fix get_current_context (bug #4603, part I) - Fix a bug in Program coercion code - Fix handling of arity of definitional classes. - #4630: Some tactics are 20x slower in 8.5 than 8.4. - #4627: records with no declared arity can be template polymorphic. - #4623: set tactic too weak with universes (regression) - Fix incorrect behavior of CS resolution - #4591: Uncaught exception in directory browsing. - CoqIDE is more resilient to initialization errors. - #4614: "Fully check the document" is uninterruptible. - Try eta-expansion of records only on non-recursive ones - Fix bug when a sort is ascribed to a Record - Primitive projections: protect kernel from erroneous definitions. - Fixed bug #4533 with previous Keyed Unification commit - Win: kill unreliable hence do not waitpid after kill -9 (Close #4369) - Fix strategy of Keyed Unification - #4608: Anomaly "output_value: abstract value (outside heap)". - #4607: do not read native code files if native compiler was disabled. - #4105: poor escaping in the protocol between CoqIDE and coqtop. - #4596: [rewrite] broke in the past few weeks. - #4533 (partial): respect declared global transparency of projections in unification.ml - #4544: Backtrack on using full betaiota reduction during keyed unification. - #4540: CoqIDE bottom progress bar does not update. - Fix regression from 8.4 in reflexivity - #4580: [Set Refine Instance Mode] also used for Program Instance. - #4582: cannot override notation [ x ]. MAY CREATE INCOMPATIBILITIES, see #4683. - STM: Print/Extraction have to be skipped if -quick - #4542: CoqIDE: STOP button also stops workers - STM: classify some variants of Instance as regular `` `Fork `` nodes. - #4574: Anomaly: Uncaught exception Invalid_argument("splay_arity"). - Do not give a name to anonymous evars anymore. See bug #4547. - STM: always stock in vio files the first node (state) of a proof - STM: not delegate proofs that contain Vernac(Module|Require|Import), #4530 - Don't fail fatally if PATH is not set. - #4537: Coq 8.5 is slower in typeclass resolution. - #4522: Incorrect "Warning..." on windows. - #4373: coqdep does not know about .vio files. - #3826: "Incompatible module types" is uninformative. - #4495: Failed assertion in metasyntax.ml. - #4511: evar tactic can create non-typed evars. 
- #4503: mixing universe polymorphic and monomorphic variables and definitions in sections is unsupported. - #4519: oops, global shadowed local universe level bindings. - #4506: Anomaly: File "pretyping/indrec.ml", line 169, characters 14-20: Assertion failed. - #4548: CoqIDE crashes when going back one command Details of changes in 8.5pl2 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Critical bugfix - Checksums of .vo files dependencies were not correctly checked. - Unicode-to-ASCII translation was not injective, leading in a soundness bug in the native compiler. Other bugfixes - #4097: more efficient occur-check in presence of primitive projections - #4398: type_scope used consistently in "match goal". - #4450: eauto does not work with polymorphic lemmas - #4677: fix alpha-conversion in notations needing eta-expansion. - Fully preserve initial order of hypotheses in "Regular Subst Tactic" mode. - #4644: a regression in unification. - #4725: Function (Error: Conversion test raised an anomaly) and Program (Error: Cannot infer this placeholder of type) - #4747: Problem building Coq 8.5pl1 with OCaml 4.03.0: Fatal warnings - #4752: CoqIDE crash on files not ended by ".v". - #4777: printing inefficiency with implicit arguments - #4818: "Admitted" fails due to undefined universe anomaly after calling "destruct" - #4823: remote counter: avoid thread race on sockets - #4841: -verbose flag changed semantics in 8.5, is much harder to use - #4851: [nsatz] cannot handle duplicated hypotheses - #4858: Anomaly: Uncaught exception Failure("hd"). Please report. in variant of nsatz - #4880: [nsatz_compute] generates invalid certificates if given redundant hypotheses - #4881: synchronizing "Declare Implicit Tactic" with backtrack. - #4882: anomaly with Declare Implicit Tactic on hole of type with evars - Fix use of "Declare Implicit Tactic" in refine. triggered by CoqIDE - #4069, #4718: congruence fails when universes are involved. Universes - Disallow silently dropping universe instances applied to variables (forward compatible) - Allow explicit universe instances on notations, when they can apply to the head reference of their expansion. Build infrastructure - New update on how to find camlp5 binary and library at configure time. Details of changes in 8.5pl3 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Critical bugfix - #4876: Guard checker incompleteness when using primitive projections Other bugfixes - #4780: Induction with universe polymorphism on was creating ill-typed terms. - #4673: regression in setoid_rewrite, unfolding let-ins for type unification. - #4754: Regression in setoid_rewrite, allow postponed unification problems to remain. - #4769: Anomaly with universe polymorphic schemes defined inside sections. - #3886: Program: duplicate obligations of mutual fixpoints. - #4994: Documentation typo. - #5008: Use the "md5" command on OpenBSD. - #5007: Do not assume the "TERM" environment variable is always set. - #4606: Output a break before a list only if there was an empty line. - #5001: metas not cleaned properly in clenv_refine_in. - #2336: incorrect glob data for module symbols (bug #2336). - #4832: Remove extraneous dot in error message. - Anomaly in printing a unification error message. - #4947: Options which take string arguments are not backwards compatible. - #4156: micromega cache files are now hidden files. - #4871: interrupting par:abstract kills coqtop. - #5043: [Admitted] lemmas pick up section variables. - Fix name of internal refine ("simple refine"). - #5062: probably a typo in Strict Proofs mode. 
- #5065: Anomaly: Not a proof by induction.
- Restore native compiler optimizations, they had been disabled since 8.5!
- #5077: failure on typing a fixpoint with evars in its type.
- Fix recursive notation bug.
- #5095: irrelevant too strict test in let-in abstraction.
- Ensuring that the evar name is preserved by "rename".
- #4887: confusion between using and with in documentation of firstorder.
- Bug in subst with let-ins.
- #4762: eauto weaker than auto.
- Remove if_then_else (was buggy). Use tryif instead.
- #4970: confusion between special "{" and non-special "{{" in notations.
- #4529: primitive projections unfolding.
- #4416: Incorrect "Error: Incorrect number of goals".
- #4863: abstract in typeclass hint fails.
- #5123: unshelve can impact typeclass resolution.
- Fix a collision about the meta-variable ".." in recursive notations.
- Fix printing of info_auto.
- #3209: Not_found due to an occur-check cycle.
- #5097: status of evars refined by "clear" in ltac: closed wrt evars.
- #5150: Missing dependency of the test-suite subsystems in prerequisite.
- Fix a bug in error printing of unif constraints.
- #3941: Do not stop propagation of signals when Coq is busy.
- #4822: Incorrect assertion in cbn.
- #3479: parsing of "{" and "}" when a keyword starts with "{" or "}".
- #5127: Memory corruption with the VM.
- #5102: bullets parsing broken by calls to parse_entry.

Various documentation improvements

Version 8.4
-----------

Summary of changes
~~~~~~~~~~~~~~~~~~

Coq version 8.4 contains the result of three long-term projects: a new modular library of arithmetic by Pierre Letouzey, a new proof engine by Arnaud Spiwack and a new communication protocol for CoqIDE by Vincent Gross.

The new modular library of arithmetic extends, generalizes and unifies the existing libraries on Peano arithmetic (types nat, N and BigN), positive arithmetic (type positive), integer arithmetic (Z and BigZ) and machine word arithmetic (type Int31). It provides unified notations (e.g. systematic use of add and mul for denoting the addition and multiplication operators), systematic and generic development of operators and properties of these operators for all the types mentioned above, including gcd, pcm, power, square root, base 2 logarithm, division, modulo, bitwise operations, logical shifts, comparisons, iterators, ...

The most visible feature of the new proof engine is the support for structured scripts (bullets and proof brackets) but, even if not yet user-available, the new engine also provides the basis for refining existential variables using tactics, for applying tactics to several goals simultaneously, for reordering goals, all features which are planned for the next release. The new proof engine forced Pierre Letouzey to reimplement info and Show Script differently.

Before version 8.4, CoqIDE was linked to Coq with the graphical interface living in a separate thread. From version 8.4, CoqIDE is a separate process communicating with Coq through a textual channel. This allows for more robust interfacing, the ability to interrupt Coq without interrupting the interface, and the ability to manage several sessions in parallel. Relying on the infrastructure work made by Vincent Gross, Pierre Letouzey, Pierre Boutillier and Pierre-Marie Pédrot contributed various refinements of CoqIDE.

Coq 8.4 also comes with a variety of smaller-scale changes and improvements regarding the different components of the system.
The underlying logic has been extended with η-conversion thanks to Hugo Herbelin, Stéphane Glondu and Benjamin Grégoire. The addition of η-conversion is justified by the confidence that the formulation of the Calculus of Inductive Constructions based on typed equality (such as the one considered in Lee and Werner to build a set-theoretic model of CIC :cite:`LeeWerner11`) is applicable to the concrete implementation of Coq. The underlying logic benefited also from a refinement of the guard condition for fixpoints by Pierre Boutillier, the point being that it is safe to propagate the information about structurally smaller arguments through β-redexes that are blocked by the “match” construction (blocked commutative cuts). Relying on the added permissiveness of the guard condition, Hugo Herbelin could extend the pattern matching compilation algorithm so that matching over a sequence of terms involving dependencies of a term or of the indices of the type of a term in the type of other terms is systematically supported. Regarding the high-level specification language, Pierre Boutillier introduced the ability to give implicit arguments to anonymous functions, Hugo Herbelin introduced the ability to define notations with several binders (e.g. ``exists x y z, P``), Matthieu Sozeau made the typeclass inference mechanism more robust and predictable, Enrico Tassi introduced a command Arguments that generalizes Implicit Arguments and Arguments Scope for assigning various properties to arguments of constants. Various improvements in the type inference algorithm were provided by Matthieu Sozeau and Hugo Herbelin with contributions from Enrico Tassi. Regarding tactics, Hugo Herbelin introduced support for referring to expressions occurring in the goal by pattern in tactics such as set or destruct. Hugo Herbelin also relied on ideas from Chung-Kil Hur’s Heq plugin to introduce automatic computation of occurrences to generalize when using destruct and induction on types with indices. Stéphane Glondu introduced new tactics :tacn:`constr_eq`, :tacn:`is_evar`, and :tacn:`has_evar`, to be used when writing complex tactics. Enrico Tassi added support to fine-tuning the behavior of :tacn:`simpl`. Enrico Tassi added the ability to specify over which variables of a section a lemma has to be exactly generalized. Pierre Letouzey added a tactic timeout and the interruptibility of :tacn:`vm_compute`. Bug fixes and miscellaneous improvements of the tactic language came from Hugo Herbelin, Pierre Letouzey and Matthieu Sozeau. Regarding decision tactics, Loïc Pottier maintained nsatz, moving in particular to a typeclass based reification of goals while Frédéric Besson maintained Micromega, adding in particular support for division. Regarding commands, Stéphane Glondu provided new commands to analyze the structure of type universes. Regarding libraries, a new library about lists of a given length (called vectors) has been provided by Pierre Boutillier. A new instance of finite sets based on Red-Black trees and provided by Andrew Appel has been adapted for the standard library by Pierre Letouzey. In the library of real analysis, Yves Bertot changed the definition of :math:`\pi` and provided a proof of the long-standing fact yet remaining unproved in this library, namely that :math:`sin \frac{\pi}{2} = 1`. Pierre Corbineau maintained the Mathematical Proof Language (C-zar). Bruno Barras and Benjamin Grégoire maintained the call-by-value reduction machines. 
The extraction mechanism benefited from several improvements provided by Pierre Letouzey. Pierre Letouzey maintained the module system, with contributions from Élie Soubiran. Julien Forest maintained the Function command. Matthieu Sozeau maintained the setoid rewriting mechanism.

Coq related tools have been upgraded too. In particular, coq\_makefile has been largely revised by Pierre Boutillier. Also, patches from Adam Chlipala for coqdoc have been integrated by Pierre Boutillier. Bruno Barras and Pierre Letouzey maintained the `coqchk` checker. Pierre Courtieu and Arnaud Spiwack contributed new features for using Coq through Proof General. The Dp plugin has been removed. Use the plugin provided with Why 3 instead (http://why3.lri.fr/).

Under the hood, the Coq architecture benefited from improvements in terms of efficiency and robustness, especially regarding universe management and existential variable management, thanks to Pierre Letouzey and Yann Régis-Gianas with contributions from Stéphane Glondu and Matthias Puech. The build system is maintained by Pierre Letouzey with contributions from Stéphane Glondu and Pierre Boutillier. A new backtracking mechanism simplifying the task of external interfaces has been designed by Pierre Letouzey.

The general maintenance was done by Pierre Letouzey, Hugo Herbelin, Pierre Boutillier, Matthieu Sozeau and Stéphane Glondu, with specific contributions from Guillaume Melquiond, Julien Narboux and Pierre-Marie Pédrot.

Packaging tools were provided by Pierre Letouzey (Windows), Pierre Boutillier (MacOS), and Stéphane Glondu (Debian). Releasing, testing and benchmarking support was provided by Jean-Marc Notin.

Many suggestions for improvements were motivated by feedback from users, on either the bug tracker or the Coq-Club mailing list. Special thanks go to the users who contributed patches, starting with Tom Prince. Other patch contributors include Cédric Auger, David Baelde, Dan Grayson, Paolo Herms, Robbert Krebbers, Marc Lasson, Hendrik Tews and Eelis van der Weegen.

| Paris, December 2011
| Hugo Herbelin
|

Potential sources of incompatibilities
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The main known incompatibilities between 8.3 and 8.4 are consequences of the following changes:

- The reorganization of the library of numbers: Several definitions have new names or are defined in modules of different names, but special care has been taken to make this renaming transparent to the user thanks to compatibility notations. However, some definitions have changed, which might require some adaptations. The most noticeable examples are:

  + The "?=" notation now binds to Pos.compare rather than the former Pcompare (now Pos.compare_cont).
  + Changes in names may induce different automatically generated names in proof scripts (e.g. when issuing "destruct Z_le_gt_dec").
  + Z.add has a new definition, hence applying "simpl" on subterms of its body might give different results than before.
  + BigN.shiftl and BigN.shiftr have their argument order reversed, and the power function in BigN now takes two BigN.

- Other changes in libraries:

  + The definition of functions over "vectors" (lists of fixed length) has changed.
  + TheoryList.v has been removed.

- Slight changes in tactics:

  + Less unfolding of fixpoints when applying destruct or inversion on a fixpoint hiding an inductive type (add an extra call to simpl to preserve compatibility).
  + Less unexpected local definitions when applying "destruct" (incompatibilities solvable by adapting hypothesis names).
  + Tactic "apply" might succeed more often, e.g. by now solving pattern-matching of the form ?f x y = g(x,y) (compatibility ensured by using "Unset Tactic Pattern Unification"), but also because it supports (full) betaiota (using "simple apply" might then help).
  + Tactic autorewrite no longer instantiates pre-existing existential variables.
  + Tactic "info" is now available only for auto, eauto and trivial.

- Miscellaneous changes:

  + The command "Load" is now atomic for backtracking (use "Unset Atomic Load" for compatibility).

Details of changes in 8.4beta
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Logic

- Standard eta-conversion now supported (dependent product only).
- Guard condition improvement: the subterm property is propagated through beta-redexes blocked by pattern-matching, as in "(match v with C .. => fun x => u end) x"; this allows for instance to use "rewrite ... in ..." without breaking the guard condition.

Specification language and notations

- Maximal implicit arguments can now be set locally by { }. The registration traverses fixpoints and lambdas. Because there is conversion in types, maximal implicit arguments are not taken into account in partial applications (use the eta-expanded form with explicit { } instead).
- Added support for recursive notations with binders (allows for instance to write "exists x y z, P").
- Structure/Record printing can be disabled by "Unset Printing Records". In addition, it can be controlled on a type-by-type basis using "Add Printing Record" or "Add Printing Constructor".
- Pattern-matching compilation algorithm: in "match x, y with ... end", possible dependencies of x (or of the indices of its type) in the type of y are now taken into account.

Tactics

- New proof engine.
- Scripts can now be structured thanks to bullets - * + and to subgoal delimitation via { }. Note: for use with Proof General, a cvs version of Proof General no older than mid-July 2011 is currently required.
- Support for tactical "info" is suspended.
- Support for command "Show Script" is suspended.
- New tactics constr_eq, is_evar and has_evar for use in Ltac (DOC TODO).
- Removed the two-argument variant of "decide equality".
- New experimental tactical "timeout" that runs a tactic under a time limit. Since the limit is for the moment expressed in seconds, this feature should rather be avoided in scripts meant to be machine-independent.
- Fix in "destruct": removal of unexpected local definitions in context might result in some rare incompatibilities (solvable by adapting hypothesis names).
- Introduction pattern "_" made more robust.
- Tactic (and Eval command) vm_compute can now be interrupted via Ctrl-C.
- Unification in "apply" supports unification of patterns of the form ?f x y = g(x,y) (compatibility ensured by using "Unset Tactic Pattern Unification"). It also supports (full) betaiota.
- Tactic autorewrite no longer instantiates pre-existing existential variables (theoretical source of possible incompatibilities).
- Tactic "dependent rewrite" now supports equality in "sig".
- Tactic omega now understands Zpred (wish #1912) and can prove any goal from a context containing an arithmetical contradiction (wish #2236).
- Using "auto with nocore" disables the use of the "core" database (wish #2188). This pseudo-database "nocore" can also be used with trivial and eauto.
- Tactics "set", "destruct" and "induction" accept incomplete terms and use the goal to complete the pattern, assuming it is unambiguous.
- When used on arguments with a dependent type, tactics such as "destruct", "induction", "case", "elim", etc.
now try to abstract automatically the dependencies over the arguments of the types (based on initial ideas from Chung-Kil Hur, extension to nested dependencies suggested by Dan Grayson) - Tactic "injection" now failing on an equality showing no constructors while it was formerly generalizing again the goal over the given equality. - In Ltac, the "context [...]" syntax has now a variant "appcontext [...]" allowing to match partial applications in larger applications. - When applying destruct or inversion on a fixpoint hiding an inductive type, recursive calls to the fixpoint now remain folded by default (rare source of incompatibility generally solvable by adding a call to simpl). - In an ltac pattern containing a "match", a final "| _ => _" branch could be used now instead of enumerating all remaining constructors. Moreover, the pattern "match _ with _ => _ end" now allows to match any "match". A "in" annotation can also be added to restrict to a precise inductive type. - The behavior of "simpl" can be tuned using the "Arguments" vernacular. In particular constants can be marked so that they are always/never unfolded by "simpl", or unfolded only when a set of arguments evaluates to a constructor. Last one can mark a constant so that it is unfolded only if the simplified term does not expose a match in head position. Commands - It is now mandatory to have a space (or tabulation or newline or end-of-file) after a "." ending a sentence. - In SearchAbout, the [ ] delimiters are now optional. - New command "Add/Remove Search Blacklist ...": a Search or SearchAbout or similar query will never mention lemmas whose qualified names contain any of the declared substrings. The default blacklisted substrings are ``_subproof``, ``Private_``. - When the output file of "Print Universes" ends in ".dot" or ".gv", the universe graph is printed in the DOT language, and can be processed by Graphviz tools. - New command "Print Sorted Universes". - The undocumented and obsolete option "Set/Unset Boxed Definitions" has been removed, as well as syntaxes like "Boxed Fixpoint foo". - A new option "Set Default Timeout n / Unset Default Timeout". - Qed now uses information from the reduction tactics used in proof script to avoid conversion at Qed time to go into a very long computation. - New command "Show Goal ident" to display the statement of a goal, even a closed one (available from Proof General). - Command "Proof" accept a new modifier "using" to force generalization over a given list of section variables at section ending (DOC TODO). - New command "Arguments" generalizing "Implicit Arguments" and "Arguments Scope" and that also allows to rename the parameters of a definition and to tune the behavior of the tactic "simpl". Module System - During subtyping checks, an opaque constant in a module type could now be implemented by anything of the right type, even if bodies differ. Said otherwise, with respect to subtyping, an opaque constant behaves just as a parameter. Coqchk was already implementing this, but not coqtop. - The inlining done during application of functors can now be controlled more precisely, by the annotations (no inline) or (inline at level XX). With the latter annotation, only functor parameters whose levels are lower or equal than XX will be inlined. The level of a parameter can be fixed by "Parameter Inline(30) foo". When levels aren't given, the default value is 100. One can also use the flag "Set Inline Level ..." to set a level (DOC TODO). 
- Print Assumptions should now handle opaque modules correctly (#2168).
- Print Module (Type) now tries to print more details, such as types and bodies of the module elements. Note that Print Module Type can be used on a module to display only its interface. The option "Set Short Module Printing" can be used to switch back to the earlier behavior where only field names were displayed.

Libraries

- Extension of the abstract part of Numbers, which now provides axiomatizations and results about many more integer functions, such as pow, gcd, lcm, sqrt, log2 and bitwise functions. These functions are implemented for nat, N, BigN, Z, BigZ. See in particular file NPeano for new functions about nat.
- The definition of types positive, N, Z is now in file BinNums.v
- Major reorganization of ZArith. The initial file ZArith/BinInt.v now contains an internal module Z implementing the Numbers interface for integers. This module Z regroups:

  * all functions over type Z: Z.add, Z.mul, ...
  * the minimal proofs of specifications for these functions: Z.add_0_l, ...
  * an instantiation of all derived properties proved generically in Numbers: Z.add_comm, Z.add_assoc, ...

  A large part of ZArith is now simply compatibility notations, for instance Zplus_comm is an alias for Z.add_comm. The direct use of module Z is now recommended instead of relying on these compatibility notations.
- Similar major reorganization of NArith, via a module N in NArith/BinNat.v
- Concerning the positive datatype, BinPos.v is now in a specific directory PArith, and contains an internal submodule Pos. We regroup there functions such as Pos.add, Pos.mul, etc. as well as many results about them. These results are here proved directly (no Number interface for strictly positive numbers).
- Note that in spite of the compatibility layers, all these reorganizations may induce some marginal incompatibilities in scripts. In particular:

  * the "?=" notation for positive now refers to a binary function Pos.compare, instead of the infamous ternary Pcompare (now Pos.compare_cont).
  * some hypothesis names generated by the system may change (typically for a "destruct Z_le_gt_dec") since naming is done after the short name of the head predicate (here now "le" in module Z instead of "Zle", etc).
  * the internals of Z.add have changed, now relying on Z.pos_sub.

- Also note the new notations introduced along with these libraries, such as "<?" and "<=?" for the boolean comparison functions.

CoqIDE

- CoqIDE now runs coqtop as a separate process, which can be interrupted or even killed and relaunched; on Windows, interrupting coqtop requires Windows XP SP1 or later.
- The communication between CoqIDE and coqtop is now done via a dialect of XML (DOC TODO).
- The backtrack engine of CoqIDE has been reworked; it now uses the "Backtrack" command similarly to Proof General.
- The CoqIDE parsing of sentences has been reworked and now supports tactic delimitation via { }.
- CoqIDE now accepts the Abort command (wish #2357).
- CoqIDE can read coq_makefile files as a "project file" and use it to automatically set options to send to coqtop.
- Preference files have moved to $XDG_CONFIG_HOME/coq and accelerators are not stored as a list anymore.

Tools

- Coq now searches directories specified in COQPATH, $XDG_DATA_HOME/coq, $XDG_DATA_DIRS/coq, and user-contribs before the standard library.
- The Coq rc file has moved to $XDG_CONFIG_HOME/coq.
- Major changes to coq_makefile:

  * mli/mlpack/mllib taken into account, ml not preprocessed anymore, ml4 work;
  * mlihtml generates doc of mli, install-doc installs the html doc in DOCDIR with the same policy as vo in COQLIB;
  * More variables are given by coqtop -config, others are defined only if the user hasn't defined them elsewhere. Consequently, the generated makefile should work directly on any architecture;
  * Packagers can take advantage of the $(DSTROOT) introduction. Installation can be made in $XDG_DATA_HOME/coq;
  * the -arg option allows to pass options as arguments to coqc.

Details of changes in 8.4beta2
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Commands

- Commands "Back" and "BackTo" now handle proof states. They may perform some extra steps of backtrack to avoid states where the proof state is unavailable (typically a closed proof).
- The commands "Suspend" and "Resume" have been removed.
- A basic Show Script has been reintroduced (no indentation).
- New command "Set Parsing Explicit" for deactivating parsing (and printing) of implicit arguments (useful for teaching).
- New command "Grab Existential Variables" to transform the unresolved evars at the end of a proof into goals.

Tactics

- Still no general "info" tactical, but new specific tactics info_auto, info_eauto, info_trivial which provide information on the proofs found by auto/eauto/trivial. Display of these details can also be activated by "Set Info Auto"/"Set Info Eauto"/"Set Info Trivial".
- Details on everything tried by auto/eauto/trivial during a proof search can be obtained by "debug auto", "debug eauto", "debug trivial" or by a global "Set Debug Auto"/"Set Debug Eauto"/"Set Debug Trivial".
- New command "r string" in the Ltac debugger that interprets "idtac string" in Ltac code as a breakpoint and jumps to its next use.
- Tactics from the Dp plugin (simplify, ergo, yices, cvc3, z3, cvcl, harvey, zenon, gwhy) have been removed, since Why2 has not been maintained for the last few years. The Why3 plugin should be a suitable replacement in most cases.

Libraries

- MSetRBT: a new implementation of MSets via Red-Black trees (initial contribution by Andrew Appel).
- MSetAVL: for maximal sharing with the new MSetRBT, the argument order of Node has changed (this should be transparent to regular MSets users).

Module System

- The names of modules (and module types) are now in a fully separated namespace from ordinary definitions: "Definition E:=0. Module E. End E." is now accepted.

CoqIDE

- CoqIDE now supports the "Restart" command, and "Undo" (with a warning). Better support for "Abort".

Details of changes in 8.4
~~~~~~~~~~~~~~~~~~~~~~~~~

Commands

- The "Reset" command is now supported again in files given to coqc or Load.
- "Show Script" now indents the displayed scripts again. It can also work correctly across Load'ed files if the option "Unset Atomic Load" is used.
- "Open Scope" can now be given the delimiter (e.g. Z) instead of the full scope name (e.g. Z_scope).

Notations

- Most compatibility notations of the standard library are now tagged as (compat xyz), where xyz is a former Coq version, for instance "8.3". These notations behave as (only parsing) notations, except that they may trigger warnings (or errors) when used while Coq is not in a corresponding -compat mode.
- To activate these compatibility warnings, use "Set Verbose Compat Notations" or the command-line flag -verbose-compat-notations.
- For a strict mode without these compatibility notations, use "Unset Compat Notations" or the command-line flag -no-compat-notations.

Tactics

- An annotation "eqn:H" or "eqn:?" can be added to a "destruct" or "induction" to make it generate equations in the spirit of "case_eq". The former syntax "_eqn" is discontinued.
- The name of the hypothesis introduced by tactic "remember" can be set via the new syntax "remember t as x eqn:H" (wish #2489).
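As a minimal sketch of the new "eqn:" annotation (the lemma and the hypothesis name ``Hb`` below are ours, chosen purely for illustration; ``remember t as x eqn:H`` uses the same form of annotation)::

   Lemma negb_negb_sketch : forall b : bool, negb (negb b) = b.
   Proof.
     intro b.
     (* "eqn:Hb" records the case analysis as an equation, so the first
        subgoal gets Hb : b = true and the second gets Hb : b = false. *)
     destruct b eqn:Hb.
     - reflexivity.
     - reflexivity.
   Qed.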
Libraries - Reals: changed definition of PI, no more axiom about sin(PI/2). - SetoidPermutation: a notion of permutation for lists modulo a setoid equality. - BigN: fixed the ocaml code doing the parsing/printing of big numbers. - List: a couple of lemmas added especially about no-duplication, partitions. - Init: Removal of the coercions between variants of sigma-types and subset types (possible source of incompatibility). Version 8.3 ----------- Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.3 is before all a transition version with refinements or extensions of the existing features and libraries and a new tactic nsatz based on Hilbert’s Nullstellensatz for deciding systems of equations over rings. With respect to libraries, the main evolutions are due to Pierre Letouzey with a rewriting of the library of finite sets FSets and a new round of evolutions in the modular development of arithmetic (library Numbers). The reason for making FSets evolve is that the computational and logical contents were quite intertwined in the original implementation, leading in some cases to longer computations than expected and this problem is solved in the new MSets implementation. As for the modular arithmetic library, it was only dealing with the basic arithmetic operators in the former version and its current extension adds the standard theory of the division, min and max functions, all made available for free to any implementation of :math:`\mathbb{N}`, :math:`\mathbb{Z}` or :math:`\mathbb{Z}/n\mathbb{Z}`. The main other evolutions of the library are due to Hugo Herbelin who made a revision of the sorting library (including a certified merge-sort) and to Guillaume Melquiond who slightly revised and cleaned up the library of reals. The module system evolved significantly. Besides the resolution of some efficiency issues and a more flexible construction of module types, Élie Soubiran brought a new model of name equivalence, the :math:`\Delta`-equivalence, which respects as much as possible the names given by the users. He also designed with Pierre Letouzey a new, convenient operator ``<+`` for nesting functor application that provides a light notation for inheriting the properties of cascading modules. The new tactic nsatz is due to Loïc Pottier. It works by computing Gröbner bases. Regarding the existing tactics, various improvements have been done by Matthieu Sozeau, Hugo Herbelin and Pierre Letouzey. Matthieu Sozeau extended and refined the typeclasses and Program features (the Russell language). Pierre Letouzey maintained and improved the extraction mechanism. Bruno Barras and Élie Soubiran maintained the Coq checker, Julien Forest maintained the Function mechanism for reasoning over recursively defined functions. Matthieu Sozeau, Hugo Herbelin and Jean-Marc Notin maintained coqdoc. Frédéric Besson maintained the Micromega platform for deciding systems of inequalities. Pierre Courtieu maintained the support for the Proof General Emacs interface. Claude Marché maintained the plugin for calling external provers (dp). Yves Bertot made some improvements to the libraries of lists and integers. Matthias Puech improved the search functions. Guillaume Melquiond usefully contributed here and there. Yann Régis-Gianas grounded the support for Unicode on a more standard and more robust basis. Though invisible from outside, Arnaud Spiwack improved the general process of management of existential variables. Pierre Letouzey and Stéphane Glondu improved the compilation scheme of the Coq archive. 
Vincent Gross provided support to CoqIDE. Jean-Marc Notin provided support for benchmarking and archiving. Many users helped by reporting problems, providing patches, suggesting improvements or making useful comments, either on the bug tracker or on the Coq-Club mailing list. This includes but not exhaustively Cédric Auger, Arthur Charguéraud, François Garillot, Georges Gonthier, Robin Green, Stéphane Lescuyer, Eelis van der Weegen, ... Though not directly related to the implementation, special thanks are going to Yves Bertot, Pierre Castéran, Adam Chlipala, and Benjamin Pierce for the excellent teaching materials they provided. | Paris, April 2010 | Hugo Herbelin | Details of changes ~~~~~~~~~~~~~~~~~~ Rewriting tactics - Tactic "rewrite" now supports rewriting on ad hoc equalities such as eq_true. - "Hint Rewrite" now checks that the lemma looks like an equation. - New tactic "etransitivity". - Support for heterogeneous equality (JMeq) in "injection" and "discriminate". - Tactic "subst" now supports heterogeneous equality and equality proofs that are dependent (use "simple subst" for preserving compatibility). - Added support for Leibniz-rewriting of dependent hypotheses. - Renamed "Morphism" into "Proper" and "respect" into "proper_prf" (possible source of incompatibility). A partial fix is to define "Notation Morphism R f := (Proper (R%signature) f)." - New tactic variants "rewrite* by" and "autorewrite*" that rewrite respectively the first and all matches whose side-conditions are solved. - "Require Import Setoid" does not export all of "Morphisms" and "RelationClasses" anymore (possible source of incompatibility, fixed by importing "Morphisms" too). - Support added for using Chung-Kil Hur's Heq library for rewriting over heterogeneous equality (courtesy of the library's author). - Tactic "replace" supports matching terms with holes. Automation tactics - Tactic ``intuition`` now preserves inner ``iff`` and ``not`` (exceptional source of incompatibilities solvable by redefining ``intuition`` as ``unfold iff, not in *; intuition``, or, for iff only, by using ``Set Intuition Iff Unfolding``.) - Tactic ``tauto`` now proves classical tautologies as soon as classical logic (i.e. library ``Classical_Prop`` or ``Classical``) is loaded. - Tactic ``gappa`` has been removed from the Dp plugin. - Tactic ``firstorder`` now supports the combination of its ``using`` and ``with`` options. - New ``Hint Resolve ->`` (or ``<-``) for declaring iff's as oriented hints (wish #2104). - An inductive type as argument of the ``using`` option of ``auto`` / ``eauto`` / ``firstorder`` is interpreted as using the collection of its constructors. - New decision tactic "nsatz" to prove polynomial equations by computation of Groebner bases. Other tactics - Tactic "discriminate" now performs intros before trying to discriminate an hypothesis of the goal (previously it applied intro only if the goal had the form t1<>t2) (exceptional source of incompatibilities - former behavior can be obtained by "Unset Discriminate Introduction"). - Tactic "quote" now supports quotation of arbitrary terms (not just the goal). - Tactic "idtac" now displays its "list" arguments. - New introduction patterns "*" for introducing the next block of dependent variables and "**" for introducing all quantified variables and hypotheses. - Pattern Unification for existential variables activated in tactics and new option "Unset Tactic Evars Pattern Unification" to deactivate it. 
- Resolution of canonical structure is now part of the tactic's unification algorithm. - New tactic "decide lemma with hyp" for rewriting decidability lemmas when one knows which side is true. - Improved support of dependent goals over objects in dependent types for "destruct" (rare source of incompatibility that can be avoided by unsetting option "Dependent Propositions Elimination"). - Tactic "exists", "eexists", "destruct" and "edestruct" supports iteration using comma-separated arguments. - Tactic names "case" and "elim" now support clauses "as" and "in" and become then synonymous of "destruct" and "induction" respectively. - A new tactic name "exfalso" for the use of 'ex-falso quodlibet' principle. This tactic is simply a shortcut for "elimtype False". - Made quantified hypotheses get the name they would have if introduced in the context (possible but rare source of incompatibilities). - When applying a component of a conjunctive lemma, "apply in" (and sequences of "apply in") now leave the side conditions of the lemmas uniformly after the main goal (possible source of rare incompatibilities). - In "simpl c" and "change c with d", c can be a pattern. - Tactic "revert" now preserves let-in's making it the exact inverse of "intro". - New tactics "clear dependent H" and "revert dependent H" that clears (resp. reverts) H and all the hypotheses that depend on H. - Ltac's pattern-matching now supports matching metavariables that depend on variables bound upwards in the pattern. Tactic definitions - Ltac definitions support Local option for non-export outside modules. - Support for parsing non-empty lists with separators in tactic notations. - New command "Locate Ltac" to get the full name of an Ltac definition. Notations - Record syntax ``{|x=...; y=...|}`` now works inside patterns too. - Abbreviations from non-imported module now invisible at printing time. - Abbreviations now use implicit arguments and arguments scopes for printing. - Abbreviations to pure names now strictly behave like the name they refer to (make redirections of qualified names easier). - Abbreviations for applied constant now propagate the implicit arguments and arguments scope of the underlying reference (possible source of incompatibilities generally solvable by changing such abbreviations from e.g. ``Notation foo' := (foo x)`` to ``Notation foo' y := (foo x (y:=y))``). - The "where" clause now supports multiple notations per defined object. - Recursive notations automatically expand one step on the left for better factorization; recursion notations inner separators now ensured being tokens. - Added "Reserved Infix" as a specific shortcut of the corresponding "Reserved Notation". - Open/Close Scope command supports Global option in sections. Specification language - New support for local binders in the syntax of Record/Structure fields. - Fixpoint/CoFixpoint now support building part or all of bodies using tactics. - Binders given before ":" in lemmas and in definitions built by tactics are now automatically introduced (possible source of incompatibility that can be resolved by invoking "Unset Automatic Introduction"). - New support for multiple implicit arguments signatures per reference. Module system - Include Type is now deprecated since Include now accepts both modules and module types. - Declare ML Module supports Local option. - The sharing between non-logical object and the management of the name-space has been improved by the new "Delta-equivalence" on qualified name. 
- The include operator has been extended to high-order structures - Sequences of Include can be abbreviated via new syntax "<+". - A module (or module type) can be given several "<:" signatures. - Interactive proofs are now permitted in module type. Functors can hence be declared as Module Type and be used later to type themselves. - A functor application can be prefixed by a "!" to make it ignore any "Inline" annotation in the type of its argument(s) (for examples of use of the new features, see libraries Structures and Numbers). - Coercions are now active only when modules are imported (use "Set Automatic Coercions Import" to get the behavior of the previous versions of Coq). Extraction - When using (Recursive) Extraction Library, the filenames are directly the Coq ones with new appropriate extensions : we do not force anymore uncapital first letters for Ocaml and capital ones for Haskell. - The extraction now tries harder to avoid code transformations that can be dangerous for the complexity. In particular many eta-expansions at the top of functions body are now avoided, clever partial applications will likely be preserved, let-ins are almost always kept, etc. - In the same spirit, auto-inlining is now disabled by default, except for induction principles, since this feature was producing more frequently weird code than clear gain. The previous behavior can be restored via "Set Extraction AutoInline". - Unicode characters in identifiers are now transformed into ascii strings that are legal in Ocaml and other languages. - Harsh support of module extraction to Haskell and Scheme: module hierarchy is flattened, module abbreviations and functor applications are expanded, module types and unapplied functors are discarded. - Less unsupported situations when extracting modules to Ocaml. In particular module parameters might be alpha-renamed if a name clash is detected. - Extract Inductive is now possible toward non-inductive types (e.g. nat => int) - Extraction Implicit: this new experimental command allows to mark some arguments of a function or constructor for removed during extraction, even if these arguments don't fit the usual elimination principles of extraction, for instance the length n of a vector. - Files ExtrOcaml*.v in plugins/extraction try to provide a library of common extraction commands: mapping of basics types toward Ocaml's counterparts, conversions from/to int and big_int, or even complete mapping of nat,Z,N to int or big_int, or mapping of ascii to char and string to char list (in this case recognition of ascii constants is hard-wired in the extraction). Program - Streamlined definitions using well-founded recursion and measures so that they can work on any subset of the arguments directly (uses currying). - Try to automatically clear structural fixpoint prototypes in obligations to avoid issues with opacity. - Use return type clause inference in pattern-matching as in the standard typing algorithm. - Support [Local Obligation Tactic] and [Next Obligation with tactic]. - Use [Show Obligation Tactic] to print the current default tactic. - [fst] and [snd] have maximal implicit arguments in Program now (possible source of incompatibility). Type classes - Declaring axiomatic type class instances in Module Type should be now done via new command "Declare Instance", while the syntax "Instance" now always provides a concrete instance, both in and out of Module Type. - Use [Existing Class foo] to declare a preexisting object [foo] as a class. 
[foo] can be an inductive type or a constant definition. No projections or instances are defined.
- Various bug fixes and improvements: support for defined fields, anonymous instances, declarations giving terms, better handling of sections and [Context].

Commands

- New command "Timeout", which takes a number of seconds and a command: the command is interpreted and its execution is interrupted after that many seconds.
- New command "Compute", a shortcut for "Eval vm_compute in" applied to the given expression.
- New command "Fail", which interprets the given command and is successful iff that command fails on an error (but not an anomaly). Handy for tests and illustration of wrong commands.
- Most commands referring to a constant (e.g. Print or About) now support referring to the constant by a notation string.
- New option "Boolean Equality Schemes" to make generation of boolean equality automatic for datatypes (together with option "Decidable Equality Schemes", this replaces deprecated option "Equality Scheme").
- Made support for automatic generation of case analysis schemes available to the user (governed by option "Set Case Analysis Schemes").
- New command :n:`{? Global } Generalizable {| All | No } {| Variable | Variables } {* @ident}` to declare which identifiers are generalizable in `` `{} `` and `` `() `` binders.
- New command "Print Opaque Dependencies" to display opaque constants in addition to all variables, parameters or axioms a theorem or definition relies on.
- New command "Declare Reduction" to bind a name to a reduction expression, allowing to later use that name in "Eval ... in ...". This command accepts a Local variant.
- Syntax of Implicit Type now supports more than one block of variables of a given type.
- Command "Canonical Structure" now warns when it has no effect.
- Commands of the form "Set X" or "Unset X" now support "Local" and "Global" prefixes.

Library

- Use "standard" Coq names for the properties of eq and identity (e.g. refl_equal is now eq_refl). Support for compatibility is provided.
- The function Compare_dec.nat_compare is now defined directly, instead of relying on lt_eq_lt_dec. The earlier version is still available under the name nat_compare_alt.
- Lemmas in library Relations and Reals have been homogenized a bit.
- The implicit argument of Logic.eq is now maximally inserted, allowing to simply write "eq" instead of "@eq _" in morphism signatures.
- Wrongly named lemmas (Zlt_gt_succ and Zlt_succ_gt) fixed (potential source of incompatibilities).
- List library:

  + Definitions of list, length and app are now in Init/Datatypes. Support for compatibility is provided.
  + Definition of Permutation is now in Sorting/Permutation.v
  + Some other light revisions and extensions (possible source of incompatibilities solvable by qualifying names accordingly).

- In ListSet, set_map has been fixed (source of incompatibilities if used).
- Sorting library:

  + new mergesort of worst-case complexity O(n*ln(n)) made available in Mergesort.v;
  + the former notion of permutation up to setoid from Permutation.v is deprecated and moved to PermutSetoid.v;
  + heapsort from Heap.v of worst-case complexity O(n*n) is deprecated;
  + new file Sorted.v for some definitions of being sorted.

- Structure library. This new library is meant to contain generic structures such as types with equalities or orders, either in Module version (for now) or Type Classes (still to do):

  + DecidableType.v and OrderedType.v: initial notions for FSets/FMaps, left for compatibility but considered as deprecated.
  + Equalities.v and Orders.v: evolutions of the previous files, with fine-grain Module architecture, many variants, use of Equivalence and other relevant Type Classes notions.
  + OrdersTac.v: a generic tactic for solving chains of (in)equalities over variables. See {Nat,N,Z,P}OrderedType.v for concrete instances.
  + GenericMinMax.v: any ordered type can be equipped with min and max. We derive here all the generic properties of these functions.

- MSets library: an important evolution of the FSets library. "MSets" stands for Modular (Finite) Sets, by contrast with a forthcoming library of Class (Finite) Sets contributed by S. Lescuyer which will be integrated with the next release of Coq. The main features of MSets are:

  + The use of Equivalence, Proper and other Type Classes features easing the handling of setoid equalities.
  + The interfaces are now stated in iff-style. Old specifications are now derived properties.
  + The compare functions are now pure, and return a "comparison" value. Thanks to the CompSpec inductive type, reasoning on them remains easy.
  + Set structures requiring invariants (i.e. sorted lists) are built first as "Raw" sets (pure objects and separate proofs) and attached with their proofs thanks to a generic functor. "Raw" sets now have a proper interface and can be manipulated directly.

  Note: No Maps yet in MSets. The FSets library is still provided for compatibility, but will probably be considered as deprecated in the next release of Coq.
- Numbers library:

  + The abstract layer (NatInt, Natural/Abstract, Integer/Abstract) has been simplified and enhanced thanks to new features of the module system such as Include (see above). It has been extended to Euclidean division (three flavors for integers: Trunc, Floor and Math).
  + The arbitrary-large efficient numbers (BigN, BigZ, BigQ) have also been reworked. They benefit from the abstract layer improvements (especially for div and mod). Note that some specifications have slightly changed (compare, div, mod, shift{r,l}). Ring/Field should work better (true recognition of constants).

Tools

- Option -R now supports binding the Coq root read-only.
- New coqtop/coqc option -beautify to reformat .v files (usable e.g. to globally update notations).
- New tool beautify-archive to beautify a full archive of developments.
- New coqtop/coqc option -compat X.Y to simulate the general behavior of previous versions of Coq (provides e.g. support for 8.2 compatibility).

Coqdoc

- Lists have been revamped. List depth and scope are now determined by an "offside" whitespace rule.
- Text may be italicized by placing it in _underscores_.
- The "--index" flag takes a file name and changes the filename of the index.
- The "--toc-depth" flag takes an integer and limits the depth of headers which are included in the table of contents.
- The "--lib-name" flag takes a string which is printed in place of "Library" where library titles are called for. The "--no-lib-name" flag eliminates the extra title.
- New option "--parse-comments" to allow parsing of regular ``(* *)`` comments.
- New option "--plain-comments" to disable interpretation inside comments.
- New option "--interpolate" to try and typeset identifiers in Coq escapings using the available globalization information.
- New option "--external url root" to refer to external libraries.
- Links to section variables and notations now supported.

Internal infrastructure

- To avoid confusion with the repository of user's contributions, the subdirectory "contrib" has been renamed into "plugins".
On platforms supporting ocaml native dynlink, code located there is built as loadable plugins for coqtop. - An experimental build mechanism via ocamlbuild is provided. From the top of the archive, run ./configure as usual, and then ./build. Feedback about this build mechanism is most welcome. Compiling Coq on platforms such as Windows might be simpler this way, but this remains to be tested. - The Makefile system has been simplified and factorized with the ocamlbuild system. In particular "make" takes advantage of .mllib files for building .cma/.cmxa. The .vo files to compile are now listed in several vo.itarget files. Version 8.2 ----------- Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.2 adds new features, new libraries and improves on many various aspects. Regarding the language of Coq, the main novelty is the introduction by Matthieu Sozeau of a package of commands providing Haskell-style typeclasses. Typeclasses, which come with a few convenient features such as type-based resolution of implicit arguments, play a new landmark role in the architecture of Coq with respect to automation. For instance, thanks to typeclass support, Matthieu Sozeau could implement a new resolution-based version of the tactics dedicated to rewriting on arbitrary transitive relations. Another major improvement of Coq 8.2 is the evolution of the arithmetic libraries and of the tools associated with them. Benjamin Grégoire and Laurent Théry contributed a modular library for building arbitrarily large integers from bounded integers while Evgeny Makarov contributed a modular library of abstract natural and integer arithmetic together with a few convenient tactics. On his side, Pierre Letouzey made numerous extensions to the arithmetic libraries on :math:`\mathbb{Z}` and :math:`\mathbb{Q}`, including extra support for automation in presence of various number-theory concepts. Frédéric Besson contributed a reflective tactic based on Krivine-Stengle Positivstellensatz (the easy way) for validating provability of systems of inequalities. The platform is flexible enough to support the validation of any algorithm able to produce a “certificate” for the Positivstellensatz and this covers the case of Fourier-Motzkin (for linear systems in :math:`\mathbb{Q}` and :math:`\mathbb{R}`), Fourier-Motzkin with cutting planes (for linear systems in :math:`\mathbb{Z}`) and sum-of-squares (for non-linear systems). Evgeny Makarov made the platform generic over arbitrary ordered rings. Arnaud Spiwack developed a library of 31-bits machine integers and, relying on Benjamin Grégoire and Laurent Théry’s library, delivered a library of unbounded integers in base :math:`2^{31}`. As importantly, he developed a notion of “retro-knowledge” so as to safely extend the kernel-located bytecode-based efficient evaluation algorithm of Coq version 8.1 to use 31-bits machine arithmetic for efficiently computing with the library of integers he developed. Beside the libraries, various improvements were contributed to provide a more comfortable end-user language and more expressive tactic language. Hugo Herbelin and Matthieu Sozeau improved the pattern matching compilation algorithm (detection of impossible clauses in pattern matching, automatic inference of the return type). 
Hugo Herbelin, Pierre Letouzey and Matthieu Sozeau contributed various new convenient syntactic constructs and new tactics or tactic features: more inference of redundant information, better unification, better support for proof or definition by fixpoint, more expressive rewriting tactics, better support for meta-variables, more convenient notations... Élie Soubiran improved the module system, adding new features (such as an “include” command) and making it more flexible and more general. He and Pierre Letouzey improved the support for modules in the extraction mechanism. Matthieu Sozeau extended the Russell language, ending in an convenient way to write programs of given specifications, Pierre Corbineau extended the Mathematical Proof Language and the automation tools that accompany it, Pierre Letouzey supervised and extended various parts of the standard library, Stéphane Glondu contributed a few tactics and improvements, Jean-Marc Notin provided help in debugging, general maintenance and coqdoc support, Vincent Siles contributed extensions of the Scheme command and of injection. Bruno Barras implemented the ``coqchk`` tool: this is a stand-alone type checker that can be used to certify .vo files. Especially, as this verifier runs in a separate process, it is granted not to be “hijacked” by virtually malicious extensions added to Coq. Yves Bertot, Jean-Christophe Filliâtre, Pierre Courtieu and Julien Forest acted as maintainers of features they implemented in previous versions of Coq. Julien Narboux contributed to CoqIDE. Nicolas Tabareau made the adaptation of the interface of the old “setoid rewrite” tactic to the new version. Lionel Mamane worked on the interaction between Coq and its external interfaces. With Samuel Mimram, he also helped making Coq compatible with recent software tools. Russell O’Connor, Cezary Kaliszyk, Milad Niqui contributed to improve the libraries of integers, rational, and real numbers. We also thank many users and partners for suggestions and feedback, in particular Pierre Castéran and Arthur Charguéraud, the INRIA Marelle team, Georges Gonthier and the INRIA-Microsoft Mathematical Components team, the Foundations group at Radboud university in Nijmegen, reporters of bugs and participants to the Coq-Club mailing list. | Palaiseau, June 2008 | Hugo Herbelin | Details of changes ~~~~~~~~~~~~~~~~~~ Language - If a fixpoint is not written with an explicit { struct ... }, then all arguments are tried successively (from left to right) until one is found that satisfies the structural decreasing condition. - New experimental typeclass system giving ad-hoc polymorphism and overloading based on dependent records and implicit arguments. - New syntax "let 'pat := b in c" for let-binding using irrefutable patterns. - New syntax "forall {A}, T" for specifying maximally inserted implicit arguments in terms. - Sort of Record/Structure, Inductive and CoInductive defaults to Type if omitted. - (Co)Inductive types can be defined as records (e.g. "CoInductive stream := { hd : nat; tl : stream }.") - New syntax "Theorem id1:t1 ... with idn:tn" for proving mutually dependent statements. - Support for sort-polymorphism on constants denoting inductive types. - Several evolutions of the module system (handling of module aliases, functorial module types, an Include feature, etc). - Prop now a subtype of Set (predicative and impredicative forms). - Recursive inductive types in Prop with a single constructor of which all arguments are in Prop is now considered to be a singleton type. 
It consequently supports all eliminations to Prop, Set and Type. As a consequence, Acc_rect has now a more direct proof [possible source of easily fixed incompatibility in case of manual definition of a recursor in a recursive singleton inductive type]. Commands - Added option Global to "Arguments Scope" for section surviving. - Added option "Unset Elimination Schemes" to deactivate the automatic generation of elimination schemes. - Modification of the Scheme command so you can ask for the name to be automatically computed (e.g. Scheme Induction for nat Sort Set). - New command "Combined Scheme" to build combined mutual induction principles from existing mutual induction principles. - New command "Scheme Equality" to build a decidable (boolean) equality for simple inductive datatypes and a decision property over this equality (e.g. Scheme Equality for nat). - Added option "Set Equality Scheme" to make automatic the declaration of the boolean equality when possible. - Source of universe inconsistencies now printed when option "Set Printing Universes" is activated. - New option "Set Printing Existential Instances" for making the display of existential variable instances explicit. - Support for option "[id1 ... idn]", and "-[id1 ... idn]", for the "compute"/"cbv" reduction strategy, respectively meaning reduce only, or everything but, the constants id1 ... idn. "lazy" alone or followed by "[id1 ... idn]", and "-[id1 ... idn]" also supported, meaning apply all of beta-iota-zeta-delta, possibly restricting delta. - New command "Strategy" to control the expansion of constants during conversion tests. It generalizes commands Opaque and Transparent by introducing a range of levels. Lower levels are assigned to constants that should be expanded first. - New options Global and Local to Opaque and Transparent. - New command "Print Assumptions" to display all variables, parameters or axioms a theorem or definition relies on. - "Add Rec LoadPath" now provides references to libraries using partially qualified names (this holds also for coqtop/coqc option -R). - SearchAbout supports negated search criteria, reference to logical objects by their notation, and more generally search of subterms. - "Declare ML Module" now allows to import .cmxs files when Coq is compiled in native code with a version of OCaml that supports native Dynlink (>= 3.11). - Specific sort constraints on Record now taken into account. - "Print LoadPath" supports a path argument to filter the display. Libraries - Several parts of the libraries are now in Type, in particular FSets, SetoidList, ListSet, Sorting, Zmisc. This may induce a few incompatibilities. In case of trouble while fixing existing development, it may help to simply declare Set as an alias for Type (see file SetIsType). - New arithmetical library in theories/Numbers. It contains: * an abstract modular development of natural and integer arithmetics in Numbers/Natural/Abstract and Numbers/Integer/Abstract * an implementation of efficient computational bounded and unbounded integers that can be mapped to processor native arithmetics. See Numbers/Cyclic/Int31 for 31-bit integers and Numbers/Natural/BigN for unbounded natural numbers and Numbers/Integer/BigZ for unbounded integers. * some proofs that both older libraries Arith, ZArith and NArith and newer BigN and BigZ implement the abstract modular development. 
This allows in particular BigN and BigZ to already come with a large database of basic lemmas and some generic tactics (ring), This library has still an experimental status, as well as the processor-acceleration mechanism, but both its abstract and its concrete parts are already quite usable and could challenge the use of nat, N and Z in actual developments. Moreover, an extension of this framework to rational numbers is ongoing, and an efficient Q structure is already provided (see Numbers/Rational/BigQ), but this part is currently incomplete (no abstract layer and generic lemmas). - Many changes in FSets/FMaps. In practice, compatibility with earlier version should be fairly good, but some adaptations may be required. * Interfaces of unordered ("weak") and ordered sets have been factorized thanks to new features of Coq modules (in particular Include), see FSetInterface. Same for maps. Hints in these interfaces have been reworked (they are now placed in a "set" database). * To allow full subtyping between weak and ordered sets, a field "eq_dec" has been added to OrderedType. The old version of OrderedType is now called MiniOrderedType and functor MOT_to_OT allow to convert to the new version. The interfaces and implementations of sets now contain also such a "eq_dec" field. * FSetDecide, contributed by Aaron Bohannon, contains a decision procedure allowing to solve basic set-related goals (for instance, is a point in a particular set ?). See FSetProperties for examples. * Functors of properties have been improved, especially the ones about maps, that now propose some induction principles. Some properties of fold need less hypothesis. * More uniformity in implementations of sets and maps: they all use implicit arguments, and no longer export unnecessary scopes (see bug #1347) * Internal parts of the implementations based on AVL have evolved a lot. The main files FSetAVL and FMapAVL are now much more lightweight now. In particular, minor changes in some functions has allowed to fully separate the proofs of operational correctness from the proofs of well-balancing: well-balancing is critical for efficiency, but not anymore for proving that these trees implement our interfaces, hence we have moved these proofs into appendix files FSetFullAVL and FMapFullAVL. Moreover, a few functions like union and compare have been modified in order to be structural yet efficient. The appendix files also contains alternative versions of these few functions, much closer to the initial Ocaml code and written via the Function framework. - Library IntMap, subsumed by FSets/FMaps, has been removed from Coq Standard Library and moved into a user contribution Cachan/IntMap - Better computational behavior of some constants (eq_nat_dec and le_lt_dec more efficient, Z_lt_le_dec and Positive_as_OT.compare transparent, ...) (exceptional source of incompatibilities). - Boolean operators moved from module Bool to module Datatypes (may need to rename qualified references in script and force notations || and && to be at levels 50 and 40 respectively). - The constructors xI and xO of type positive now have postfix notations "~1" and "~0", allowing to write numbers in binary form easily, for instance 6 is 1~1~0 and 4*p is p~0~0 (see BinPos.v). - Improvements to NArith (Nminus, Nmin, Nmax), and to QArith (in particular a better power function). - Changes in ZArith: several additional lemmas (used in theories/Numbers), especially in Zdiv, Znumtheory, Zpower. 
Moreover, many results in Zdiv have been generalized: the divisor may simply be non-null instead of strictly positive (see lemmas with name ending by "_full"). An alternative file ZOdiv proposes a different behavior (the one of Ocaml) when dividing by negative numbers. - Changes in Arith: EqNat and Wf_nat now exported from Arith, some constructions on nat that were outside Arith are now in (e.g. iter_nat). - In SetoidList, eqlistA now expresses that two lists have similar elements at the same position, while the predicate previously called eqlistA is now equivlistA (this one only states that the lists contain the same elements, nothing more). - Changes in Reals: * Most statement in "sigT" (including the completeness axiom) are now in "sig" (in case of incompatibility, use proj1_sig instead of projT1, sig instead of sigT, etc). * More uniform naming scheme (identifiers in French moved to English, consistent use of 0 -- zero -- instead of O -- letter O --, etc). * Lemma on prod_f_SO is now on prod_f_R0. * Useless hypothesis of ln_exists1 dropped. * New Rlogic.v states a few logical properties about R axioms. * RIneq.v extended and made cleaner. - Slight restructuration of the Logic library regarding choice and classical logic. Addition of files providing intuitionistic axiomatizations of descriptions: Epsilon.v, Description.v and IndefiniteDescription.v. - Definition of pred and minus made compatible with the structural decreasing criterion for use in fixpoints. - Files Relations/Rstar.v and Relations/Newman.v moved out to the user contribution repository (contribution CoC_History). New lemmas about transitive closure added and some bound variables renamed (exceptional risk of incompatibilities). - Syntax for binders in terms (e.g. for "exists") supports anonymous names. Notations, coercions, implicit arguments and type inference - More automation in the inference of the return clause of dependent pattern-matching problems. - Experimental allowance for omission of the clauses easily detectable as impossible in pattern-matching problems. - Improved inference of implicit arguments. - New options "Set Maximal Implicit Insertion", "Set Reversible Pattern Implicit", "Set Strongly Strict Implicit" and "Set Printing Implicit Defensive" for controlling inference and use of implicit arguments. - New modifier in "Implicit Arguments" to force an implicit argument to be maximally inserted. - New modifier of "Implicit Arguments" to enrich the set of implicit arguments. - New options Global and Local to "Implicit Arguments" for section surviving or non-export outside module. - Level "constr" moved from 9 to 8. - Structure/Record now printed as Record (unless option Printing All is set). - Support for parametric notations defining constants. - Insertion of coercions below product types refrains to unfold constants (possible source of incompatibility). - New support for fix/cofix in notations. Tactic Language - Second-order pattern-matching now working in Ltac "match" clauses (syntax for second-order unification variable is "@?X"). - Support for matching on let bindings in match context using syntax "H := body" or "H := body : type". - Ltac accepts integer arguments (syntax is "ltac:nnn" for nnn an integer). - The general sequence tactical "expr_0 ; [ expr_1 | ... | expr_n ]" is extended so that at most one expr_i may have the form "expr .." or just "..". Also, n can be different from the number of subgoals generated by expr_0. 
In this case, the value of expr (or idtac in case of just "..") is applied to the intermediate subgoals to make the number of tactics equal to the number of subgoals.
- A name used as the name of the parameter of a lemma (like f in "apply f_equal with (f:=t)") is now interpreted as an Ltac variable if such a variable exists (this is a possible source of incompatibility; it can be fixed by renaming the variables of an Ltac function into names that do not clash with the lemma's parameter names used in the tactic).
- New syntax "Ltac tac ::= ..." to rebind a tactic to a new expression.
- "let rec ... in ... " now supported for expressions without explicit parameters; interpretation is lazy, contrary to "let ... in ..."; hence, the "rec" keyword can be used to turn the argument of a "let ... in ..." into a lazy one.
- Patterns for hypothesis types in "match goal" are now interpreted in type_scope.
- A bound variable whose name is not used elsewhere now serves as a metavariable in "match" and gets instantiated by an identifier (allowing e.g. to extract the name of a statement like "exists x, P x").
- New printing of the Ltac call trace for better debugging.

Tactics

- New tactics "apply -> term", "apply <- term", "apply -> term in ident", "apply <- term in ident" for applying equivalences (iff).
- Slight improvement of the hnf and simpl tactics when applied to expressions with explicit occurrences of match or fix.
- New tactics "eapply in", "erewrite", "erewrite in".
- New tactics "ediscriminate", "einjection", "esimplify_eq".
- Tactics "discriminate", "injection", "simplify_eq" now support any term as argument. Clause "with" is also supported.
- Unfoldable references can be given by a notation's string rather than by name in unfold.
- The "with" arguments are now typed using information from the current goal: this allows support for coercions and more inference of implicit arguments.
- Application of "f_equal"-style lemmas works better.
- Tactics elim, case, destruct and induction now support variants eelim, ecase, edestruct and einduction.
- Tactics destruct and induction now support the "with" option and the "in" clause option. If the option "in" is used, an equality is added to remember the term to which the induction or case analysis was applied (a possible source of parsing incompatibilities when destruct or induction is part of a let-in expression in Ltac; extra parentheses are then required).
- New support for the "as" clause in tactics "apply in" and "eapply in".
- Some new intro patterns:
  * intro pattern "?A" generates a fresh name based on A. Caveat about a slight loss of compatibility: some intro patterns don't need a space between them. In particular intros ?a?b used to be legal and equivalent to intros ? a ? b. Now it is still legal but equivalent to intros ?a ?b.
  * intro pattern "(A & ... & Y & Z)" is a synonym for "(A,(...,(Y,Z)))" for right-associative constructs like /\ or exists.
- Several syntax extensions concerning "rewrite":
  * "rewrite A,B,C" can be used to rewrite A, then B, then C. These rewrites occur only on the first subgoal: in particular, side-conditions of the "rewrite A" are not affected by the "rewrite B,C".
  * "rewrite A by tac" applies tac to all side-conditions generated by the "rewrite A".
  * "rewrite A at n" selects occurrences to rewrite: rewriting only happens at the n-th exact occurrence of the first successful matching of A in the goal.
  * "rewrite 3 A" or "rewrite 3!A" is equivalent to "rewrite A,A,A".
* "rewrite !A" means rewriting A as long as possible (and at least once). * "rewrite 3?A" means rewriting A at most three times. * "rewrite ?A" means rewriting A as long as possible (possibly never). * many of the above extensions can be combined with each other. - Introduction patterns better respect the structure of context in presence of missing or extra names in nested disjunction-conjunction patterns [possible source of rare incompatibilities]. - New syntax "rename a into b, c into d" for "rename a into b; rename c into d" - New tactics "dependent induction/destruction H [ generalizing id_1 .. id_n ]" to do induction-inversion on instantiated inductive families à la BasicElim. - Tactics "apply" and "apply in" now able to reason modulo unfolding of constants (possible source of incompatibility in situations where apply may fail, e.g. as argument of a try or a repeat and in a ltac function); versions that do not unfold are renamed into "simple apply" and "simple apply in" (usable for compatibility or for automation). - Tactics "apply" and "apply in" now able to traverse conjunctions and to select the first matching lemma among the components of the conjunction; tactic "apply" also able to apply lemmas of conclusion an empty type. - Tactic "apply" now supports application of several lemmas in a row. - Tactics "set" and "pose" can set functions using notation "(f x1..xn := c)". - New tactic "instantiate" (without argument). - Tactic firstorder "with" and "using" options have their meaning swapped for consistency with auto/eauto (source of incompatibility). - Tactic "generalize" now supports "at" options to specify occurrences and "as" options to name the quantified hypotheses. - New tactic "specialize H with a" or "specialize (H a)" allows to transform in-place a universally-quantified hypothesis (H : forall x, T x) into its instantiated form (H : T a). Nota: "specialize" was in fact there in earlier versions of Coq, but was undocumented, and had a slightly different behavior. - New tactic "contradict H" can be used to solve any kind of goal as long as the user can provide afterwards a proof of the negation of the hypothesis H. If H is already a negation, say ~T, then a proof of T is asked. If the current goal is a negation, say ~U, then U is saved in H afterwards, hence this new tactic "contradict" extends earlier tactic "swap", which is now obsolete. - Tactics f_equal is now done in ML instead of Ltac: it now works on any equality of functions, regardless of the arity of the function. - New options "before id", "at top", "at bottom" for tactics "move"/"intro". - Some more debug of reflexive omega (``romega``), and internal clarifications. Moreover, romega now has a variant ``romega with *`` that can be also used on non-Z goals (nat, N, positive) via a call to a translation tactic named zify (its purpose is to Z-ify your goal...). This zify may also be used independently of romega. - Tactic "remember" now supports an "in" clause to remember only selected occurrences of a term. - Tactic "pose proof" supports name overriding in case of specialization of an hypothesis. - Semi-decision tactic "jp" for first-order intuitionistic logic moved to user contributions (subsumed by "firstorder"). Program - Moved useful tactics in theories/Program and documented them. - Add Program.Basics which contains standard definitions for functional programming (id, apply, flip...) - More robust obligation handling, dependent pattern-matching and well-founded definitions. 
- New syntax " dest term as pat in term " for destructing objects using an irrefutable pattern while keeping equalities (use this instead of "let" in Programs). - Program CoFixpoint is accepted, Program Fixpoint uses the new way to infer which argument decreases structurally. - Program Lemma, Axiom etc... now permit to have obligations in the statement iff they can be automatically solved by the default tactic. - Renamed "Obligations Tactic" command to "Obligation Tactic". - New command "Preterm [ of id ]" to see the actual term fed to Coq for debugging purposes. - New option "Transparent Obligations" to control the declaration of obligations as transparent or opaque. All obligations are now transparent by default, otherwise the system declares them opaque if possible. - Changed the notations "left" and "right" to "in_left" and "in_right" to hide the proofs in standard disjunctions, to avoid breaking existing scripts when importing Program. Also, put them in program_scope. Type Classes - New "Class", "Instance" and "Program Instance" commands to define classes and instances documented in the reference manual. - New binding construct " [ Class_1 param_1 .. param_n, Class_2 ... ] " for binding type classes, usable everywhere. - New command " Print Classes " and " Print Instances some_class " to print tables for typeclasses. - New default eauto hint database "typeclass_instances" used by the default typeclass instance search tactic. - New theories directory "theories/Classes" for standard typeclasses declarations. Module Classes.RelationClasses is a typeclass port of Relation_Definitions plus a generic development of algebra on n-ary heterogeneous predicates. Setoid rewriting - Complete (and still experimental) rewrite of the tactic based on typeclasses. The old interface and semantics are almost entirely respected, except: + Import Setoid is now mandatory to be able to call setoid_replace and declare morphisms. + "-->", "++>" and "==>" are now right associative notations declared at level 55 in scope signature_scope. Their introduction may break existing scripts that defined them as notations with different levels. + One needs to use [Typeclasses unfold [cst]] if [cst] is used as an abbreviation hiding products in types of morphisms, e.g. if ones redefines [relation] and declares morphisms whose type mentions [relation]. + The [setoid_rewrite]'s semantics change when rewriting with a lemma: it can rewrite two different instantiations of the lemma at once. Use [setoid_rewrite H at 1] for (almost) the usual semantics. [setoid_rewrite] will also try to rewrite under binders now, and can succeed on different terms than before. In particular, it will unify under let-bound variables. When called through [rewrite], the semantics are unchanged though. + [Add Morphism term : id] has different semantics when used with parametric morphism: it will try to find a relation on the parameters too. The behavior has also changed with respect to default relations: the most recently declared Setoid/Relation will be used, the documentation explains how to customize this behavior. + Parametric Relation and Morphism are declared differently, using the new [Add Parametric] commands, documented in the manual. + Setoid_Theory is now an alias to Equivalence, scripts building objects of type Setoid_Theory need to unfold (or "red") the definitions of Reflexive, Symmetric and Transitive in order to get the same goals as before. Scripts which introduced variables explicitly will not break. 
+ The order of subgoals when doing [setoid_rewrite] with side-conditions is always the same: first the new goal, then the conditions.
- New standard library modules: ``Classes.Morphisms`` declares standard morphisms on ``refl`` / ``sym`` / ``trans`` relations. ``Classes.Morphisms_Prop`` declares morphisms on propositional connectives and ``Classes.Morphisms_Relations`` on generalized predicate connectives. ``Classes.Equivalence`` declares notations and tactics related to equivalences, and ``Classes.SetoidTactics`` defines the setoid_replace tactics and some support for the ``Add *`` interface, notably the tactic applied automatically before each ``Add Morphism`` proof.
- User-defined subrelations are supported, as well as higher-order morphisms and rewriting under binders. The tactic is also extensible entirely in Ltac. The documentation has been updated to cover these features.
- [setoid_rewrite] and [rewrite] now support the [at] modifier to select occurrences to rewrite, and both use the [setoid_rewrite] code, even when rewriting with Leibniz equality if occurrences are specified.

Extraction

- Improved behavior of the OCaml extraction of modules: name clashes should not happen anymore.
- The command Extract Inductive now has a syntax for infix notations. This allows, in particular, mapping Coq lists and pairs onto OCaml ones:
  + Extract Inductive list => list [ "[]" "(::)" ].
  + Extract Inductive prod => "(*)" [ "(,)" ].
- In pattern matching, a default pattern "| _ -> ..." is now used whenever possible if several branches are identical. For instance, functions corresponding to decidability of equalities are now linear instead of quadratic.
- A new instruction Extraction Blacklist id1 .. idn makes it possible to prevent filename conflicts with existing code, for instance when extracting module List to OCaml.

CoqIDE

- The CoqIDE font defaults to monospace so that indentation is meaningful.
- CoqIDE supports nested goals and any other kind of declaration in the middle of a proof.
- Undoing non-tactic commands in CoqIDE works faster.
- New CoqIDE menu for activating the display of various kinds of implicit information.
- Added the possibility to choose the location of tabs in CoqIDE (in Edit->Preferences->Misc).
- New Open and Save As dialogs in CoqIDE which filter ``*.v`` files.

Tools

- New stand-alone .vo file verifier "coqchk".
- Extended -I coqtop/coqc option to specify a logical dir: "-I dir -as coqdir".
- New coqtop/coqc option -exclude-dir to exclude subdirs for option -R.
- The binary "parser" has been renamed to "coq-parser".
- Improved coqdoc and dump of globalization information to give more meta-information on identifiers. All categories of Coq definitions are supported, which makes typesetting trivial in the generated documentation. Support for hyperlinking and indexing developments in the TeX output has been implemented as well.

Miscellaneous

- The Coq installation provides enough files so that OCaml extensions do not need the Coq sources in order to be compiled (this assumes OCaml 3.10 and Camlp5).
- New commands "Set Whelp Server" and "Set Whelp Getter" to customize the Whelp search tool.
- Syntax of "Test Printing Let ref" and "Test Printing If ref" changed into "Test Printing Let for ref" and "Test Printing If for ref".
- An overhauled build system (new Makefiles); see dev/doc/build-system.txt.
- Added a -browser option to the configure script.
- Build a shared library for the C part of Coq, and use it by default on non-(Windows or MacOS) systems. Bytecode executables are now pure.
The behavior is configurable with -coqrunbyteflags, -coqtoolsbyteflags and -custom configure options. - Complexity tests can be skipped by setting the environment variable COQTEST_SKIPCOMPLEXITY. Version 8.1 ----------- Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8.1 adds various new functionalities. Benjamin Grégoire implemented an alternative algorithm to check the convertibility of terms in the Coq type checker. This alternative algorithm works by compilation to an efficient bytecode that is interpreted in an abstract machine similar to Xavier Leroy’s ZINC machine. Convertibility is performed by comparing the normal forms. This alternative algorithm is specifically interesting for proofs by reflection. More generally, it is convenient in case of intensive computations. Christine Paulin implemented an extension of inductive types allowing recursively non-uniform parameters. Hugo Herbelin implemented sort-polymorphism for inductive types (now called template polymorphism). Claudio Sacerdoti Coen improved the tactics for rewriting on arbitrary compatible equivalence relations. He also generalized rewriting to arbitrary transition systems. Claudio Sacerdoti Coen added new features to the module system. Benjamin Grégoire, Assia Mahboubi and Bruno Barras developed a new, more efficient and more general simplification algorithm for rings and semirings. Laurent Théry and Bruno Barras developed a new, significantly more efficient simplification algorithm for fields. Hugo Herbelin, Pierre Letouzey, Julien Forest, Julien Narboux and Claudio Sacerdoti Coen added new tactic features. Hugo Herbelin implemented matching on disjunctive patterns. New mechanisms made easier the communication between Coq and external provers. Nicolas Ayache and Jean-Christophe Filliâtre implemented connections with the provers cvcl, Simplify and zenon. Hugo Herbelin implemented an experimental protocol for calling external tools from the tactic language. Matthieu Sozeau developed Russell, an experimental language to specify the behavior of programs with subtypes. A mechanism to automatically use some specific tactic to solve unresolved implicit has been implemented by Hugo Herbelin. Laurent Théry’s contribution on strings and Pierre Letouzey and Jean-Christophe Filliâtre’s contribution on finite maps have been integrated to the Coq standard library. Pierre Letouzey developed a library about finite sets “à la Objective Caml”. With Jean-Marc Notin, he extended the library on lists. Pierre Letouzey’s contribution on rational numbers has been integrated and extended. Pierre Corbineau extended his tactic for solving first-order statements. He wrote a reflection-based intuitionistic tautology solver. Pierre Courtieu, Julien Forest and Yves Bertot added extra support to reason on the inductive structure of recursively defined functions. Jean-Marc Notin significantly contributed to the general maintenance of the system. He also took care of ``coqdoc``. Pierre Castéran contributed to the documentation of (co)inductive types and suggested improvements to the libraries. Pierre Corbineau implemented a declarative mathematical proof language, usable in combination with the tactic-based style of proof. Finally, many users suggested improvements of the system through the Coq-Club mailing list and bug-tracker systems, especially user groups from INRIA Rocquencourt, Radboud University, University of Pennsylvania and Yale University. 
| Palaiseau, July 2006
| Hugo Herbelin
|

Details of changes in 8.1beta
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Logic

- Added sort-polymorphism on inductive families
- Allowance for recursively non-uniform parameters in inductive types

Syntax

- No more support for version 7 syntax and for translation to version 8 syntax.
- In fixpoints, the { struct ... } annotation is no longer mandatory when only one of the arguments has an inductive type
- Added disjunctive patterns in match-with patterns
- Support for primitive interpretation of string literals
- Extended support for Unicode ranges

Commands

- Added "Print Ltac qualid" to print a user-defined tactic.
- Added "Print Rewrite HintDb" to print the content of a DB used by autorewrite.
- Added "Print Canonical Projections".
- Added "Example" as a synonym of "Definition".
- Added "Proposition" and "Corollary" as extra synonyms of "Lemma".
- New command "Whelp" to send requests to the Helm database of proofs formalized in the Calculus of Inductive Constructions.
- Command "functional induction" has been re-implemented from the new "Function" command.

Ltac and tactic syntactic extensions

- New primitive "external" for communication with tools external to Coq
- New semantics for "match t with": if a clause returns a tactic, it is now applied to the current goal. If it fails, the next clause or next matching subterm is tried (i.e. it behaves as "match goal with" does). The keyword "lazymatch" can be used to delay the evaluation of tactics occurring in matching clauses.
- Hint base names can be parametric in auto and trivial.
- Occurrence values can be parametric in unfold, pattern, etc.
- Added entry constr_may_eval for tactic extensions.
- Low-priority term printer made available in ML-written tactic extensions.
- "Tactic Notation" extended to allow notations of tacticals.

Tactics

- New implementation and generalization of ``setoid_*`` (``setoid_rewrite``, ``setoid_symmetry``, ``setoid_transitivity``, ``setoid_reflexivity`` and ``autorewrite``). New syntax for declaring relations and morphisms (old syntax still working with minor modifications, but deprecated).
- New implementation (still experimental) of the ring tactic with a built-in notion of coefficients and a better usage of setoids.
- New conversion tactic "vm_compute": evaluates the goal (or a hypothesis) with a call-by-value strategy, using the compiled version of terms.
- When rewriting H where H is not directly a Coq equality, H is first searched for a registered setoid equality before starting to reduce in H. This is unlikely to break any script. Should this happen nonetheless, one can manually insert some "unfold ... in H" before rewriting.
- Fixed various bugs about (setoid) rewrite ... in ... (in particular bug #5941)
- "rewrite ... in" now accepts a clause as the place where to rewrite instead of just a simple hypothesis name. For instance: ``rewrite H in H1,H2 |- *`` means ``rewrite H in H1; rewrite H in H2; rewrite H`` ``rewrite H in * |-`` will try ``rewrite H in Hi`` for all hypotheses Hi <> H.
- Added "dependent rewrite term" and "dependent rewrite term in hyp".
- Added "autorewrite with ... in hyp [using ...]".
- Tactic "replace" now accepts a "by" tactic clause.
- Added "clear - id" to clear all hypotheses except the ones depending on id.
- The argument of Declare Left Step and Declare Right Step is now a term (it used to be a reference).
- Omega now handles arbitrary precision integers.
- Several bug fixes in Reflexive Omega (romega).
- Idtac can now be left implicit in a [...|...]
construct: for instance, [ foo | | bar ] stands for [ foo | idtac | bar ]. - Fixed a "fold" bug (noncritical but possible source of incompatibilities). - Added classical_left and classical_right which transforms ``|- A \/ B`` into ``~B |- A`` and ``~A |- B`` respectively. - Added command "Declare Implicit Tactic" to set up a default tactic to be used to solve unresolved subterms of term arguments of tactics. - Better support for coercions to Sortclass in tactics expecting type arguments. - Tactic "assert" now accepts "as" intro patterns and "by" tactic clauses. - New tactic "pose proof" that generalizes "assert (id:=p)" with intro patterns. - New introduction pattern "?" for letting Coq choose a name. - Introduction patterns now support side hypotheses (e.g. intros [|] on "(nat -> nat) -> nat" works). - New introduction patterns "->" and "<-" for immediate rewriting of introduced hypotheses. - Introduction patterns coming after nontrivial introduction patterns now force full introduction of the first pattern (e.g. ``intros [[|] p]`` on ``nat->nat->nat`` now behaves like ``intros [[|?] p]``) - Added "eassumption". - Added option 'using lemmas' to auto, trivial and eauto. - Tactic "congruence" is now complete for its intended scope (ground equalities and inequalities with constructors). Furthermore, it tries to equates goal and hypotheses. - New tactic "rtauto" solves pure propositional logic and gives a reflective version of the available proof. - Numbering of "pattern", "unfold", "simpl", ... occurrences in "match with" made consistent with the printing of the return clause after the term to match in the "match-with" construct (use "Set Printing All" to see hidden occurrences). - Generalization of induction "induction x1...xn using scheme" where scheme is an induction principle with complex predicates (like the ones generated by function induction). - Some small Ltac tactics has been added to the standard library (file Tactics.v): * f_equal : instead of using the different f_equalX lemmas * case_eq : a "case" without loss of information. An equality stating the current situation is generated in every sub-cases. * swap : for a negated goal ~B and a negated hypothesis H:~A, swap H asks you to prove A from hypothesis B * revert : revert H is generalize H; clear H. Extraction - All type parts should now disappear instead of sometimes producing _ (for instance in Map.empty). - Haskell extraction: types of functions are now printed, better unsafeCoerce mechanism, both for hugs and ghc. - Scheme extraction improved, see http://www.pps.jussieu.fr/~letouzey/scheme. - Many bug fixes. Modules - Added "Locate Module qualid" to get the full path of a module. - Module/Declare Module syntax made more uniform. - Added syntactic sugar "Declare Module Export/Import" and "Module Export/Import". - Added syntactic sugar "Module M(Export/Import X Y: T)" and "Module Type M(Export/Import X Y: T)" (only for interactive definitions) - Construct "with" generalized to module paths: T with (Definition|Module) M1.M2....Mn.l := l'. Notations - Option "format" aware of recursive notations. - Added insertion of spaces by default in recursive notations w/o separators. - No more automatic printing box in case of user-provided printing "format". - New notation "exists! x:A, P" for unique existence. - Notations for specific numerals now compatible with generic notations of numerals (e.g. 
"1" can be used to denote the unit of a group without hiding 1%nat) Libraries - New library on String and Ascii characters (contributed by L. Thery). - New library FSets+FMaps of finite sets and maps. - New library QArith on rational numbers. - Small extension of Zmin.V, new Zmax.v, new Zminmax.v. - Reworking and extension of the files on classical logic and description principles (possible incompatibilities) - Few other improvements in ZArith potentially exceptionally breaking the compatibility (useless hypothesys of Zgt_square_simpl and Zlt_square_simpl removed; fixed names mentioning letter O instead of digit 0; weaken premises in Z_lt_induction). - Restructuration of Eqdep_dec.v and Eqdep.v: more lemmas in Type. - Znumtheory now contains a gcd function that can compute within Coq. - More lemmas stated on Type in Wf.v, removal of redundant Acc_iter and Acc_iter2. - Change of the internal names of lemmas in OmegaLemmas. - Acc in Wf.v and clos_refl_trans in Relation_Operators.v now rely on the allowance for recursively non-uniform parameters (possible source of incompatibilities: explicit pattern-matching on these types may require to remove the occurrence associated with their recursively non-uniform parameter). - Coq.List.In_dec has been set transparent (this may exceptionally break proof scripts, set it locally opaque for compatibility). - More on permutations of lists in List.v and Permutation.v. - List.v has been much expanded. - New file SetoidList.v now contains results about lists seen with respect to a setoid equality. - Library NArith has been expanded, mostly with results coming from Intmap (for instance a bitwise xor), plus also a bridge between N and Bitvector. - Intmap has been reorganized. In particular its address type "addr" is now N. User contributions known to use Intmap have been adapted accordingly. If you're using this library please contact us. A wrapper FMapIntMap now presents Intmap as a particular implementation of FMaps. New developments are strongly encouraged to use either this wrapper or any other implementations of FMap instead of using directly this obsolete Intmap. Tools - New semantics for coqtop options ("-batch" expects option "-top dir" for loading vernac file that contains definitions). - Tool coq_makefile now removes custom targets that are file names in "make clean" - New environment variable COQREMOTEBROWSER to set the command invoked to start the remote browser both in Coq and CoqIDE. Standard syntax: "%s" is the placeholder for the URL. Details of changes in 8.1gamma ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Syntax - changed parsing precedence of let/in and fun constructions of Ltac: let x := t in e1; e2 is now parsed as let x := t in (e1;e2). Language and commands - Added sort-polymorphism for definitions in Type (but finally abandoned). - Support for implicit arguments in the types of parameters in (co)fixpoints and (co)inductive declarations. - Improved type inference: use as much of possible general information. before applying irreversible unification heuristics (allow e.g. to infer the predicate in "(exist _ 0 (refl_equal 0) : {n:nat | n=0 })"). - Support for Miller-Pfenning's patterns unification in type synthesis (e.g. can infer P such that P x y = phi(x,y)). - Support for "where" clause in cofixpoint definitions. - New option "Set Printing Universes" for making Type levels explicit. Tactics - Improved implementation of the ring and field tactics. 
For compatibility reasons, the previous tactics are renamed as legacy ring and legacy field, but should be considered as deprecated. - New declarative mathematical proof language. - Support for argument lists of arbitrary length in Tactic Notation. - ``rewrite ... in H`` now fails if ``H`` is used either in an hypothesis or in the goal. - The semantics of ``rewrite ... in *`` has been slightly modified (see doc). - Support for ``as`` clause in tactic injection. - New forward-reasoning tactic "apply in". - Ltac fresh operator now builds names from a concatenation of its arguments. - New ltac tactic "remember" to abstract over a subterm and keep an equality - Support for Miller-Pfenning's patterns unification in apply/rewrite/... (may lead to few incompatibilities - generally now useless tactic calls). Bug fixes - Fix for notations involving basic "match" expressions. - Numerous other bugs solved (a few fixes may lead to incompatibilities). Details of changes in 8.1 ~~~~~~~~~~~~~~~~~~~~~~~~~ Bug fixes - Many bugs have been fixed (cf coq-bugs web page) Tactics - New tactics ring, ring_simplify and new tactic field now able to manage power to a positive integer constant. Tactic ring on Z and R, and field on R manage power (may lead to incompatibilities with V8.1gamma). - Tactic field_simplify now applicable in hypotheses. - New field_simplify_eq for simplifying field equations into ring equations. - Tactics ring, ring_simplify, field, field_simplify and field_simplify_eq all able to apply user-given equations to rewrite monoms on the fly (see documentation). Libraries - New file ConstructiveEpsilon.v defining an epsilon operator and proving the axiom of choice constructively for a countable domain and a decidable predicate. Version 8.0 ----------- Summary of changes ~~~~~~~~~~~~~~~~~~ Coq version 8 is a major revision of the Coq proof assistant. First, the underlying logic is slightly different. The so-called *impredicativity* of the sort Set has been dropped. The main reason is that it is inconsistent with the principle of description which is quite a useful principle for formalizing mathematics within classical logic. Moreover, even in an constructive setting, the impredicativity of Set does not add so much in practice and is even subject of criticism from a large part of the intuitionistic mathematician community. Nevertheless, the impredicativity of Set remains optional for users interested in investigating mathematical developments which rely on it. Secondly, the concrete syntax of terms has been completely revised. The main motivations were - a more uniform, purified style: all constructions are now lowercase, with a functional programming perfume (e.g. abstraction is now written fun), and more directly accessible to the novice (e.g. dependent product is now written forall and allows omission of types). Also, parentheses are no longer mandatory for function application. - extensibility: some standard notations (e.g. “<” and “>”) were incompatible with the previous syntax. Now all standard arithmetic notations (=, +, \*, /, <, <=, ... and more) are directly part of the syntax. Together with the revision of the concrete syntax, a new mechanism of *notation scopes* permits to reuse the same symbols (typically +, -, \*, /, <, <=) in various mathematical theories without any ambiguities for Coq, leading to a largely improved readability of Coq scripts. New commands to easily add new symbols are also provided. 
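As an informal illustration of the notation scope mechanism (this example is not part of the original release notes; it only uses the standard nat and Z scopes from the prelude and ZArith), the same symbols can be interpreted in different theories, either by opening a scope or by using a delimiting key::

    Require Import ZArith.
    Check (4 <= 3 + 2)%nat.   (* <=, + and the literals are read in nat_scope *)
    Open Scope Z_scope.
    Check (4 <= 3 + 2).       (* the same symbols, now read in Z_scope *)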
Coming with the new syntax of terms, a slight reform of the tactic language and of the language of commands has been carried out. The purpose here is a better uniformity making the tactics and commands easier to use and to remember. Thirdly, a restructuring and uniformization of the standard library of Coq has been performed. There is now just one Leibniz equality usable for all the different kinds of Coq objects. Also, the set of real numbers now lies at the same level as the sets of natural and integer numbers. Finally, the names of the standard properties of numbers now follow a standard pattern and the symbolic notations for the standard definitions as well. The fourth point is the release of CoqIDE, a new graphical gtk2-based interface fully integrated with Coq. Close in style to the Proof General Emacs interface, it is faster and its integration with Coq makes interactive developments more friendly. All mathematical Unicode symbols are usable within CoqIDE. Finally, the module system of Coq completes the picture of Coq version 8.0. Though released with an experimental status in the previous version 7.4, it should be considered as a salient feature of the new version. Besides, Coq comes with its load of novelties and improvements: new or improved tactics (including a new tactic for solving first-order statements), new management commands, extended libraries. Bruno Barras and Hugo Herbelin have been the main contributors of the reflection and the implementation of the new syntax. The smart automatic translator from old to new syntax released with Coq is also their work with contributions by Olivier Desmettre. Hugo Herbelin is the main designer and implementer of the notion of notation scopes and of the commands for easily adding new notations. Hugo Herbelin is the main implementer of the restructured standard library. Pierre Corbineau is the main designer and implementer of the new tactic for solving first-order statements in presence of inductive types. He is also the maintainer of the non-domain specific automation tactics. Benjamin Monate is the developer of the CoqIDE graphical interface with contributions by Jean-Christophe Filliâtre, Pierre Letouzey, Claude Marché and Bruno Barras. Claude Marché coordinated the edition of the Reference Manual for Coq V8.0. Pierre Letouzey and Jacek Chrząszcz respectively maintained the extraction tool and module system of Coq. Jean-Christophe Filliâtre, Pierre Letouzey, Hugo Herbelin and other contributors from Sophia-Antipolis and Nijmegen participated in extending the library. Julien Narboux built a NSIS-based automatic Coq installation tool for the Windows platform. Hugo Herbelin and Christine Paulin coordinated the development which was under the responsibility of Christine Paulin. | Palaiseau & Orsay, Apr. 2004 | Hugo Herbelin & Christine Paulin | (updated Apr. 2006) | Details of changes in 8.0beta old syntax ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Logic - Set now predicative by default - New option -impredicative-set to set Set impredicative - The standard library doesn't need impredicativity of Set and is compatible with the classical axioms which contradict Set impredicativity Syntax for arithmetic - Notation "=" and "<>" in Z and R are no longer implicitly in Z or R (with possible introduction of a coercion), use ...=... or ...<>... instead - Locate applied to a simple string (e.g. "+") searches for all notations containing this string Commands - "Declare ML Module" now allows to import .cma files. 
This avoids to use a bunch of "Declare ML Module" statements when using several ML files. - "Set Printing Width n" added, allows to change the size of width printing. - "Implicit Variables Type x,y:t" (new syntax: "Implicit Types x y:t") assigns default types for binding variables. - Declarations of Hints and Notation now accept a "Local" flag not to be exported outside the current file even if not in section - "Print Scopes" prints all notations - New command "About name" for light printing of type, implicit arguments, etc. - New command "Admitted" to declare incompletely proven statement as axioms - New keyword "Conjecture" to declare an axiom intended to be provable - SearchAbout can now search for lemmas referring to more than one constant and on substrings of the name of the lemma - "Print Implicit" displays the implicit arguments of a constant - Locate now searches for all names having a given suffix - New command "Functional Scheme" for building an induction principle from a function defined by case analysis and fix. Commands - new coqtop/coqc option -dont-load-proofs not to load opaque proofs in memory Implicit arguments - Inductive in sections declared with implicits now "discharged" with implicits (like constants and variables) - Implicit Arguments flags are now synchronous with reset - New switch "Unset/Set Printing Implicits" (new syntax: "Unset/Set Printing Implicit") to globally control printing of implicits Grammar extensions - Many newly supported UTF-8 encoded unicode blocks - Greek letters (0380-03FF), Hebrew letters (U05D0-05EF), letter-like symbols (2100-214F, that includes double N,Z,Q,R), prime signs (from 2080-2089) and characters from many written languages are valid in identifiers - mathematical operators (2200-22FF), supplemental mathematical operators (2A00-2AFF), miscellaneous technical (2300-23FF that includes sqrt symbol), miscellaneous symbols (2600-26FF), arrows (2190-21FF and 2900-297F), invisible mathematical operators (from 2080-2089), ... are valid symbols Library - New file about the factorial function in Arith - An additional elimination Acc_iter for Acc, simpler than Acc_rect. This new elimination principle is used for definition well_founded_induction. 
- New library NArith on binary natural numbers - R is now of type Set - Restructuration in ZArith library + "true_sub" used in Zplus now a definition, not a local one (source of incompatibilities in proof referring to true_sub, may need extra Unfold) + Some lemmas about minus moved from fast_integer to Arith/Minus.v (le_minus, lt_mult_left) (theoretical source of incompatibilities) + Several lemmas moved from auxiliary.v and zarith_aux.v to fast_integer.v (theoretical source of incompatibilities) + Variables names of iff_trans changed (source of incompatibilities) + ZArith lemmas named ``OMEGA`` something or ``fast_`` something, and lemma ``new_var`` are now out of ZArith (except ``OMEGA2``) + Redundant ZArith lemmas have been renamed: for the following pairs, use the second name (Zle_Zmult_right2, Zle_mult_simpl), (OMEGA2, Zle_0_plus), (Zplus_assoc_l, Zplus_assoc), (Zmult_one, Zmult_1_n), (Zmult_assoc_l, Zmult_assoc), (Zmult_minus_distr, Zmult_Zminus_distr_l) (add_un_double_moins_un_xO, is_double_moins_un), (Rlt_monotony_rev,Rlt_monotony_contra) (source of incompatibilities) - Few minor changes (no more implicit arguments in Zmult_Zminus_distr_l and Zmult_Zminus_distr_r, lemmas moved from Zcomplements to other files) (rare source of incompatibilities) - New lemmas provided by users added Tactic language - Fail tactic now accepts a failure message - Idtac tactic now accepts a message - New primitive tactic "FreshId" (new syntax: "fresh") to generate new names - Debugger prints levels of calls Tactics - Replace can now replace proofs also - Fail levels are now decremented at "Match Context" blocks only and if the right-hand-side of "Match term With" are tactics, these tactics are never evaluated immediately and do not induce backtracking (in contrast with "Match Context") - Quantified names now avoid global names of the current module (like Intro names did) [source of rare incompatibilities: 2 changes in the set of user contribs] - NewDestruct/NewInduction accepts intro patterns as introduction names - NewDestruct/NewInduction now work for non-inductive type using option "using" - A NewInduction naming bug for inductive types with functional arguments (e.g. the accessibility predicate) has been fixed (source of incompatibilities) - Symmetry now applies to hypotheses too - Inversion now accept option "as [ ... ]" to name the hypotheses - Contradiction now looks also for contradictory hypotheses stating ~A and A (source of incompatibility) - "Contradiction c" try to find an hypothesis in context which contradicts the type of c - Ring applies to new library NArith (require file NArithRing) - Field now works on types in Set - Auto with reals now try to replace le by ge (Rge_le is no longer an immediate hint), resulting in shorter proofs - Instantiate now works in hyps (syntax : Instantiate in ...) - Some new tactics : EConstructor, ELeft, Eright, ESplit, EExists - New tactic "functional induction" to perform case analysis and induction following the definition of a function. - Clear now fails when trying to remove a local definition used by a constant appearing in the current goal Extraction (See details in plugins/extraction/CHANGES) - The old commands: (Recursive) Extraction Module M. are now: (Recursive) Extraction Library M. To use these commands, M should come from a library M.v - The other syntax Extraction & Recursive Extraction now accept module names as arguments. 
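A minimal sketch of the renamed extraction commands (the module name ``MyLib`` is hypothetical and stands for a compiled file MyLib.v; this example is not part of the original release notes)::

    Require MyLib.
    (* Formerly written: Recursive Extraction Module MyLib. *)
    Recursive Extraction Library MyLib.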
Bugs - see coq-bugs server for the complete list of fixed bugs Miscellaneous - Implicit parameters of inductive types definition now taken into account for inferring other implicit arguments Incompatibilities - Persistence of true_sub (4 incompatibilities in Coq user contributions) - Variable names of some constants changed for a better uniformity (2 changes in Coq user contributions) - Naming of quantified names in goal now avoid global names (2 occurrences) - NewInduction naming for inductive types with functional arguments (no incompatibility in Coq user contributions) - Contradiction now solve more goals (source of 2 incompatibilities) - Merge of eq and eqT may exceptionally result in subgoals now solved automatically - Redundant pairs of ZArith lemmas may have different names: it may cause "Apply/Rewrite with" to fail if using the first name of a pair of redundant lemmas (this is solved by renaming the variables bound by "with"; 3 incompatibilities in Coq user contribs) - ML programs referring to constants from fast_integer.v must use "Coqlib.gen_constant_modules Coqlib.zarith_base_modules" instead Details of changes in 8.0beta new syntax ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ New concrete syntax - A completely new syntax for terms - A more uniform syntax for tactics and the tactic language - A few syntactic changes for commands - A smart automatic translator translating V8.0 files in old syntax to files valid for V8.0 Syntax extensions - "Grammar" for terms disappears - "Grammar" for tactics becomes "Tactic Notation" - "Syntax" disappears - Introduction of a notion of notation scope allowing to use the same notations in various contexts without using specific delimiters (e.g the same expression "4<=3+x" is interpreted either in "nat", "positive", "N" (previously "entier"), "Z", "R", depending on which Notation scope is currently open) [see documentation for details] - Notation now requires a precedence and associativity (default was to set precedence to 1 and associativity to none) Revision of the standard library - Many lemmas and definitions names have been made more uniform mostly in Arith, NArith, ZArith and Reals (e.g : "times" -> "Pmult", "times_sym" -> "Pmult_comm", "Zle_Zmult_pos_right" -> "Zmult_le_compat_r", "SUPERIEUR" -> "Gt", "ZERO" -> "Z0") - Order and names of arguments of basic lemmas on nat, Z, positive and R have been made uniform. - Notions of Coq initial state are declared with (strict) implicit arguments - eq merged with eqT: old eq disappear, new eq (written =) is old eqT and new eqT is syntactic sugar for new eq (notation == is an alias for = and is written as it, exceptional source of incompatibilities) - Similarly, ex, ex2, all, identity are merged with exT, exT2, allT, identityT - Arithmetical notations for nat, positive, N, Z, R, without needing any backquote or double-backquotes delimiters. - In Lists: new concrete notations; argument of nil is now implicit - All changes in the library are taken in charge by the translator Semantical changes during translation - Recursive keyword set by default (and no longer needed) in Tactic Definition - Set Implicit Arguments is strict by default in new syntax - reductions in hypotheses of the form "... in H" now apply to the type also if H is a local definition - etc Gallina - New syntax of the form "Inductive bool : Set := true, false : bool." 
for enumerated types - Experimental syntax of the form p.(fst) for record projections (activable with option "Set Printing Projections" which is recognized by the translator) Known problems of the automatic translation - iso-latin-1 characters are no longer supported: move your files to 7-bits ASCII or unicode before translation (switch to unicode is automatically done if a file is loaded and saved again by coqide) - Renaming in ZArith: incompatibilities in Coq user contribs due to merging names INZ, from Reals, and inject_nat. - Renaming and new lemmas in ZArith: may clash with names used by users - Restructuration of ZArith: replace requirement of specific modules in ZArith by "Require Import ZArith_base" or "Require Import ZArith" - Some implicit arguments must be made explicit before translation: typically for "length nil", the implicit argument of length must be made explicit - Grammar rules, Infix notations and V7.4 Notations must be updated wrt the new scheme for syntactic extensions (see translator documentation) - Unsafe for annotation Cases when constructors coercions are used or when annotations are eta-reduced predicates Details of changes in 8.0 ~~~~~~~~~~~~~~~~~~~~~~~~~ Commands - New option "Set Printing All" to deactivate all high-level forms of printing (implicit arguments, coercions, destructing let, if-then-else, notations, projections) - "Functional Scheme" and "Functional Induction" extended to polymorphic types and dependent types - Notation now allows recursive patterns, hence recovering parts of the functionalities of pre-V8 Grammar/Syntax commands - Command "Print." discontinued. - Redundant syntax "Implicit Arguments On/Off" discontinued New syntax - Semantics change of the if-then-else construction in new syntax: "if c then t1 else t2" now stands for "match c with c1 _ ... _ => t1 | c2 _ ... _ => t2 end" with no dependency of t1 and t2 in the arguments of the constructors; this may cause incompatibilities for files translated using coq 8.0beta Notation scopes - Delimiting key %bool for bool_scope added - Import no more needed to activate argument scopes from a module Tactics and the tactic Language - Semantics of "assert" is now consistent with the reference manual - New tactics stepl and stepr for chaining transitivity steps - Tactic "replace ... with ... in" added - Intro patterns now supported in Ltac (parsed with prefix "ipattern:") Executables and tools - Added option -top to change the name of the toplevel module "Top" - Coqdoc updated to new syntax and now part of Coq sources - XML exportation tool now exports the structure of vernacular files (cf chapter 13 in the reference manual) User contributions - User contributions have been updated to the new syntax Bug fixes - Many bugs have been fixed (cf coq-bugs web page) coq-8.15.0/doc/sphinx/conf.py000077500000000000000000000406731417001151100157500ustar00rootroot00000000000000#!/usr/bin/env python3 ########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## v documentation" by default. #html_title = 'Coq 8.5 v8.5pl1' # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (relative to this directory) to use as a favicon of # the docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. #html_last_updated_fmt = None # FIXME: this could be re-enabled after ensuring that smart quotes are locally # disabled for all relevant directives smartquotes = False # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # -- Options for LaTeX output --------------------------------------------- ########################### # Set things up for XeTeX # ########################### latex_elements = { 'babel': '', 'fontenc': '', 'inputenc': '', 'utf8extra': '', 'cmappkg': '', 'papersize': 'letterpaper', 'classoptions': ',openany', # No blank pages 'polyglossia': '\\usepackage{polyglossia}', 'sphinxsetup': 'verbatimwithframe=false', 'preamble': r""" \usepackage{unicode-math} \usepackage{microtype} % Macro definitions \usepackage{refman-preamble} % Style definitions for notations \usepackage{coqnotations} % Style tweaks \newcssclass{sigannot}{\textrm{#1:}} % Silence 'LaTeX Warning: Command \nobreakspace invalid in math mode' \everymath{\def\nobreakspace{\ }} """ } latex_engine = "xelatex" # Cf. 
https://github.com/sphinx-doc/sphinx/issues/7015 latex_use_xindy = False ######## # done # ######## latex_additional_files = [ "refman-preamble.sty", "_static/coqnotations.sty" ] latex_documents = [('index', 'CoqRefMan.tex', 'The Coq Reference Manual', author, 'manual')] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = "../../ide/coq.png" # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. latex_show_urls = 'footnote' # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). #man_pages = [ # (master_doc, 'coq', 'Coq Documentation', # [author], 1) #] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) #texinfo_documents = [ # (master_doc, 'Coq', 'Coq Documentation', # author, 'Coq', 'One line description of project.', # 'Miscellaneous'), #] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. #epub_title = project #epub_author = author #epub_publisher = author #epub_copyright = copyright # The basename for the epub file. It defaults to the project name. #epub_basename = project # The HTML theme for the epub output. Since the default themes are not # optimized for small screen space, using the same theme for HTML and epub # output is usually not wise. This defaults to 'epub', a theme designed to save # visual space. #epub_theme = 'epub' # The language of the text. It defaults to the language option # or 'en' if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # A tuple containing the cover image and cover page html template filenames. #epub_cover = () # A sequence of (type, uri, title) tuples for the guide element of content.opf. #epub_guide = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files that should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True # Choose between 'default' and 'includehidden'. #epub_tocscope = 'default' # Fix unsupported image types using the Pillow. #epub_fix_images = False # Scale large images. #epub_max_image_width = 0 # How to display URL addresses: 'footnote', 'no', or 'inline'. 
#epub_show_urls = 'inline' # If false, no index is generated. #epub_use_index = True # navtree options navtree_shift = True # since sphinxcontrib-bibtex version 2 we need this bibtex_bibfiles = [ "biblio.bib" ] coq-8.15.0/doc/sphinx/coq-attrindex.rst000066400000000000000000000001551417001151100177510ustar00rootroot00000000000000:orphan: .. hack to get index in TOC .. _attribute_index: --------------- Attribute index --------------- coq-8.15.0/doc/sphinx/coq-cmdindex.rst000066400000000000000000000001551417001151100175420ustar00rootroot00000000000000:orphan: .. hack to get index in TOC .. _command_index: ----------------- Command index ----------------- coq-8.15.0/doc/sphinx/coq-exnindex.rst000066400000000000000000000001651417001151100175720ustar00rootroot00000000000000:orphan: .. hack to get index in TOC ------------------------- Errors and warnings index ------------------------- coq-8.15.0/doc/sphinx/coq-optindex.rst000066400000000000000000000002331417001151100175760ustar00rootroot00000000000000:orphan: .. hack to get index in TOC .. _options_index: ------------------------------- Flags, options and tables index ------------------------------- coq-8.15.0/doc/sphinx/coq-tacindex.rst000066400000000000000000000001431417001151100175430ustar00rootroot00000000000000:orphan: .. hack to get index in TOC .. _tactic_index: ------------- Tactic index ------------- coq-8.15.0/doc/sphinx/dune000066400000000000000000000004031417001151100153070ustar00rootroot00000000000000(dirs :standard _static _templates) (rule (targets README.gen.rst) (deps (source_tree ../tools/coqrst) README.template.rst) (action (run ../tools/coqrst/regen_readme.py %{targets}))) (rule (alias refman-html) (action (diff README.rst README.gen.rst))) coq-8.15.0/doc/sphinx/genindex.rst000066400000000000000000000001211417001151100167610ustar00rootroot00000000000000:orphan: .. hack to get index in TOC ------------- General index ------------- coq-8.15.0/doc/sphinx/history.rst000066400000000000000000001726031417001151100167000ustar00rootroot00000000000000.. _history: ---------------------- Early history of Coq ---------------------- Historical roots ---------------- Coq is a proof assistant for higher-order logic, allowing the development of computer programs consistent with their formal specification. It is the result of about ten years [#years]_ of research of the Coq project. We shall briefly survey here three main aspects: the *logical language* in which we write our axiomatizations and specifications, the *proof assistant* which allows the development of verified mathematical proofs, and the *program extractor* which synthesizes computer programs obeying their formal specifications, written as logical assertions in the language. The logical language used by Coq is a variety of type theory, called the *Calculus of Inductive Constructions*. Without going back to Leibniz and Boole, we can date the creation of what is now called mathematical logic to the work of Frege and Peano at the turn of the century. The discovery of antinomies in the free use of predicates or comprehension principles prompted Russell to restrict predicate calculus with a stratification of *types*. This effort culminated with *Principia Mathematica*, the first systematic attempt at a formal foundation of mathematics. A simplification of this system along the lines of simply typed λ-calculus occurred with Church’s *Simple Theory of Types*. 
The λ-calculus notation, originally used for expressing functionality, could also be used as an encoding of natural deduction proofs. This Curry-Howard isomorphism was used by N. de Bruijn in the *Automath* project, the first full-scale attempt to develop and mechanically verify mathematical proofs. This effort culminated with Jutting’s verification of Landau’s *Grundlagen* in the 1970’s. Exploiting this Curry-Howard isomorphism, notable achievements in proof theory saw the emergence of two type-theoretic frameworks; the first one, Martin-Löf’s *Intuitionistic Theory of Types*, attempts a new foundation of mathematics on constructive principles. The second one, Girard’s polymorphic λ-calculus :math:`F_\omega`, is a very strong functional system in which we may represent higher-order logic proof structures. Combining both systems in a higher-order extension of the Automath language, T. Coquand presented in 1985 the first version of the *Calculus of Constructions*, CoC. This strong logical system allowed powerful axiomatizations, but direct inductive definitions were not possible, and inductive notions had to be defined indirectly through functional encodings, which introduced inefficiencies and awkwardness. The formalism was extended in 1989 by T. Coquand and C. Paulin with primitive inductive definitions, leading to the current *Calculus of Inductive Constructions*. This extended formalism is not rigorously defined here. Rather, numerous concrete examples are discussed. We refer the interested reader to relevant research papers for more information about the formalism, its meta-theoretic properties, and semantics. However, it should not be necessary to understand this theoretical material in order to write specifications. It is possible to understand the Calculus of Inductive Constructions at a higher level, as a mixture of predicate calculus, inductive predicate definitions presented as typed PROLOG, and recursive function definitions close to the language ML. Automated theorem-proving was pioneered in the 1960’s by Davis and Putnam in propositional calculus. A complete mechanization (in the sense of a semidecision procedure) of classical first-order logic was proposed in 1965 by J.A. Robinson, with a single uniform inference rule called *resolution*. Resolution relies on solving equations in free algebras (i.e. term structures), using the *unification algorithm*. Many refinements of resolution were studied in the 1970’s, but few convincing implementations were realized, except of course that PROLOG is in some sense issued from this effort. A less ambitious approach to proof development is computer-aided proof-checking. The most notable proof-checkers developed in the 1970’s were LCF, designed by R. Milner and his colleagues at U. Edinburgh, specialized in proving properties about denotational semantics recursion equations, and the Boyer and Moore theorem-prover, an automation of primitive recursion over inductive data types. While the Boyer-Moore theorem-prover attempted to synthesize proofs by a combination of automated methods, LCF constructed its proofs through the programming of *tactics*, written in a high-level functional meta-language, ML. The salient feature which clearly distinguishes our proof assistant from say LCF or Boyer and Moore’s, is its possibility to extract programs from the constructive contents of proofs. 
This computational interpretation of proof objects, in the tradition of Bishop’s constructive mathematics, is based on a realizability interpretation, in the sense of Kleene, due to C. Paulin. The user must just mark his intention by separating in the logical statements the assertions stating the existence of a computational object from the logical assertions which specify its properties, but which may be considered as just comments in the corresponding program. Given this information, the system automatically extracts a functional term from a consistency proof of its specifications. This functional term may be in turn compiled into an actual computer program. This methodology of extracting programs from proofs is a revolutionary paradigm for software engineering. Program synthesis has long been a theme of research in artificial intelligence, pioneered by R. Waldinger. The Tablog system of Z. Manna and R. Waldinger allows the deductive synthesis of functional programs from proofs in tableau form of their specifications, written in a variety of first-order logic. Development of a systematic *programming logic*, based on extensions of Martin-Löf’s type theory, was undertaken at Cornell U. by the Nuprl team, headed by R. Constable. The first actual program extractor, PX, was designed and implemented around 1985 by S. Hayashi from Kyoto University. It allows the extraction of a LISP program from a proof in a logical system inspired by the logical formalisms of S. Feferman. Interest in this methodology is growing in the theoretical computer science community. We can foresee the day when actual computer systems used in applications will contain certified modules, automatically generated from a consistency proof of their formal specifications. We are however still far from being able to use this methodology in a smooth interaction with the standard tools from software engineering, i.e. compilers, linkers, run-time systems taking advantage of special hardware, debuggers, and the like. We hope that Coq can be of use to researchers interested in experimenting with this new methodology. .. [#years] At the time of writing, i.e. 1995. Versions 1 to 5 --------------- .. note:: This summary was written in 1995 together with the previous section and formed the initial version of the Credits chapter. A more comprehensive description of these early versions is available in the following subsections, which come from a document written in September 2015 by Gérard Huet, Thierry Coquand and Christine Paulin. A first implementation of CoC was started in 1984 by G. Huet and T. Coquand. Its implementation language was CAML, a functional programming language from the ML family designed at INRIA in Rocquencourt. The core of this system was a proof-checker for CoC seen as a typed λ-calculus, called the *Constructive Engine*. This engine was operated through a high-level notation permitting the declaration of axioms and parameters, the definition of mathematical types and objects, and the explicit construction of proof objects encoded as λ-terms. A section mechanism, designed and implemented by G. Dowek, allowed hierarchical developments of mathematical theories. This high-level language was called the *Mathematical Vernacular*. Furthermore, an interactive *Theorem Prover* permitted the incremental construction of proof trees in a top-down manner, subgoaling recursively and backtracking from dead-ends. The theorem prover executed tactics written in CAML, in the LCF fashion. 
A basic set of tactics was predefined, which the user could extend by his own specific tactics. This system (Version 4.10) was released in 1989. Then, the system was extended to deal with the new calculus with inductive types by C. Paulin, with corresponding new tactics for proofs by induction. A new standard set of tactics was streamlined, and the vernacular extended for tactics execution. A package to compile programs extracted from proofs to actual computer programs in CAML or some other functional language was designed and implemented by B. Werner. A new user-interface, relying on a CAML-X interface by D. de Rauglaudre, was designed and implemented by A. Felty. It allowed operation of the theorem-prover through the manipulation of windows, menus, mouse-sensitive buttons, and other widgets. This system (Version 5.6) was released in 1991. Coq was ported to the new implementation Caml-light of X. Leroy and D. Doligez by D. de Rauglaudre (Version 5.7) in 1992. A new version of Coq was then coordinated by C. Murthy, with new tools designed by C. Parent to prove properties of ML programs (this methodology is dual to program extraction) and a new user-interaction loop. This system (Version 5.8) was released in May 1993. A Centaur interface CTCoq was then developed by Y. Bertot from the Croap project from INRIA-Sophia-Antipolis. In parallel, G. Dowek and H. Herbelin developed a new proof engine, allowing the general manipulation of existential variables consistently with dependent types in an experimental version of Coq (V5.9). The version V5.10 of Coq is based on a generic system for manipulating terms with binding operators due to Chet Murthy. A new proof engine allows the parallel development of partial proofs for independent subgoals. The structure of these proof trees is a mixed representation of derivation trees for the Calculus of Inductive Constructions with abstract syntax trees for the tactics scripts, allowing the navigation in a proof at various levels of details. The proof engine allows generic environment items managed in an object-oriented way. This new architecture, due to C. Murthy, supports several new facilities which make the system easier to extend and to scale up: - User-programmable tactics are allowed - It is possible to separately verify development modules, and to load their compiled images without verifying them again - a quick relocation process allows their fast loading - A generic parsing scheme allows user-definable notations, with a symmetric table-driven pretty-printer - Syntactic definitions allow convenient abbreviations - A limited facility of meta-variables allows the automatic synthesis of certain type expressions, allowing generic notations for e.g. equality, pairing, and existential quantification. In the Fall of 1994, C. Paulin-Mohring replaced the structure of inductively defined types and families by a new structure, allowing the mutually recursive definitions. P. Manoury implemented a translation of recursive definitions into the primitive recursive style imposed by the internal recursion operators, in the style of the ProPre system. C. Muñoz implemented a decision procedure for intuitionistic propositional logic, based on results of R. Dyckhoff. J.C. Filliâtre implemented a decision procedure for first-order logic without contraction, based on results of J. Ketonen and R. Weyhrauch. Finally C. Murthy implemented a library of inversion tactics, relieving the user from tedious definitions of “inversion predicates”. | Rocquencourt, Feb. 
1st 1995 | Gérard Huet | Version 1 ~~~~~~~~~ This software is a prototype type checker for a higher-order logical formalism known as the Theory of Constructions, presented in his PhD thesis by Thierry Coquand, with influences from Girard's system F and de Bruijn's Automath. The metamathematical analysis of the system is the PhD work of Thierry Coquand. The software is mostly the work of Gérard Huet. Most of the mathematical examples verified with the software are due to Thierry Coquand. The programming language of the CONSTR software (as it was called at the time) was a version of ML adapted from the Edinburgh LCF system and running on a LISP backend. The main improvements from the original LCF ML were that ML was compiled rather than interpreted (Gérard Huet building on the original translator by Lockwood Morris), and that it was enriched by recursively defined types (work of Guy Cousineau). This ancestor of CAML was used and improved by Larry Paulson for his implementation of Cambridge LCF. Software developments of this prototype occurred from late 1983 to early 1985. Version 1.10 was frozen on December 22nd 1984. It is the version used for the examples in Thierry Coquand's thesis, defended on January 31st 1985. There was a unique binding operator, used both for universal quantification (dependent product) at the level of types and functional abstraction (λ) at the level of terms/proofs, in the manner of Automath. Substitution (λ-reduction) was implemented using de Bruijn's indexes. Version 1.11 was frozen on February 19th, 1985. It is the version used for the examples in the paper: T. Coquand, G. Huet. *Constructions: A Higher Order Proof System for Mechanizing Mathematics* :cite:`CH85`. Christine Paulin joined the team at this point, for her DEA research internship. In her DEA memoir (August 1985) she presents developments for the *lambo* function – :math:`\text{lambo}(f)(n)` computes the minimal :math:`m` such that :math:`f(m)` is greater than :math:`n`, for :math:`f` an increasing integer function, a challenge for constructive mathematics. She also encoded the majority voting algorithm of Boyer and Moore. Version 2 ~~~~~~~~~ The formal system, now renamed as the *Calculus of Constructions*, was presented with a proof of consistency and comparisons with proof systems of Per Martin Löf, Girard, and the Automath family of N. de Bruijn, in the paper: T. Coquand and G. Huet. *The Calculus of Constructions* :cite:`CH88`. An abstraction of the software design, in the form of an abstract machine for proof checking, and a fuller sequence of mathematical developments was presented in: T. Coquand, G. Huet. *Concepts Mathématiques et Informatiques Formalisés dans le Calcul des Constructions* :cite:`CH87`. Version 2.8 was frozen on December 16th, 1985, and served for developing the examples in the above papers. This calculus was then enriched in version 2.9 with a cumulative hierarchy of universes. Universe levels were initially explicit natural numbers. Another improvement was the possibility of automatic synthesis of implicit type arguments, relieving the user of tedious redundant declarations. Christine Paulin wrote an article *Algorithm development in the Calculus of Constructions* :cite:`P86`. Besides *lambo* and *majority*, she presents *quicksort* and a text formatting algorithm. Version 2.13 of the Calculus of Constructions with universes was frozen on June 25th, 1986. 
A synthetic presentation of type theory along constructive lines with ML algorithms was given by Gérard Huet in his May 1986 CMU course notes *Formal Structures for Computation and Deduction*. Its chapter *Induction and Recursion in the Theory of Constructions* was presented as an invited paper at the Joint Conference on Theory and Practice of Software Development TAPSOFT’87 at Pise in March 1987, and published as *Induction Principles Formalized in the Calculus of Constructions* :cite:`H88`. Version 3 ~~~~~~~~~ This version saw the beginning of proof automation, with a search algorithm inspired from PROLOG and the applicative logic programming programs of the course notes *Formal structures for computation and deduction*. The search algorithm was implemented in ML by Thierry Coquand. The proof system could thus be used in two modes: proof verification and proof synthesis, with tactics such as ``AUTO``. The implementation language was now called CAML, for Categorical Abstract Machine Language. It used as backend the LLM3 virtual machine of Le Lisp by Jérôme Chailloux. The main developers of CAML were Michel Mauny, Ascander Suarez and Pierre Weis. V3.1 was started in the summer of 1986, V3.2 was frozen at the end of November 1986. V3.4 was developed in the first half of 1987. Thierry Coquand held a post-doctoral position in Cambridge University in 1986-87, where he developed a variant implementation in SML, with which he wrote some developments on fixpoints in Scott's domains. Version 4 ~~~~~~~~~ This version saw the beginning of program extraction from proofs, with two varieties of the type ``Prop`` of propositions, indicating constructive intent. The proof extraction algorithms were implemented by Christine Paulin-Mohring. V4.1 was frozen on July 24th, 1987. It had a first identified library of mathematical developments (directory ``exemples``), with libraries ``Logic`` (containing impredicative encodings of intuitionistic logic and algebraic primitives for booleans, natural numbers and list), ``Peano`` developing second-order Peano arithmetic, ``Arith`` defining addition, multiplication, euclidean division and factorial. Typical developments were the Knaster-Tarski theorem and Newman's lemma from rewriting theory. V4.2 was a joint development of a team consisting of Thierry Coquand, Gérard Huet and Christine Paulin-Mohring. A file V4.2.log records the log of changes. It was frozen on September 1987 as the last version implemented in CAML 2.3, and V4.3 followed on CAML 2.5, a more stable development system. V4.3 saw the first top-level of the system. Instead of evaluating explicit quotations, the user could develop his mathematics in a high-level language called the mathematical vernacular (following Automath terminology). The user could develop files in the vernacular notation (with ``.v`` extension) which were now separate from the ``ml`` sources of the implementation. Gilles Dowek joined the team to develop the vernacular language as his DEA internship research. A notion of sticky constant was introduced, in order to keep names of lemmas when local hypotheses of proofs were discharged. This gave a notion of global mathematical environment with local sections. Another significant practical change was that the system, originally developed on the VAX central computer of our lab, was transferred on SUN personal workstations, allowing a level of distributed development. 
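The section and discharge mechanism sketched above survives, much evolved, in today's Coq. As a purely illustrative sketch in present-day syntax (not the V4.3 vernacular), variables declared inside a section are abstracted over, in the definitions that use them, when the section is closed::

   Section Twice.
     Variable A : Type.
     Variable f : A -> A.

     (* Inside the section, twice may freely refer to A and f. *)
     Definition twice (x : A) := f (f x).
   End Twice.

   (* After discharge, twice is generalized over the section variables:
      twice : forall A : Type, (A -> A) -> A -> A *)
   Check twice.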
The extraction algorithm was modified, with three annotations ``Pos``, ``Null`` and ``Typ`` decorating the sorts ``Prop`` and ``Type``. Version 4.3 was frozen at the end of November 1987, and was distributed to an early community of users (among those were Hugo Herbelin and Loic Colson).

V4.4 saw the first version of (encoded) inductive types. Now natural numbers could be defined as::

   Inductive NAT : Prop = O : NAT | Succ : NAT->NAT.

These inductive types were encoded impredicatively in the calculus, using a subsystem *rec* due to Christine Paulin. V4.4 was frozen on March 6th 1988.

Version 4.5 was the first one to support inductive types and program extraction. Its banner was *Calcul des Constructions avec Réalisations et Synthèse*. The vernacular language was enriched to accommodate extraction commands. The verification engine design was presented as: G. Huet. *The Constructive Engine*. Version 4.5. Invited Conference, 2nd European Symposium on Programming, Nancy, March 88. The final paper, describing the V4.9 implementation, appeared in: A perspective in Theoretical Computer Science, Commemorative Volume in memory of Gift Siromoney, Ed. R. Narasimhan, World Scientific Publishing, 1989. Version 4.5 was demonstrated in June 1988 at the YoP Institute on Logical Foundations of Functional Programming organized by Gérard Huet at Austin, Texas.

Version 4.6 was started during the summer of 1988. Its main improvement was the complete overhaul of the proof synthesis engine by Thierry Coquand, with a tree structure of goals. Its source code was communicated to Randy Pollack on September 2nd 1988. It evolved progressively into LEGO, a proof system for Luo's formalism of Extended Calculus of Constructions. The discharge tactic was modified by Gérard Huet to allow for inter-dependencies in discharged lemmas. Christine Paulin improved the inductive definition scheme in order to accommodate predicates of any arity.

Version 4.7 was started on September 6th, 1988. This version starts exploiting the CAML notion of module in order to improve the modularity of the implementation. Now the term verifier is identified as a proper module Machine, with its internal data structures hidden and thus accessible only through the legitimate operations. This machine (the constructive engine) was the trusted core of the implementation. The proof synthesis mechanism was a separate proof term generator. Once a complete proof term was synthesized with the help of tactics, it was entirely re-checked by the engine. Thus there was no need to certify the tactics, and the system took advantage of this fact by having tactics ignore the universe levels, the universe consistency check being relegated to the final type checking pass. This induced a certain puzzlement in early users who saw, after a successful proof search, their ``QED`` followed by silence, followed by a failure message due to a universe inconsistency… The set of examples comprises set theory experiments by Hugo Herbelin, and notably the Schroeder-Bernstein theorem.

Version 4.8, started on October 8th, 1988, saw a major re-implementation of the abstract syntax type ``constr``, separating variables of the formalism and metavariables denoting incomplete terms managed by the search mechanism. A notion of level (with three values ``TYPE``, ``OBJECT`` and ``PROOF``) is made explicit and a type judgement clarifies the constructions, whose implementation is now fully explicit.
Structural equality is sped up by using pointer equality, yielding spectacular improvements. Thierry Coquand adapts the proof synthesis to the new representation, and simplifies pattern matching to first-order predicate calculus matching, with important performance gains. A new representation of the universe hierarchy is then defined by Gérard Huet. Universe levels are now implemented implicitly, through a hidden graph of abstract levels constrained with an order relation. Checking acyclicity of the graph ensures well-foundedness of the ordering, and thus consistency. This was documented in a memo *Adding Type:Type to the Calculus of Constructions* which was never published. The development version is released as a stable 4.8 at the end of 1988.

Version 4.9 is released on March 1st 1989, with the new "elastic" universe hierarchy. The spring of 1989 saw the first attempt at documenting the system usage, with a number of papers describing the formalism:

- *Metamathematical Investigations of a Calculus of Constructions*, by Thierry Coquand :cite:`C90`,
- *Inductive definitions in the Calculus of Constructions*, by Christine Paulin-Mohring,
- *Extracting Fω's programs from proofs in the Calculus of Constructions*, by Christine Paulin-Mohring :cite:`P89`,
- *The Constructive Engine*, by Gérard Huet :cite:`H89`,

as well as a number of user guides:

- *A short user's guide for the Constructions*, Version 4.10, by Gérard Huet
- *A Vernacular Syllabus*, by Gilles Dowek.
- *The Tactics Theorem Prover, User's guide*, Version 4.10, by Thierry Coquand.

Stable V4.10, released on May 1st, 1989, was then a mature system, distributed with CAML V2.6. In the meantime, Thierry Coquand and Christine Paulin-Mohring had been investigating how to add native inductive types to the Calculus of Constructions, in the manner of Per Martin-Löf's Intuitionistic Type Theory. The impredicative encoding had already been presented in: F. Pfenning and C. Paulin-Mohring. *Inductively defined types in the Calculus of Constructions* :cite:`PP90`. An extension of the calculus with primitive inductive types appeared in: T. Coquand and C. Paulin-Mohring. *Inductively defined types* :cite:`CP90`. This led to the Calculus of Inductive Constructions, the logical formalism implemented in Versions 5 upward of the system, and documented in: C. Paulin-Mohring. *Inductive Definitions in the System Coq - Rules and Properties* :cite:`P93`. The last version of CONSTR is Version 4.11, which was last distributed in the spring of 1990. It was demonstrated at the first workshop of the European Basic Research Action Logical Frameworks in Sophia Antipolis in May 1990.

Version 5
~~~~~~~~~

At the end of 1989, Version 5.1 was started, and renamed as the system Coq for the Calculus of Inductive Constructions. It was then ported to the new stand-alone implementation of ML called Caml-light. In 1990 many changes occurred. Thierry Coquand left for Chalmers University in Göteborg. Christine Paulin-Mohring took a CNRS researcher position at the LIP laboratory of École Normale Supérieure de Lyon. Project Formel was terminated, and gave rise to two teams: Cristal at INRIA-Rocquencourt, which continued developments in functional programming with Caml-light then OCaml, and Coq, continuing the type theory research, with a joint team headed by Gérard Huet at INRIA-Rocquencourt and Christine Paulin-Mohring at the LIP laboratory of CNRS-ENS Lyon. Chetan Murthy joined the team in 1991 and became the main software architect of Version 5.
He completely overhauled the implementation for efficiency. Versions 5.6 and 5.8 were major distributed versions, with complete documentation and a library of users' developments. The use of the RCS revision control system and systematic ChangeLog files allowed more precise tracking of the software developments.

| September 2015
| Thierry Coquand, Gérard Huet and Christine Paulin-Mohring.
|

Versions 6
----------

Version 6.1
~~~~~~~~~~~

The present version 6.1 of Coq is based on the V5.10 architecture. It was ported to the new language Objective Caml by Bruno Barras. The underlying framework has slightly changed and allows more conversions between sorts. The new version provides powerful tools for easier developments. Cristina Cornes designed an extension of the Coq syntax to allow definition of terms using a powerful pattern matching analysis in the style of ML programs. Amokrane Saïbi wrote a mechanism to simulate inheritance between type families, extending a proposal by Peter Aczel. He also developed a mechanism to automatically compute which arguments of a constant may be inferred by the system and consequently do not need to be explicitly written. Yann Coscoy designed a command which explains a proof term using natural language. Pierre Crégut built a new tactic which solves problems in quantifier-free Presburger Arithmetic. Both functionalities have been integrated into the Coq system by Hugo Herbelin. Samuel Boutin designed a tactic for simplification of commutative rings using a canonical set of rewriting rules and equality modulo associativity and commutativity. Finally, the organisation of the Coq distribution has been supervised by Jean-Christophe Filliâtre with the help of Judicaël Courant and Bruno Barras.

| Lyon, Nov. 18th 1996
| Christine Paulin
|

Version 6.2
~~~~~~~~~~~

In version 6.2 of Coq, the parsing is done using camlp4, a preprocessor and pretty-printer for CAML designed by Daniel de Rauglaudre at INRIA. Daniel de Rauglaudre made the first adaptation of Coq for camlp4; this work was continued by Bruno Barras, who also changed the structure of Coq abstract syntax trees and the primitives to manipulate them. The result of these changes is a faster parsing procedure with greatly improved syntax-error messages. The user interface to introduce grammar or pretty-printing rules has also changed. Eduardo Giménez redesigned the internal tactic libraries, giving uniform names to Caml functions corresponding to Coq tactic names. Bruno Barras wrote new, more efficient reduction functions. Hugo Herbelin introduced more uniform notations in the Coq specification language: the definitions by fixpoints and pattern matching have a more readable syntax. Patrick Loiseleur introduced user-friendly notations for arithmetic expressions. New tactics were introduced: Eduardo Giménez improved the mechanism to introduce macros for tactics, and designed special tactics for (co)inductive definitions; Patrick Loiseleur designed a tactic to simplify polynomial expressions in an arbitrary commutative ring which generalizes the previous tactic implemented by Samuel Boutin. Jean-Christophe Filliâtre introduced a tactic for refining a goal, using a proof term with holes as a proof scheme. David Delahaye designed the tool to search an object in the library given its type (up to isomorphism). Henri Laulhère produced the Coq distribution for the Windows environment.
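The goal-refinement tactic mentioned above survives in today's Coq as ``refine``, which takes a partial proof term whose holes become new subgoals. A minimal sketch in current syntax (for illustration only; the V6.2 syntax differed)::

   Goal forall A B : Prop, A -> (A -> B) -> B.
   Proof.
     (* Provide a proof term with a hole; the hole becomes a subgoal. *)
     refine (fun A B a f => f _).
     (* Remaining subgoal: A, with a : A in the context. *)
     exact a.
   Qed.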
Finally, Hugo Herbelin was the main coordinator of the Coq documentation with principal contributions by Bruno Barras, David Delahaye, Jean-Christophe Filliâtre, Eduardo Giménez, Hugo Herbelin and Patrick Loiseleur.

| Orsay, May 4th 1998
| Christine Paulin
|

Version 6.3
~~~~~~~~~~~

The main changes in version V6.3 were the introduction of a few new tactics and the extension of the guard condition for fixpoint definitions. B. Barras extended the unification algorithm to complete partial terms and fixed various tricky bugs related to universes. D. Delahaye developed the ``AutoRewrite`` tactic. He also designed the new behavior of ``Intro`` and provided the tacticals ``First`` and ``Solve``. J.-C. Filliâtre developed the ``Correctness`` tactic. E. Giménez extended the guard condition in fixpoints. H. Herbelin designed the new syntax for definitions and extended the ``Induction`` tactic. P. Loiseleur developed the ``Quote`` tactic and the new design of the ``Auto`` tactic; he also introduced the index of errors in the documentation. C. Paulin wrote the ``Focus`` command and introduced the reduction functions in definitions; this last feature was proposed by J.-F. Monin from CNET Lannion.

| Orsay, Dec. 1999
| Christine Paulin
|

Versions 7
----------

Summary of changes
~~~~~~~~~~~~~~~~~~

The version V7 is a new implementation started in September 1999 by Jean-Christophe Filliâtre. This is a major revision with respect to the internal architecture of the system. The Coq version 7.0 was distributed in March 2001, version 7.1 in September 2001, version 7.2 in January 2002, version 7.3 in May 2002 and version 7.4 in February 2003.

Jean-Christophe Filliâtre designed the architecture of the new system. He introduced a new representation for environments and wrote a new kernel for type checking terms. His approach was to use functional data-structures in order to get more sharing, to prepare the addition of modules and also to get closer to a certified kernel.

Hugo Herbelin introduced a new structure of terms with local definitions. He introduced “qualified” names, wrote a new pattern matching compilation algorithm and designed a more compact algorithm for checking the logical consistency of universes. He contributed to the simplification of Coq internal structures and the optimisation of the system. He added basic tactics for forward reasoning and coercions in patterns.

David Delahaye introduced a new language for tactics. General tactics using pattern matching on goals and context can directly be written from the Coq toplevel. He also provided primitives for the design of user-defined tactics in Caml.

Micaela Mayero contributed the library on real numbers. Olivier Desmettre extended this library with axiomatic trigonometric functions, square, square roots, finite sums, Chasles property and basic plane geometry.

Jean-Christophe Filliâtre and Pierre Letouzey redesigned a new extraction procedure from Coq terms to Caml or Haskell programs. This new extraction procedure, unlike the one implemented in previous versions of Coq, is able to handle all terms in the Calculus of Inductive Constructions, even involving universes and strong elimination. P. Letouzey adapted user contributions to extract ML programs when it was sensible.

Jean-Christophe Filliâtre wrote ``coqdoc``, a documentation tool for Coq libraries usable from version 7.2.

Bruno Barras improved the efficiency of the reduction algorithm and the confidence level in the correctness of Coq's critical type checking algorithm.
Yves Bertot designed the ``SearchPattern`` and ``SearchRewrite`` tools and the support for the pcoq interface (http://www-sop.inria.fr/lemme/pcoq/). Micaela Mayero and David Delahaye introduced Field, a decision tactic for commutative fields. Christine Paulin changed the elimination rules for empty and singleton propositional inductive types. Loïc Pottier developed Fourier, a tactic solving linear inequalities on real numbers. Pierre Crégut developed a new, reflection-based version of the Omega decision procedure. Claudio Sacerdoti Coen designed an XML output for the Coq modules to be used in the Hypertextual Electronic Library of Mathematics (HELM cf http://www.cs.unibo.it/helm). A library for efficient representation of finite maps using binary trees contributed by Jean Goubault was integrated in the basic theories. Pierre Courtieu developed a command and a tactic to reason on the inductive structure of recursively defined functions. Jacek Chrząszcz designed and implemented the module system of Coq whose foundations are in Judicaël Courant’s PhD thesis. The development was coordinated by C. Paulin. Many discussions within the Démons team and the LogiCal project influenced significantly the design of Coq especially with J. Courant, J. Duprat, J. Goubault, A. Miquel, C. Marché, B. Monate and B. Werner. Intensive users suggested improvements of the system : Y. Bertot, L. Pottier, L. Théry, P. Zimmerman from INRIA, C. Alvarado, P. Crégut, J.-F. Monin from France Telecom R & D. | Orsay, May. 2002 | Hugo Herbelin & Christine Paulin | Details of changes in 7.0 and 7.1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Notes: - items followed by (**) are important sources of incompatibilities - items followed by (*) may exceptionally be sources of incompatibilities - items followed by (+) have been introduced in version 7.0 Main novelties ^^^^^^^^^^^^^^ References are to Coq 7.1 reference manual - New primitive let-in construct (see sections 1.2.8 and ) - Long names (see sections 2.6 and 2.7) - New high-level tactic language (see chapter 10) - Improved search facilities (see section 5.2) - New extraction algorithm managing the Type level (see chapter 17) - New rewriting tactic for arbitrary equalities (see chapter 19) - New tactic Field to decide equalities on commutative fields (see 7.11) - New tactic Fourier to solve linear inequalities on reals numbers (see 7.11) - New tactics for induction/case analysis in "natural" style (see 7.7) - Deep restructuration of the code (safer, simpler and more efficient) - Export of theories to XML for publishing and rendering purposes (see http://www.cs.unibo.it/helm) Details of changes ^^^^^^^^^^^^^^^^^^ Language: new "let-in" construction *********************************** - New construction for local definitions (let-in) with syntax [x:=u]t (*)(+) - Local definitions allowed in Record (a.k.a. record à la Randy Pollack) Language: long names ******************** - Each construction has a unique absolute names built from a base name, the name of the module in which they are defined (Top if in coqtop), and possibly an arbitrary long sequence of directory (e.g. "Coq.Lists.PolyList.flat_map" where "Coq" means that "flat_map" is part of Coq standard library, "Lists" means it is defined in the Lists library and "PolyList" means it is in the file Polylist) (+) - Constructions can be referred by their base name, or, in case of conflict, by a "qualified" name, where the base name is prefixed by the module name (and possibly by a directory name, and so on). 
A fully qualified name is an absolute name which always refer to the construction it denotes (to preserve the visibility of all constructions, no conflict is allowed for an absolute name) (+) - Long names are available for modules with the possibility of using the directory name as a component of the module full name (with option -R to coqtop and coqc, or command Add LoadPath) (+) - Improved conflict resolution strategy (the Unix PATH model), allowing more constructions to be referred just by their base name Language: miscellaneous *********************** - The names of variables for Record projections _and_ for induction principles (e.g. sum_ind) is now based on the first letter of their type (main source of incompatibility) (**)(+) - Most typing errors have now a precise location in the source (+) - Slightly different mechanism to solve "?" (*)(+) - More arguments may be considered implicit at section closing (*)(+) - Bug with identifiers ended by a number greater than 2^30 fixed (+) - New visibility discipline for Remark, Fact and Local: Remark's and Fact's now survive at the end of section, but are only accessible using a qualified names as soon as their strength expires; Local's disappear and are moved into local definitions for each construction persistent at section closing Language: Cases *************** - Cases no longer considers aliases inferable from dependencies in types (*)(+) - A redundant clause in Cases is now an error (*) Reduction ********* - New reduction flags "Zeta" and "Evar" in Eval Compute, for inlining of local definitions and instantiation of existential variables - Delta reduction flag does not perform Zeta and Evar reduction any more (*) - Constants declared as opaque (using Qed) can no longer become transparent (a constant intended to be alternatively opaque and transparent must be declared as transparent (using Defined)); a risk exists (until next Coq version) that Simpl and Hnf reduces opaque constants (*) New tactics *********** - New set of tactics to deal with types equipped with specific equalities (a.k.a. Setoids, e.g. nat equipped with eq_nat) [by C. Renard] - New tactic Assert, similar to Cut but expected to be more user-friendly - New tactic NewDestruct and NewInduction intended to replace Elim and Induction, Case and Destruct in a more user-friendly way (see restrictions in the reference manual) - New tactic ROmega: an experimental alternative (based on reflexion) to Omega [by P. Crégut] - New tactic language Ltac (see reference manual) (+) - New versions of Tauto and Intuition, fully rewritten in the new Ltac language; they run faster and produce more compact proofs; Tauto is fully compatible but, in exchange of a better uniformity, Intuition is slightly weaker (then use Tauto instead) (**)(+) - New tactic Field to decide equalities on commutative fields (as a special case, it works on real numbers) (+) - New tactic Fourier to solve linear inequalities on reals numbers [by L. Pottier] (+) - New tactics dedicated to real numbers: DiscrR, SplitRmult, SplitAbsolu (+) Changes in existing tactics *************************** - Reduction tactics in local definitions apply only to the body - New syntax of the form "Compute in Type of H." to require a reduction on the types of local definitions - Inversion, Injection, Discriminate, ... 
apply also on the quantified premises of a goal (using the "Intros until" syntax) - Decompose has been fixed but hypotheses may get different names (*)(+) - Tauto now manages uniformly hypotheses and conclusions of the form ``t=t`` which all are considered equivalent to ``True``. Especially, Tauto now solves goals of the form ``H : ~ t = t |- A``. - The "Let" tactic has been renamed "LetTac" and is now based on the primitive "let-in" (+) - Elim can no longer be used with an elimination schema different from the one defined at definition time of the inductive type. To overload an elimination schema, use "Elim using " (*)(+) - Simpl no longer unfolds the recursive calls of a mutually defined fixpoint (*)(+) - Intro now fails if the hypothesis name already exists (*)(+) - "Require Prolog" is no longer needed (i.e. it is available by default) (*)(+) - Unfold now fails on a non-unfoldable identifier (*)(+) - Unfold also applies on definitions of the local context - AutoRewrite now deals only with the main goal and it is the purpose of Hint Rewrite to deal with generated subgoals (+) - Redundant or incompatible instantiations in Apply ... with ... are now correctly managed (+) Efficiency ********** - Excessive memory uses specific to V7.0 fixed - Sizes of .vo files vary a lot compared to V6.3 (from -30% to +300% depending on the developments) - An improved reduction strategy for lazy evaluation - A more economical mechanism to ensure logical consistency at the Type level; warning: this is experimental and may produce "universes" anomalies (please report) Concrete syntax of constructions ******************************** - Only identifiers starting with "_" or a letter, and followed by letters, digits, "_" or "'" are allowed (e.g. "$" and "@" are no longer allowed) (*) - A multiple binder like (a:A)(a,b:(P a))(Q a) is no longer parsed as (a:A)(a0:(P a))(b:(P a))(Q a0) but as (a:A)(a0:(P a))(b:(P a0))(Q a0) (*)(+) - A dedicated syntax has been introduced for Reals (e.g ``3+1/x``) (+) - Pretty-printing of Infix notations fixed. (+) Parsing and grammar extension ***************************** - More constraints when writing ast - "{...}" and the macros $LIST, $VAR, etc. now expect a metavariable (an identifier starting with $) (*) - identifiers should starts with a letter or "_" and be followed by letters, digits, "_" or "'" (other characters are still supported but it is not advised to use them) (*)(+) - Entry "command" in "Grammar" and quotations (<<...>> stuff) is renamed "constr" as in "Syntax" (+) - New syntax "[" sentence_1 ... sentence_n"]." to group sentences (useful for Time and to write grammar rules abbreviating several commands) (+) - The default parser for actions in the grammar rules (and for patterns in the pretty-printing rules) is now the one associated with the grammar (i.e. vernac, tactic or constr); no need then for quotations as in <:vernac:<...>>; to return an "ast", the grammar must be explicitly typed with tag ": ast" or ": ast list", or if a syntax rule, by using <<...>> in the patterns (expression inside these angle brackets are parsed as "ast"); for grammars other than vernac, tactic or constr, you may explicitly type the action with tags ": constr", ": tactic", or ":vernac" (**)(+) - Interpretation of names in Grammar rule is now based on long names, which allows to avoid problems (or sometimes tricks;) related to overloaded names (+) New commands ************ - New commands "Print XML All", "Show XML Proof", ... 
to show or export theories to XML to be used with Helm's publishing and rendering tools (see http://www.cs.unibo.it/helm) (by Claudio Sacerdoti Coen) (+) - New commands to manually set implicit arguments (+) - "Implicits ident." to activate the implicit arguments mode just for ident - "Implicits ident [num1 num2 ...]." to explicitly give which arguments have to be considered as implicit - New SearchPattern/SearchRewrite (by Yves Bertot) (+) - New commands "Debug on"/"Debug off" to activate/deactivate the tactic language debugger (+) - New commands to map physical paths to logical paths (+) - Add LoadPath physical_dir as logical_dir - Add Rec LoadPath physical_dir as logical_dir Changes in existing commands **************************** - Generalization of the usage of qualified identifiers in tactics and commands about globals, e.g. Decompose, Eval Delta; Hints Unfold, Transparent, Require - Require synchronous with Reset; Require's scope stops at Section ending (*) - For a module indirectly loaded by a "Require" but not exported, the command "Import module" turns the constructions defined in the module accessible by their short name, and activates the Grammar, Syntax, Hint, ... declared in the module (+) - The scope of the "Search" command can be restricted to some modules (+) - Final dot in command (full stop/period) must be followed by a blank (newline, tabulation or whitespace) (+) - Slight restriction of the syntax for Cbv Delta: if present, option [-myconst] must immediately follow the Delta keyword (*)(+) - SearchIsos currently not supported - Add ML Path is now implied by Add LoadPath (+) - New names for the following commands (+) AddPath -> Add LoadPath Print LoadPath -> Print LoadPath DelPath -> Remove LoadPath AddRecPath -> Add Rec LoadPath Print Path -> Print Coercion Paths Implicit Arguments On -> Set Implicit Arguments Implicit Arguments Off -> Unset Implicit Arguments Begin Silent -> Set Silent End Silent -> Unset Silent. Tools ***** - coqtop (+) - Two executables: coqtop.byte and coqtop.opt (if supported by the platform) - coqtop is a link to the more efficient executable (coqtop.opt if present) - option -full is obsolete (+) - do_Makefile renamed into coq_makefile (+) - New option -R to coqtop and coqc to map a physical directory to a logical one (+) - coqc no longer needs to create a temporary file - No more warning if no initialization file .coqrc exists Extraction ********** - New algorithm for extraction able to deal with "Type" (+) (by J.-C. Filliâtre and P. Letouzey) Standard library **************** - New library on maps on integers (IntMap, contributed by Jean Goubault) - New lemmas about integer numbers [ZArith] - New lemmas and a "natural" syntax for reals [Reals] (+) - Exc/Error/Value renamed into Option/Some/None (*) New user contributions ********************** - Constructive complex analysis and the Fundamental Theorem of Algebra [FTA] (Herman Geuvers, Freek Wiedijk, Jan Zwanenburg, Randy Pollack, Henk Barendregt, Nijmegen) - A new axiomatization of ZFC set theory [Functions_in_ZFC] (C. 
Simpson, Sophia-Antipolis) - Basic notions of graph theory [GRAPHS-BASICS] (Jean Duprat, Lyon) - A library for floating-point numbers [Float] (Laurent Théry, Sylvie Boldo, Sophia-Antipolis) - Formalisation of CTL and TCTL temporal logic [CtlTctl] (Carlos Daniel Luna,Montevideo) - Specification and verification of the Railroad Crossing Problem in CTL and TCTL [RailroadCrossing] (Carlos Daniel Luna,Montevideo) - P-automaton and the ABR algorithm [PAutomata] (Christine Paulin, Emmanuel Freund, Orsay) - Semantics of a subset of the C language [MiniC] (Eduardo Giménez, Emmanuel Ledinot, Suresnes) - Correctness proofs of the following imperative algorithms: Bresenham line drawing algorithm [Bresenham], Marché's minimal edition distance algorithm [Diff] (Jean-Christophe Filliâtre, Orsay) - Correctness proofs of Buchberger's algorithm [Buchberger] and RSA cryptographic algorithm [Rsa] (Laurent Théry, Sophia-Antipolis) - Correctness proof of Stalmarck tautology checker algorithm [Stalmarck] (Laurent Théry, Pierre Letouzey, Sophia-Antipolis) Details of changes in 7.2 ~~~~~~~~~~~~~~~~~~~~~~~~~ Language - Automatic insertion of patterns for local definitions in the type of the constructors of an inductive types (for compatibility with V6.3 let-in style) - Coercions allowed in Cases patterns - New declaration "Canonical Structure id = t : I" to help resolution of equations of the form (proj ?)=a; if proj(e)=a then a is canonically equipped with the remaining fields in e, i.e. ? is instantiated by e Tactics - New tactic "ClearBody H" to clear the body of definitions in local context - New tactic "Assert H := c" for forward reasoning - Slight improvement in naming strategy for NewInduction/NewDestruct - Intuition/Tauto do not perform useless unfolding and work up to conversion Extraction (details in plugins/extraction/CHANGES or documentation) - Syntax changes: there are no more options inside the extraction commands. New commands for customization and options have been introduced instead. - More optimizations on extracted code. - Extraction tests are now embedded in 14 user contributions. Standard library - In [Relations], Rstar.v and Newman.v now axiom-free. - In [Sets], Integers.v now based on nat - In [Arith], more lemmas in Min.v, new file Max.v, tail-recursive plus and mult added to Plus.v and Mult.v respectively - New directory [Sorting] with a proof of heapsort (dragged from 6.3.1 lib) - In [Reals], more lemmas in Rbase.v, new lemmas on square, square root and trigonometric functions (R_sqr.v - Rtrigo.v); a complementary approach and new theorems about continuity and derivability in Ranalysis.v; some properties in plane geometry such as translation, rotation or similarity in Rgeom.v; finite sums and Chasles property in Rsigma.v Bugs - Confusion between implicit args of locals and globals of same base name fixed - Various incompatibilities wrt inference of "?" 
in V6.3.1 fixed - Implicits in infix section variables bug fixed - Known coercions bugs fixed - Apply "universe anomaly" bug fixed - NatRing now working - "Discriminate 1", "Injection 1", "Simplify_eq 1" now working - NewInduction bugs with let-in and recursively dependent hypotheses fixed - Syntax [x:=t:T]u now allowed as mentioned in documentation - Bug with recursive inductive types involving let-in fixed - Known pattern-matching bugs fixed - Known Cases elimination predicate bugs fixed - Improved errors messages for pattern-matching and projections - Better error messages for ill-typed Cases expressions Incompatibilities - New naming strategy for NewInduction/NewDestruct may affect 7.1 compatibility - Extra parentheses may exceptionally be needed in tactic definitions. - Coq extensions written in OCaml need to be updated (see dev/changements.txt for a description of the main changes in the interface files of V7.2) - New behavior of Intuition/Tauto may exceptionally lead to incompatibilities Details of changes in 7.3 ~~~~~~~~~~~~~~~~~~~~~~~~~ Language - Slightly improved compilation of pattern-matching (slight source of incompatibilities) - Record's now accept anonymous fields "_" which does not build projections - Changes in the allowed elimination sorts for certain class of inductive definitions : an inductive definition without constructors of Sort Prop can be eliminated on sorts Set and Type A "singleton" inductive definition (one constructor with arguments in the sort Prop like conjunction of two propositions or equality) can be eliminated directly on sort Type (In V7.2, only the sorts Prop and Set were allowed) Tactics - New tactic "Rename x into y" for renaming hypotheses - New tactics "Pose x:=u" and "Pose u" to add definitions to local context - Pattern now working on partially applied subterms - Ring no longer applies irreversible congruence laws of mult but better applies congruence laws of plus (slight source of incompatibilities). - Field now accepts terms to be simplified as arguments (as for Ring). This extension has been also implemented using the toplevel tactic language. - Intuition does no longer unfold constants except "<->" and "~". It can be parameterized by a tactic. It also can introduce dependent product if needed (source of incompatibilities) - "Match Context" now matching more recent hypotheses first and failing only on user errors and Fail tactic (possible source of incompatibilities) - Tactic Definition's without arguments now allowed in Coq states - Better simplification and discrimination made by Inversion (source of incompatibilities) Bugs - "Intros H" now working like "Intro H" trying first to reduce if not a product - Forward dependencies in Cases now taken into account - Known bugs related to Inversion and let-in's fixed - Bug unexpected Delta with let-in now fixed Extraction (details in plugins/extraction/CHANGES or documentation) - Signatures of extracted terms are now mostly expunged from dummy arguments. - Haskell extraction is now operational (tested & debugged). 
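For readers who have never used extraction, here is what invoking it looks like in present-day Coq (an illustrative sketch using current commands, not the 7.x-era syntax)::

   From Coq Require Extraction.
   Extraction Language Haskell.

   (* Write a Haskell version of addition on Peano naturals to add.hs. *)
   Extraction "add.hs" Nat.add.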
Standard library - Some additions in [ZArith]: three files (Zcomplements.v, Zpower.v and Zlogarithms.v) moved from plugins/omega in order to be more visible, one Zsgn function, more induction principles (Wf_Z.v and tail of Zcomplements.v), one more general Euclid theorem - Peano_dec.v and Compare_dec.v now part of Arith.v Tools - new option -dump-glob to coqtop to dump globalizations (to be used by the new documentation tool coqdoc; see http://www.lri.fr/~filliatr/coqdoc) User Contributions - CongruenceClosure (congruence closure decision procedure) [Pierre Corbineau, ENS Cachan] - MapleMode (an interface to embed Maple simplification procedures over rational fractions in Coq) [David Delahaye, Micaela Mayero, Chalmers University] - Presburger: A formalization of Presburger's algorithm [Laurent Thery, INRIA Sophia Antipolis] - Chinese has been rewritten using Z from ZArith as datatype ZChinese is the new version, Chinese the obsolete one [Pierre Letouzey, LRI Orsay] Incompatibilities - Ring: exceptional incompatibilities (1 above 650 in submitted user contribs, leading to a simplification) - Intuition: does not unfold any definition except "<->" and "~" - Cases: removal of some extra Cases in configurations of the form "Cases ... of C _ => ... | _ D => ..." (effects on 2 definitions of submitted user contributions necessitating the removal of now superfluous proof steps in 3 different proofs) - Match Context, in case of incompatibilities because of a now non trapped error (e.g. Not_found or Failure), use instead tactic Fail to force Match Context trying the next clause - Inversion: better simplification and discrimination may occasionally lead to less subgoals and/or hypotheses and different naming of hypotheses - Unification done by Apply/Elim has been changed and may exceptionally lead to incompatible instantiations - Peano_dec.v and Compare_dec.v parts of Arith.v make Auto more powerful if these files were not already required (1 occurrence of this in submitted user contribs) Changes in 7.3.1 ^^^^^^^^^^^^^^^^ Bug fixes - Corrupted Field tactic and Match Context tactic construction fixed - Checking of names already existing in Assert added (#1386) - Invalid argument bug in Exact tactic solved (#1387) - Colliding bound names bug fixed (#1412) - Wrong non-recursivity test for Record fixed (#1394) - Out of memory/seg fault bug related to parametric inductive fixed (#1404) - Setoid_replace/Setoid_rewrite bug wrt "==" fixed Misc - Ocaml version >= 3.06 is needed to compile Coq from sources - Simplification of fresh names creation strategy for Assert, Pose and LetTac (#1402) Details of changes in 7.4 ~~~~~~~~~~~~~~~~~~~~~~~~~ Symbolic notations - Introduction of a notion of scope gathering notations in a consistent set; a notation sets has been developed for nat, Z and R (undocumented) - New command "Notation" for declaring notations simultaneously for parsing and printing (see chap 10 of the reference manual) - Declarations with only implicit arguments now handled (e.g. the argument of nil can be set implicit; use !nil to refer to nil without arguments) - "Print Scope sc" and "Locate ntn" allows to know to what expression a notation is bound - New defensive strategy for printing or not implicit arguments to ensure re-type-checkability of the printed term - In Grammar command, the only predefined non-terminal entries are ident, global, constr and pattern (e.g. 
nvar, numarg disappears); the only allowed grammar types are constr and pattern; ast and ast list are no longer supported; some incompatibilities in Grammar: when a syntax is a initial segment of an other one, Grammar does not work, use Notation Library - Lemmas in Set from Compare_dec.v (le_lt_dec, ...) and Wf_nat.v (lt_wf_rec, ...) are now transparent. This may be source of incompatibilities. - Syntactic Definitions Fst, Snd, Ex, All, Ex2, AllT, ExT, ExT2, ProjS1, ProjS2, Error, Value and Except are turned to notations. They now must be applied (incompatibilities only in unrealistic cases). - More efficient versions of Zmult and times (30% faster) - Reals: the library is now divided in 6 parts (Rbase, Rfunctions, SeqSeries, Rtrigo, Ranalysis, Integration). New tactics: Sup and RCompute. See Reals.v for details. Modules - Beta version, see doc chap 2.5 for commands and chap 5 for theory Language - Inductive definitions now accept ">" in constructor types to declare the corresponding constructor as a coercion. - Idem for assumptions declarations and constants when the type is mentioned. - The "Coercion" and "Canonical Structure" keywords now accept the same syntax as "Definition", i.e. "hyps :=c (:t)?" or "hyps :t". - Theorem-like declaration now accepts the syntax "Theorem thm [x:t;...] : u". - Remark's and Fact's now definitively behave as Theorem and Lemma: when sections are closed, the full name of a Remark or a Fact has no longer a section part (source of incompatibilities) - Opaque Local's (i.e. built by tactics and ended by Qed), do not survive section closing any longer; as a side-effect, Opaque Local's now appear in the local context of proofs; their body is hidden though (source of incompatibilities); use one of Remark/Fact/Lemma/Theorem instead to simulate the old behavior of Local (the section part of the name is not kept though) ML tactics and commands - "Grammar tactic" and "Grammar vernac" of type "ast" are no longer supported (only "Grammar tactic simple_tactic" of type "tactic" remains available). - Concrete syntax for ML written commands and tactics is now declared at ML level using camlp4 macros TACTIC EXTEND et VERNAC COMMAND EXTEND. - "Check n c" now "n:Check c", "Eval n ..." now "n:Eval ..." - ``Proof with T`` (no documentation) - SearchAbout id - prints all theorems which contain id in their type Tactic definitions - Static globalisation of identifiers and global references (source of incompatibilities, especially, Recursive keyword is required for mutually recursive definitions). - New evaluation semantics: no more partial evaluation at definition time; evaluation of all Tactic/Meta Definition, even producing terms, expect a proof context to be evaluated (especially "()" is no longer needed). - Debugger now shows the nesting level and the reasons of failure Tactics - Equality tactics (Rewrite, Reflexivity, Symmetry, Transitivity) now understand JM equality - Simpl and Change now apply to subterms also - "Simpl f" reduces subterms whose head constant is f - Double Induction now referring to hypotheses like "Intros until" - "Inversion" now applies also on quantified hypotheses (naming as for Intros until) - NewDestruct now accepts terms with missing hypotheses - NewDestruct and NewInduction now accept user-provided elimination scheme - NewDestruct and NewInduction now accept user-provided introduction names - Omega could solve goals such as ``~x=y`` but failed when the hypothesis was unfolded to ``x < y -> False``. This is fixed. 
In addition, it can also recognize 'False' in the hypothesis and use it to solve the goal. - Coercions now handled in "with" bindings - "Subst x" replaces all occurrences of x by t in the goal and hypotheses when an hypothesis x=t or x:=t or t=x exists - Fresh names for Assert and Pose now based on collision-avoiding Intro naming strategy (exceptional source of incompatibilities) - LinearIntuition (no documentation) - Unfold expects a correct evaluable argument - Clear expects existing hypotheses Extraction (See details in plugins/extraction/CHANGES and README): - An experimental Scheme extraction is provided. - Concerning OCaml, extracted code is now ensured to always type check, thanks to automatic inserting of Obj.magic. - Experimental extraction of Coq new modules to Ocaml modules. Proof rendering in natural language - Export of theories to XML for publishing and rendering purposes now includes proof-trees (see http://www.cs.unibo.it/helm) Miscellaneous - Printing Coercion now used through the standard keywords Set/Add, Test, Print - "Print Term id" is an alias for "Print id" - New switch "Unset/Set Printing Symbols" to control printing of symbolic notations - Two new variants of implicit arguments are available + ``Unset``/``Set Contextual Implicits`` tells to consider implicit also the arguments inferable from the context (e.g. for nil or refl_eq) + ``Unset``/``Set Strict Implicits`` tells to consider implicit only the arguments that are inferable in any case (i.e. arguments that occurs as argument of rigid constants in the type of the remaining arguments; e.g. the witness of an existential is not strict since it can vanish when applied to a predicate which does not use its argument) Incompatibilities - "Grammar tactic ... : ast" and "Grammar vernac ... : ast" are no longer supported, use TACTIC EXTEND and VERNAC COMMAND EXTEND on the ML-side instead - Transparency of le_lt_dec and co (leads to some simplification in proofs; in some cases, incompatibilites is solved by declaring locally opaque the relevant constant) - Opaque Local do not now survive section closing (rename them into Remark/Lemma/... to get them still surviving the sections; this renaming allows also to solve incompatibilites related to now forbidden calls to the tactic Clear) - Remark and Fact have no longer (very) long names (use Local instead in case of name conflict) Bugs - Improved localisation of errors in Syntactic Definitions - Induction principle creation failure in presence of let-in fixed (#1459) - Inversion bugs fixed (#1427 and #1437) - Omega bug related to Set fixed (#1384) - Type-checking inefficiency of nested destructuring let-in fixed (#1435) - Improved handling of let-in during holes resolution phase (#1460) Efficiency - Implementation of a memory sharing strategy reducing memory requirements by an average ratio of 3. coq-8.15.0/doc/sphinx/index.html.rst000066400000000000000000000012241417001151100172370ustar00rootroot00000000000000========================== Introduction and Contents ========================== .. include:: introduction.rst Contents -------- .. toctree:: self .. toctree:: :caption: Specification language language/core/index language/extensions/index .. toctree:: :caption: Proofs proofs/writing-proofs/index proofs/automatic-tactics/index proofs/creating-tactics/index .. toctree:: :caption: Using Coq using/libraries/index using/tools/index .. toctree:: :caption: Appendix appendix/history-and-changes/index appendix/indexes/index zebibliography .. No entries yet * :index:`thmindex` .. 
include:: license.rst coq-8.15.0/doc/sphinx/index.latex.rst000066400000000000000000000012001417001151100174020ustar00rootroot00000000000000========================== The Coq Reference Manual ========================== ------------ Introduction ------------ .. include:: introduction.rst .. include:: license.rst ---------------------- Specification language ---------------------- .. toctree:: language/core/index language/extensions/index ------ Proofs ------ .. toctree:: proofs/writing-proofs/index proofs/automatic-tactics/index proofs/creating-tactics/index --------- Using Coq --------- .. toctree:: using/libraries/index using/tools/index -------- Appendix -------- .. toctree:: appendix/history-and-changes/index zebibliography coq-8.15.0/doc/sphinx/introduction.rst000066400000000000000000000067031417001151100177150ustar00rootroot00000000000000This is the reference manual of Coq. Coq is an interactive theorem prover. It lets you formalize mathematical concepts and then helps you interactively generate machine-checked proofs of theorems. Machine checking gives users much more confidence that the proofs are correct compared to human-generated and -checked proofs. Coq has been used in a number of flagship verification projects, including the `CompCert verified C compiler `_, and has served to verify the proof of the `four color theorem `_ (among many other mathematical formalizations). Users generate proofs by entering a series of tactics that constitute steps in the proof. There are many built-in tactics, some of which are elementary, while others implement complex decision procedures (such as :tacn:`lia`, a decision procedure for linear integer arithmetic). :ref:`Ltac ` and its planned replacement, :ref:`Ltac2 `, provide languages to define new tactics by combining existing tactics with looping and conditional constructs. These permit automation of large parts of proofs and sometimes entire proofs. Furthermore, users can add novel tactics or functionality by creating Coq plugins using OCaml. The Coq kernel, a small part of Coq, does the final verification that the tactic-generated proof is valid. Usually the tactic-generated proof is indeed correct, but delegating proof verification to the kernel means that even if a tactic is buggy, it won't be able to introduce an incorrect proof into the system. Finally, Coq also supports extraction of verified programs to programming languages such as OCaml and Haskell. This provides a way of executing Coq code efficiently and can be used to create verified software libraries. To learn Coq, beginners are advised to first start with a tutorial / book. Several such tutorials / books are listed at https://coq.inria.fr/documentation. This manual is organized in three main parts, plus an appendix: - **The first part presents the specification language of Coq**, that allows to define programs and state mathematical theorems. :ref:`core-language` presents the language that the kernel of Coq understands. :ref:`extensions` presents the richer language, with notations, implicits, etc. that a user can use and which is translated down to the language of the kernel by means of an "elaboration process". - **The second part presents proof mode**, the central feature of Coq. :ref:`writing-proofs` introduces this interactive mode and the available proof languages. :ref:`automatic-tactics` presents some more advanced tactics, while :ref:`writing-tactics` is about the languages that allow a user to combine tactics together and develop new ones. 
- **The third part shows how to use Coq in practice.** :ref:`libraries` presents some of the essential reusable blocks from the ecosystem and some particularly important extensions such as the program extraction mechanism. :ref:`tools` documents important tools that a user needs to build a Coq project. - In the appendix, :ref:`history-and-changes` presents the history of Coq and changes in recent releases. This is an important reference if you upgrade the version of Coq that you use. The various :ref:`indexes ` are very useful to **quickly browse the manual and find what you are looking for.** They are often the main entry point to the manual. The full table of contents is presented below: coq-8.15.0/doc/sphinx/language/000077500000000000000000000000001417001151100162175ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/language/cic.rst000066400000000000000000000471741417001151100175240ustar00rootroot00000000000000Typing rules ==================================== The underlying formal language of Coq is a :gdef:`Calculus of Inductive Constructions` (|Cic|) whose inference rules are presented in this chapter. The history of this formalism as well as pointers to related work are provided in a separate chapter; see :ref:`history`. .. _The-terms: The terms ------------- The expressions of the |Cic| are *terms* and all terms have a *type*. There are types for functions (or programs), there are atomic types (especially datatypes)... but also types for proofs and types for the types themselves. Especially, any object handled in the formalism must belong to a type. For instance, universal quantification is relative to a type and takes the form “*for all x of type* :math:`T`, :math:`P`”. The expression “:math:`x` *of type* :math:`T`” is written “:math:`x:T`”. Informally, “:math:`x:T`” can be thought as “:math:`x` *belongs to* :math:`T`”. Terms are built from sorts, variables, constants, abstractions, applications, local definitions, and products. From a syntactic point of view, types cannot be distinguished from terms, except that they cannot start by an abstraction or a constructor. More precisely the language of the *Calculus of Inductive Constructions* is built from the following rules. #. the sorts :math:`\SProp`, :math:`\Prop`, :math:`\Set`, :math:`\Type(i)` are terms. #. variables, hereafter ranged over by letters :math:`x`, :math:`y`, etc., are terms #. constants, hereafter ranged over by letters :math:`c`, :math:`d`, etc., are terms. #. if :math:`x` is a variable and :math:`T`, :math:`U` are terms then :math:`∀ x:T,~U` (:g:`forall x:T, U` in Coq concrete syntax) is a term. If :math:`x` occurs in :math:`U`, :math:`∀ x:T,~U` reads as “for all :math:`x` of type :math:`T`, :math:`U`”. As :math:`U` depends on :math:`x`, one says that :math:`∀ x:T,~U` is a *dependent product*. If :math:`x` does not occur in :math:`U` then :math:`∀ x:T,~U` reads as “if :math:`T` then :math:`U`”. A *non-dependent product* can be written: :math:`T \rightarrow U`. #. if :math:`x` is a variable and :math:`T`, :math:`u` are terms then :math:`λ x:T .~u` (:g:`fun x:T => u` in Coq concrete syntax) is a term. This is a notation for the λ-abstraction of λ-calculus :cite:`Bar81`. The term :math:`λ x:T .~u` is a function which maps elements of :math:`T` to the expression :math:`u`. #. if :math:`t` and :math:`u` are terms then :math:`(t~u)` is a term (:g:`t u` in Coq concrete syntax). The term :math:`(t~u)` reads as “:math:`t` applied to :math:`u`”. #. 
if :math:`x` is a variable, and :math:`t`, :math:`T` and :math:`u` are terms then :math:`\letin{x}{t:T}{u}` is a term which denotes the term :math:`u` where the variable :math:`x` is locally bound to :math:`t` of type :math:`T`. This stands for the common “let-in” construction of functional programs such as ML or Scheme. .. _Free-variables: **Free variables.** The notion of free variables is defined as usual. In the expressions :math:`λx:T.~U` and :math:`∀ x:T,~U` the occurrences of :math:`x` in :math:`U` are bound. .. _Substitution: **Substitution.** The notion of substituting a term :math:`t` to free occurrences of a variable :math:`x` in a term :math:`u` is defined as usual. The resulting term is written :math:`\subst{u}{x}{t}`. .. _The-logical-vs-programming-readings: **The logical vs programming readings.** The constructions of the |Cic| can be used to express both logical and programming notions, according to the Curry-Howard correspondence between proofs and programs, and between propositions and types :cite:`Cur58,How80,Bru72`. For instance, let us assume that :math:`\nat` is the type of natural numbers with zero element written :math:`0` and that :g:`True` is the always true proposition. Then :math:`→` is used both to denote :math:`\nat→\nat` which is the type of functions from :math:`\nat` to :math:`\nat`, to denote True→True which is an implicative proposition, to denote :math:`\nat →\Prop` which is the type of unary predicates over the natural numbers, etc. Let us assume that ``mult`` is a function of type :math:`\nat→\nat→\nat` and ``eqnat`` a predicate of type :math:`\nat→\nat→ \Prop`. The λ-abstraction can serve to build “ordinary” functions as in :math:`λ x:\nat.~(\kw{mult}~x~x)` (i.e. :g:`fun x:nat => mult x x` in Coq notation) but may build also predicates over the natural numbers. For instance :math:`λ x:\nat.~(\kw{eqnat}~x~0)` (i.e. :g:`fun x:nat => eqnat x 0` in Coq notation) will represent the predicate of one variable :math:`x` which asserts the equality of :math:`x` with :math:`0`. This predicate has type :math:`\nat → \Prop` and it can be applied to any expression of type :math:`\nat`, say :math:`t`, to give an object :math:`P~t` of type :math:`\Prop`, namely a proposition. Furthermore :g:`forall x:nat, P x` will represent the type of functions which associate with each natural number :math:`n` an object of type :math:`(P~n)` and consequently represent the type of proofs of the formula “:math:`∀ x.~P(x)`”. .. _Typing-rules: Typing rules ---------------- As objects of type theory, terms are subjected to *type discipline*. The well typing of a term depends on a local context and a global environment. .. _Local-context: **Local context.** A :term:`local context` is an ordered list of declarations of *variables*. The declaration of a variable :math:`x` is either an *assumption*, written :math:`x:T` (where :math:`T` is a type) or a *definition*, written :math:`x:=t:T`. Local contexts are written in brackets, for example :math:`[x:T;~y:=u:U;~z:V]`. The variables declared in a local context must be distinct. If :math:`Γ` is a local context that declares :math:`x`, we write :math:`x ∈ Γ`. Writing :math:`(x:T) ∈ Γ` means there is an assumption or a definition giving the type :math:`T` to :math:`x` in :math:`Γ`. If :math:`Γ` defines :math:`x:=t:T`, we also write :math:`(x:=t:T) ∈ Γ`. For the rest of the chapter, :math:`Γ::(y:T)` denotes the local context :math:`Γ` enriched with the local assumption :math:`y:T`. 
Similarly, :math:`Γ::(y:=t:T)` denotes the local context :math:`Γ` enriched with the local definition :math:`(y:=t:T)`. The notation :math:`[]` denotes the empty local context. Writing :math:`Γ_1 ; Γ_2` means concatenation of the local context :math:`Γ_1` and the local context :math:`Γ_2`. .. _Global-environment: **Global environment.** A :term:`global environment` is an ordered list of *declarations*. Global declarations are either *assumptions*, *definitions* or declarations of inductive objects. Inductive objects declare both constructors and inductive or coinductive types (see Section :ref:`inductive-definitions`). In the global environment, *assumptions* are written as :math:`(c:T)`, indicating that :math:`c` is of the type :math:`T`. *Definitions* are written as :math:`c:=t:T`, indicating that :math:`c` has the value :math:`t` and type :math:`T`. We shall call such names :term:`constants `. For the rest of the chapter, the :math:`E;~c:T` denotes the global environment :math:`E` enriched with the assumption :math:`c:T`. Similarly, :math:`E;~c:=t:T` denotes the global environment :math:`E` enriched with the definition :math:`(c:=t:T)`. The rules for inductive definitions (see Section :ref:`inductive-definitions`) have to be considered as assumption rules in which the following definitions apply: if the name :math:`c` is declared in :math:`E`, we write :math:`c ∈ E` and if :math:`c:T` or :math:`c:=t:T` is declared in :math:`E`, we write :math:`(c : T) ∈ E`. .. _Typing-rules2: **Typing rules.** In the following, we define simultaneously two judgments. The first one :math:`\WTEG{t}{T}` means the term :math:`t` is well-typed and has type :math:`T` in the global environment :math:`E` and local context :math:`Γ`. The second judgment :math:`\WFE{Γ}` means that the global environment :math:`E` is well-formed and the local context :math:`Γ` is a valid local context in this global environment. A term :math:`t` is well typed in a global environment :math:`E` iff there exists a local context :math:`\Gamma` and a term :math:`T` such that the judgment :math:`\WTEG{t}{T}` can be derived from the following rules. .. inference:: W-Empty --------- \WF{[]}{} .. inference:: W-Local-Assum \WTEG{T}{s} s \in \Sort x \not\in \Gamma % \cup E ------------------------- \WFE{\Gamma::(x:T)} .. inference:: W-Local-Def \WTEG{t}{T} x \not\in \Gamma % \cup E ------------------------- \WFE{\Gamma::(x:=t:T)} .. inference:: W-Global-Assum \WTE{}{T}{s} s \in \Sort c \notin E ------------ \WF{E;~c:T}{} .. inference:: W-Global-Def \WTE{}{t}{T} c \notin E --------------- \WF{E;~c:=t:T}{} .. inference:: Ax-SProp \WFE{\Gamma} ---------------------- \WTEG{\SProp}{\Type(1)} .. inference:: Ax-Prop \WFE{\Gamma} ---------------------- \WTEG{\Prop}{\Type(1)} .. inference:: Ax-Set \WFE{\Gamma} --------------------- \WTEG{\Set}{\Type(1)} .. inference:: Ax-Type \WFE{\Gamma} --------------------------- \WTEG{\Type(i)}{\Type(i+1)} .. inference:: Var \WFE{\Gamma} (x:T) \in \Gamma~~\mbox{or}~~(x:=t:T) \in \Gamma~\mbox{for some $t$} -------------------------------------------------------------------- \WTEG{x}{T} .. inference:: Const \WFE{\Gamma} (c:T) \in E~~\mbox{or}~~(c:=t:T) \in E~\mbox{for some $t$} ---------------------------------------------------------- \WTEG{c}{T} .. inference:: Prod-SProp \WTEG{T}{s} s \in {\Sort} \WTE{\Gamma::(x:T)}{U}{\SProp} ----------------------------- \WTEG{\forall~x:T,U}{\SProp} .. 
inference:: Prod-Prop \WTEG{T}{s} s \in \Sort \WTE{\Gamma::(x:T)}{U}{\Prop} ----------------------------- \WTEG{∀ x:T,~U}{\Prop} .. inference:: Prod-Set \WTEG{T}{s} s \in \{\SProp, \Prop, \Set\} \WTE{\Gamma::(x:T)}{U}{\Set} ---------------------------- \WTEG{∀ x:T,~U}{\Set} .. inference:: Prod-Type \WTEG{T}{s} s \in \{\SProp, \Type(i)\} \WTE{\Gamma::(x:T)}{U}{\Type(i)} -------------------------------- \WTEG{∀ x:T,~U}{\Type(i)} .. inference:: Lam \WTEG{∀ x:T,~U}{s} \WTE{\Gamma::(x:T)}{t}{U} ------------------------------------ \WTEG{λ x:T\mto t}{∀ x:T,~U} .. inference:: App \WTEG{t}{∀ x:U,~T} \WTEG{u}{U} ------------------------------ \WTEG{(t\ u)}{\subst{T}{x}{u}} .. inference:: Let \WTEG{t}{T} \WTE{\Gamma::(x:=t:T)}{u}{U} ----------------------------------------- \WTEG{\letin{x}{t:T}{u}}{\subst{U}{x}{t}} .. note:: **Prod-Prop** and **Prod-Set** typing-rules make sense if we consider the semantic difference between :math:`\Prop` and :math:`\Set`: + All values of a type that has a sort :math:`\Set` are extractable. + No values of a type that has a sort :math:`\Prop` are extractable. .. note:: We may have :math:`\letin{x}{t:T}{u}` well-typed without having :math:`((λ x:T.~u)~t)` well-typed (where :math:`T` is a type of :math:`t`). This is because the value :math:`t` associated with :math:`x` may be used in a conversion rule (see Section :ref:`Conversion-rules`). .. _subtyping-rules: Subtyping rules ------------------- At the moment, we did not take into account one rule between universes which says that any term in a universe of index :math:`i` is also a term in the universe of index :math:`i+1` (this is the *cumulativity* rule of |Cic|). This property extends the equivalence relation of convertibility into a *subtyping* relation inductively defined by: #. if :math:`E[Γ] ⊢ t =_{βδιζη} u` then :math:`E[Γ] ⊢ t ≤_{βδιζη} u`, #. if :math:`i ≤ j` then :math:`E[Γ] ⊢ \Type(i) ≤_{βδιζη} \Type(j)`, #. for any :math:`i`, :math:`E[Γ] ⊢ \Set ≤_{βδιζη} \Type(i)`, #. :math:`E[Γ] ⊢ \Prop ≤_{βδιζη} \Set`, hence, by transitivity, :math:`E[Γ] ⊢ \Prop ≤_{βδιζη} \Type(i)`, for any :math:`i` (note: :math:`\SProp` is not related by cumulativity to any other term) #. if :math:`E[Γ] ⊢ T =_{βδιζη} U` and :math:`E[Γ::(x:T)] ⊢ T' ≤_{βδιζη} U'` then :math:`E[Γ] ⊢ ∀x:T,~T′ ≤_{βδιζη} ∀ x:U,~U′`. #. if :math:`\ind{p}{Γ_I}{Γ_C}` is a universe polymorphic and cumulative (see Chapter :ref:`polymorphicuniverses`) inductive type (see below) and :math:`(t : ∀Γ_P ,∀Γ_{\mathit{Arr}(t)}, S)∈Γ_I` and :math:`(t' : ∀Γ_P' ,∀Γ_{\mathit{Arr}(t)}', S')∈Γ_I` are two different instances of *the same* inductive type (differing only in universe levels) with constructors .. math:: [c_1 : ∀Γ_P ,∀ T_{1,1} … T_{1,n_1} ,~t~v_{1,1} … v_{1,m} ;~…;~ c_k : ∀Γ_P ,∀ T_{k,1} … T_{k,n_k} ,~t~v_{k,1} … v_{k,m} ] and .. math:: [c_1 : ∀Γ_P' ,∀ T_{1,1}' … T_{1,n_1}' ,~t'~v_{1,1}' … v_{1,m}' ;~…;~ c_k : ∀Γ_P' ,∀ T_{k,1}' … T_{k,n_k}' ,~t'~v_{k,1}' … v_{k,m}' ] respectively then .. math:: E[Γ] ⊢ t~w_1 … w_m ≤_{βδιζη} t'~w_1' … w_m' (notice that :math:`t` and :math:`t'` are both fully applied, i.e., they have a sort as a type) if .. math:: E[Γ] ⊢ w_i =_{βδιζη} w_i' for :math:`1 ≤ i ≤ m` and we have .. math:: E[Γ] ⊢ T_{i,j} ≤_{βδιζη} T_{i,j}' and .. math:: E[Γ] ⊢ A_i ≤_{βδιζη} A_i' where :math:`Γ_{\mathit{Arr}(t)} = [a_1 : A_1 ;~ … ;~a_l : A_l ]` and :math:`Γ_{\mathit{Arr}(t)}' = [a_1 : A_1';~ … ;~a_l : A_l']`. The conversion rule up to subtyping is now exactly: .. inference:: Conv E[Γ] ⊢ U : s E[Γ] ⊢ t : T E[Γ] ⊢ T ≤_{βδιζη} U -------------- E[Γ] ⊢ t : U .. 
_Normal-form: **Normal form**. A term which cannot be any more reduced is said to be in *normal form*. There are several ways (or strategies) to apply the reduction rules. Among them, we have to mention the *head reduction* which will play an important role (see Chapter :ref:`tactics`). Any term :math:`t` can be written as :math:`λ x_1 :T_1 .~… λ x_k :T_k .~(t_0~t_1 … t_n )` where :math:`t_0` is not an application. We say then that :math:`t_0` is the *head of* :math:`t`. If we assume that :math:`t_0` is :math:`λ x:T.~u_0` then one step of β-head reduction of :math:`t` is: .. math:: λ x_1 :T_1 .~… λ x_k :T_k .~(λ x:T.~u_0~t_1 … t_n ) ~\triangleright~ λ (x_1 :T_1 )…(x_k :T_k ).~(\subst{u_0}{x}{t_1}~t_2 … t_n ) Iterating the process of head reduction until the head of the reduced term is no more an abstraction leads to the *β-head normal form* of :math:`t`: .. math:: t \triangleright … \triangleright λ x_1 :T_1 .~…λ x_k :T_k .~(v~u_1 … u_m ) where :math:`v` is not an abstraction (nor an application). Note that the head normal form must not be confused with the normal form since some :math:`u_i` can be reducible. Similar notions of head-normal forms involving δ, ι and ζ reductions or any combination of those can also be defined. .. _Admissible-rules-for-global-environments: Admissible rules for global environments -------------------------------------------- From the original rules of the type system, one can show the admissibility of rules which change the local context of definition of objects in the global environment. We show here the admissible rules that are used in the discharge mechanism at the end of a section. .. _Abstraction: **Abstraction.** One can modify a global declaration by generalizing it over a previously assumed constant :math:`c`. For doing that, we need to modify the reference to the global declaration in the subsequent global environment and local context by explicitly applying this constant to the constant :math:`c`. Below, if :math:`Γ` is a context of the form :math:`[y_1 :A_1 ;~…;~y_n :A_n]`, we write :math:`∀x:U,~\subst{Γ}{c}{x}` to mean :math:`[y_1 :∀ x:U,~\subst{A_1}{c}{x};~…;~y_n :∀ x:U,~\subst{A_n}{c}{x}]` and :math:`\subst{E}{|Γ|}{|Γ|c}` to mean the parallel substitution :math:`E\{y_1 /(y_1~c)\}…\{y_n/(y_n~c)\}`. .. _First-abstracting-property: **First abstracting property:** .. math:: \frac{\WF{E;~c:U;~E′;~c′:=t:T;~E″}{Γ}} {\WF{E;~c:U;~E′;~c′:=λ x:U.~\subst{t}{c}{x}:∀x:U,~\subst{T}{c}{x};~\subst{E″}{c′}{(c′~c)}} {\subst{Γ}{c′}{(c′~c)}}} .. math:: \frac{\WF{E;~c:U;~E′;~c′:T;~E″}{Γ}} {\WF{E;~c:U;~E′;~c′:∀ x:U,~\subst{T}{c}{x};~\subst{E″}{c′}{(c′~c)}}{\subst{Γ}{c′}{(c′~c)}}} .. math:: \frac{\WF{E;~c:U;~E′;~\ind{p}{Γ_I}{Γ_C};~E″}{Γ}} {\WFTWOLINES{E;~c:U;~E′;~\ind{p+1}{∀ x:U,~\subst{Γ_I}{c}{x}}{∀ x:U,~\subst{Γ_C}{c}{x}};~ \subst{E″}{|Γ_I ;Γ_C |}{|Γ_I ;Γ_C | c}} {\subst{Γ}{|Γ_I ;Γ_C|}{|Γ_I ;Γ_C | c}}} One can similarly modify a global declaration by generalizing it over a previously defined constant :math:`c`. Below, if :math:`Γ` is a context of the form :math:`[y_1 :A_1 ;~…;~y_n :A_n]`, we write :math:`\subst{Γ}{c}{u}` to mean :math:`[y_1 :\subst{A_1} {c}{u};~…;~y_n:\subst{A_n} {c}{u}]`. .. _Second-abstracting-property: **Second abstracting property:** .. math:: \frac{\WF{E;~c:=u:U;~E′;~c′:=t:T;~E″}{Γ}} {\WF{E;~c:=u:U;~E′;~c′:=(\letin{x}{u:U}{\subst{t}{c}{x}}):\subst{T}{c}{u};~E″}{Γ}} .. math:: \frac{\WF{E;~c:=u:U;~E′;~c′:T;~E″}{Γ}} {\WF{E;~c:=u:U;~E′;~c′:\subst{T}{c}{u};~E″}{Γ}} .. 
math:: \frac{\WF{E;~c:=u:U;~E′;~\ind{p}{Γ_I}{Γ_C};~E″}{Γ}} {\WF{E;~c:=u:U;~E′;~\ind{p}{\subst{Γ_I}{c}{u}}{\subst{Γ_C}{c}{u}};~E″}{Γ}} .. _Pruning-the-local-context: **Pruning the local context.** If one abstracts or substitutes constants with the above rules then it may happen that some declared or defined constant does not occur any more in the subsequent global environment and in the local context. One can consequently derive the following property. .. _First-pruning-property: .. inference:: First pruning property: \WF{E;~c:U;~E′}{Γ} c~\kw{does not occur in}~E′~\kw{and}~Γ -------------------------------------- \WF{E;E′}{Γ} .. _Second-pruning-property: .. inference:: Second pruning property: \WF{E;~c:=u:U;~E′}{Γ} c~\kw{does not occur in}~E′~\kw{and}~Γ -------------------------------------- \WF{E;E′}{Γ} .. _The-Calculus-of-Inductive-Construction-with-impredicative-Set: The Calculus of Inductive Constructions with impredicative Set ----------------------------------------------------------------- Coq can be used as a type checker for the Calculus of Inductive Constructions with an impredicative sort :math:`\Set` by using the compiler option ``-impredicative-set``. For example, using the ordinary `coqtop` command, the following is rejected, .. example:: .. coqtop:: all Fail Definition id: Set := forall X:Set,X->X. while it will type check, if one uses instead the `coqtop` ``-impredicative-set`` option.. The major change in the theory concerns the rule for product formation in the sort :math:`\Set`, which is extended to a domain in any sort: .. inference:: ProdImp E[Γ] ⊢ T : s s ∈ \Sort E[Γ::(x:T)] ⊢ U : \Set --------------------- E[Γ] ⊢ ∀ x:T,~U : \Set This extension has consequences on the inductive definitions which are allowed. In the impredicative system, one can build so-called *large inductive definitions* like the example of second-order existential quantifier (:g:`exSet`). There should be restrictions on the eliminations which can be performed on such definitions. The elimination rules in the impredicative system for sort :math:`\Set` become: .. inference:: Set1 s ∈ \{\Prop, \Set\} ----------------- [I:\Set|I→ s] .. inference:: Set2 I~\kw{is a small inductive definition} s ∈ \{\Type(i)\} ---------------- [I:\Set|I→ s] coq-8.15.0/doc/sphinx/language/coq-library.rst000066400000000000000000000774221417001151100212110ustar00rootroot00000000000000.. _thecoqlibrary: The Coq library ================= .. index:: single: Theories The Coq library has two parts: * The :gdef:`prelude`: definitions and theorems for the most commonly used elementary logical notions and data types. Coq normally loads these files automatically when it starts. * The :gdef:`standard library`: general-purpose libraries with definitions and theorems for sets, lists, sorting, arithmetic, etc. To use these files, users must load them explicitly with the ``Require`` command (see :ref:`compiled-files`) There are also many libraries provided by Coq users' community. These libraries and developments are available for download at http://coq.inria.fr (see :ref:`userscontributions`). This chapter briefly reviews the Coq libraries whose contents can also be browsed at http://coq.inria.fr/stdlib/. The prelude ----------- This section lists the basic notions and results which are directly available in the standard Coq system. 
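.. example:: Prelude notions available at startup

   An illustrative sketch only (not part of the original text): in a fresh session, the following commands are accepted without any ``Require``, since the corresponding notions (``nat`` with its ``+`` notation, the connectives and equality) come from the prelude. The displayed types are the expected ones, but exact output may vary between versions.

   .. coqtop:: all

      Check (1 + 1).
      Check True /\ False.
      Check (fun n : nat => n = 0).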
Most of these constructions are defined in the ``Prelude`` module in directory ``theories/Init`` in the Coq root directory; this includes the modules ``Notations``, ``Logic``, ``Datatypes``, ``Specif``, ``Peano``, ``Wf`` and ``Tactics``. Module ``Logic_Type`` also makes it in the initial state. .. _init-notations: Notations ~~~~~~~~~ This module defines the parsing and pretty-printing of many symbols (infixes, prefixes, etc.). However, it does not assign a meaning to these notations. The purpose of this is to define and fix once for all the precedence and associativity of very common notations. The main notations fixed in the initial state are : ================ ============ =============== Notation Precedence Associativity ================ ============ =============== ``_ -> _`` 99 right ``_ <-> _`` 95 no ``_ \/ _`` 85 right ``_ /\ _`` 80 right ``~ _`` 75 right ``_ = _`` 70 no ``_ = _ = _`` 70 no ``_ = _ :> _`` 70 no ``_ <> _`` 70 no ``_ <> _ :> _`` 70 no ``_ < _`` 70 no ``_ > _`` 70 no ``_ <= _`` 70 no ``_ >= _`` 70 no ``_ < _ < _`` 70 no ``_ < _ <= _`` 70 no ``_ <= _ < _`` 70 no ``_ <= _ <= _`` 70 no ``_ + _`` 50 left ``_ || _`` 50 left ``_ - _`` 50 left ``_ * _`` 40 left ``_ _`` 40 left ``_ / _`` 40 left ``- _`` 35 right ``/ _`` 35 right ``_ ^ _`` 30 right ================ ============ =============== .. _coq-library-logic: Logic ~~~~~ `Logic.v` in the basic library of Coq has the definitions of standard (intuitionistic) logical connectives defined as inductive constructions. They are equipped with an appealing syntax enriching the subclass :token:`form` of the syntactic class :token:`term`. The constructs for :production:`form` are: ============================================== ======= True True False False :n:`~ @form` not :n:`@form /\ @form` and :n:`@form \/ @form` or :n:`@form -> @form` primitive implication :n:`@form <-> @form` iff :n:`forall @ident : @type, @form` primitive for all :n:`exists @ident {? @specif}, @form` ex :n:`exists2 @ident {? @specif}, @form & @form` ex2 :n:`@term = @term` eq :n:`@term = @term :> @specif` eq ============================================== ======= .. note:: Implication is not defined but primitive (it is a non-dependent product of a proposition over another proposition). There is also a primitive universal quantification (it is a dependent product over a proposition). The primitive universal quantification allows both first-order and higher-order quantification. Propositional Connectives +++++++++++++++++++++++++ .. index:: single: Connectives single: True (term) single: I (term) single: False (term) single: not (term) single: and (term) single: conj (term) single: proj1 (term) single: proj2 (term) single: or (term) single: or_introl (term) single: or_intror (term) single: iff (term) single: IF_then_else (term) First, we find propositional calculus connectives. At times, it's helpful to know exactly what these notations represent. .. coqdoc:: Inductive True : Prop := I. Inductive False : Prop := . Definition not (A: Prop) := A -> False. Inductive and (A B:Prop) : Prop := conj (_:A) (_:B). Section Projections. Variables A B : Prop. Theorem proj1 : A /\ B -> A. Theorem proj2 : A /\ B -> B. End Projections. Inductive or (A B:Prop) : Prop := | or_introl (_:A) | or_intror (_:B). Definition iff (P Q:Prop) := (P -> Q) /\ (Q -> P). Definition IF_then_else (P Q R:Prop) := P /\ Q \/ ~ P /\ R. Quantifiers +++++++++++ .. 
index:: single: Quantifiers single: all (term) single: ex (term) single: exists (term) single: ex_intro (term) single: ex2 (term) single: exists2 (term) single: ex_intro2 (term) Then we find first-order quantifiers: .. coqtop:: in Definition all (A:Set) (P:A -> Prop) := forall x:A, P x. Inductive ex (A: Set) (P:A -> Prop) : Prop := ex_intro (x:A) (_:P x). Inductive ex2 (A:Set) (P Q:A -> Prop) : Prop := ex_intro2 (x:A) (_:P x) (_:Q x). The following abbreviations are allowed: ====================== ======================================= ``exists x:A, P`` ``ex A (fun x:A => P)`` ``exists x, P`` ``ex _ (fun x => P)`` ``exists2 x:A, P & Q`` ``ex2 A (fun x:A => P) (fun x:A => Q)`` ``exists2 x, P & Q`` ``ex2 _ (fun x => P) (fun x => Q)`` ====================== ======================================= The type annotation ``:A`` can be omitted when ``A`` can be synthesized by the system. .. _coq-equality: Equality ++++++++ .. index:: single: Equality single: eq (term) single: eq_refl (term) Then, we find equality, defined as an inductive relation. That is, given a type ``A`` and an ``x`` of type ``A``, the predicate :g:`(eq A x)` is the smallest one which contains ``x``. This definition, due to Christine Paulin-Mohring, is equivalent to define ``eq`` as the smallest reflexive relation, and it is also equivalent to Leibniz' equality. .. coqtop:: in Inductive eq (A:Type) (x:A) : A -> Prop := eq_refl : eq A x x. Lemmas ++++++ Finally, a few easy lemmas are provided. .. index:: single: absurd (term) single: eq_sym (term) single: eq_trans (term) single: f_equal (term) single: sym_not_eq (term) single: eq_ind_r (term) single: eq_rec_r (term) single: eq_rect (term) single: eq_rect_r (term) .. coqdoc:: Theorem absurd : forall A C:Prop, A -> ~ A -> C. Section equality. Variables A B : Type. Variable f : A -> B. Variables x y z : A. Theorem eq_sym : x = y -> y = x. Theorem eq_trans : x = y -> y = z -> x = z. Theorem f_equal : x = y -> f x = f y. Theorem not_eq_sym : x <> y -> y <> x. End equality. Definition eq_ind_r : forall (A:Type) (x:A) (P:A->Prop), P x -> forall y:A, y = x -> P y. Definition eq_rec_r : forall (A:Type) (x:A) (P:A->Set), P x -> forall y:A, y = x -> P y. Definition eq_rect_r : forall (A:Type) (x:A) (P:A->Type), P x -> forall y:A, y = x -> P y. Hint Immediate eq_sym not_eq_sym : core. .. index:: single: f_equal2 ... f_equal5 (term) The theorem ``f_equal`` is extended to functions with two to five arguments. The theorem are names ``f_equal2``, ``f_equal3``, ``f_equal4`` and ``f_equal5``. For instance ``f_equal3`` is defined the following way. .. coqtop:: in abort Theorem f_equal3 : forall (A1 A2 A3 B:Type) (f:A1 -> A2 -> A3 -> B) (x1 y1:A1) (x2 y2:A2) (x3 y3:A3), x1 = y1 -> x2 = y2 -> x3 = y3 -> f x1 x2 x3 = f y1 y2 y3. .. _datatypes: Datatypes ~~~~~~~~~ .. index:: single: Datatypes In the basic library, we find in ``Datatypes.v`` the definition of the basic data-types of programming, defined as inductive constructions over the sort ``Set``. Some of them come with a special syntax shown below (this syntax table is common with the next section :ref:`specification`). 
The constructs for :production:`specif` are: ============================================= ======= :n:`@specif * @specif` prod :n:`@specif + @specif` sum :n:`@specif + { @specif }` sumor :n:`{ @specif } + { @specif }` sumbool :n:`{ @ident : @specif | @form }` sig :n:`{ @ident : @specif | @form & @form }` sig2 :n:`{ @ident : @specif & @specif }` sigT :n:`{ @ident : @specif & @specif & @specif }` sigT2 ============================================= ======= The notation for pairs (elements of type prod) is: :n:`(@term, @term)` Programming +++++++++++ .. index:: single: Programming single: unit (term) single: tt (term) single: bool (term) single: true (term) single: false (term) single: nat (term) single: O (term) single: S (term) single: option (term) single: Some (term) single: None (term) single: identity (term) single: refl_identity (term) .. coqtop:: in Inductive unit : Set := tt. Inductive bool : Set := true | false. Inductive nat : Set := O | S (n:nat). Inductive option (A:Set) : Set := Some (_:A) | None. Inductive identity (A:Type) (a:A) : A -> Type := refl_identity : identity A a a. Note that zero is the letter ``O``, and *not* the numeral ``0``. The predicate ``identity`` is logically equivalent to equality but it lives in sort ``Type``. It is mainly maintained for compatibility. We then define the disjoint sum of ``A+B`` of two sets ``A`` and ``B``, and their product ``A*B``. .. index:: single: sum (term) single: A+B (term) single: + (term) single: inl (term) single: inr (term) single: prod (term) single: A*B (term) single: * (term) single: pair (term) single: fst (term) single: snd (term) .. coqtop:: in Inductive sum (A B:Set) : Set := inl (_:A) | inr (_:B). Inductive prod (A B:Set) : Set := pair (_:A) (_:B). Section projections. Variables A B : Set. Definition fst (H: prod A B) := match H with | pair _ _ x y => x end. Definition snd (H: prod A B) := match H with | pair _ _ x y => y end. End projections. Some operations on ``bool`` are also provided: ``andb`` (with infix notation ``&&``), ``orb`` (with infix notation ``||``), ``xorb``, ``implb`` and ``negb``. .. _specification: Specification ~~~~~~~~~~~~~ The following notions defined in module ``Specif.v`` allow to build new data-types and specifications. They are available with the syntax shown in the previous section :ref:`datatypes`. For instance, given :g:`A:Type` and :g:`P:A->Prop`, the construct :g:`{x:A | P x}` (in abstract syntax :g:`(sig A P)`) is a ``Type``. We may build elements of this set as :g:`(exist x p)` whenever we have a witness :g:`x:A` with its justification :g:`p:P x`. From such a :g:`(exist x p)` we may in turn extract its witness :g:`x:A` (using an elimination construct such as ``match``) but *not* its justification, which stays hidden, like in an abstract data-type. In technical terms, one says that ``sig`` is a *weak (dependent) sum*. A variant ``sig2`` with two predicates is also provided. .. index:: single: {x:A | P x} (term) single: sig (term) single: exist (term) single: sig2 (term) single: exist2 (term) .. coqtop:: in Inductive sig (A:Set) (P:A -> Prop) : Set := exist (x:A) (_:P x). Inductive sig2 (A:Set) (P Q:A -> Prop) : Set := exist2 (x:A) (_:P x) (_:Q x). A *strong (dependent) sum* :g:`{x:A & P x}` may be also defined, when the predicate ``P`` is now defined as a constructor of types in ``Type``. .. index:: single: {x:A & P x} (term) single: sigT (term) single: existT (term) single: sigT2 (term) single: existT2 (term) single: projT1 (term) single: projT2 (term) .. 
coqtop:: in Inductive sigT (A:Type) (P:A -> Type) : Type := existT (x:A) (_:P x). Section Projections2. Variable A : Type. Variable P : A -> Type. Definition projT1 (H:sigT A P) := let (x, h) := H in x. Definition projT2 (H:sigT A P) := match H return P (projT1 H) with existT _ _ x h => h end. End Projections2. Inductive sigT2 (A: Type) (P Q:A -> Type) : Type := existT2 (x:A) (_:P x) (_:Q x). A related non-dependent construct is the constructive sum :g:`{A}+{B}` of two propositions ``A`` and ``B``. .. index:: single: sumbool (term) single: left (term) single: right (term) single: {A}+{B} (term) .. coqtop:: in Inductive sumbool (A B:Prop) : Set := left (_:A) | right (_:B). This ``sumbool`` construct may be used as a kind of indexed boolean data-type. An intermediate between ``sumbool`` and ``sum`` is the mixed ``sumor`` which combines :g:`A:Set` and :g:`B:Prop` in the construction :g:`A+{B}` in ``Set``. .. index:: single: sumor (term) single: inleft (term) single: inright (term) single: A+{B} (term) .. coqtop:: in Inductive sumor (A:Set) (B:Prop) : Set := | inleft (_:A) | inright (_:B). We may define variants of the axiom of choice, like in Martin-Löf's Intuitionistic Type Theory. .. index:: single: Choice (term) single: Choice2 (term) single: bool_choice (term) .. coqdoc:: Lemma Choice : forall (S S':Set) (R:S -> S' -> Prop), (forall x:S, {y : S' | R x y}) -> {f : S -> S' | forall z:S, R z (f z)}. Lemma Choice2 : forall (S S':Set) (R:S -> S' -> Set), (forall x:S, {y : S' & R x y}) -> {f : S -> S' & forall z:S, R z (f z)}. Lemma bool_choice : forall (S:Set) (R1 R2:S -> Prop), (forall x:S, {R1 x} + {R2 x}) -> {f : S -> bool | forall x:S, f x = true /\ R1 x \/ f x = false /\ R2 x}. The next construct builds a sum between a data-type :g:`A:Type` and an exceptional value encoding errors: .. index:: single: Exc (term) single: value (term) single: error (term) .. coqtop:: in Definition Exc := option. Definition value := Some. Definition error := None. This module ends with theorems, relating the sorts ``Set`` or ``Type`` and ``Prop`` in a way which is consistent with the realizability interpretation. .. index:: single: False_rect (term) single: False_rec (term) single: eq_rect (term) single: absurd_set (term) single: and_rect (term) .. coqdoc:: Definition except := False_rec. Theorem absurd_set : forall (A:Prop) (C:Set), A -> ~ A -> C. Theorem and_rect2 : forall (A B:Prop) (P:Type), (A -> B -> P) -> A /\ B -> P. Basic Arithmetic ~~~~~~~~~~~~~~~~ The basic library includes a few elementary properties of natural numbers, together with the definitions of predecessor, addition and multiplication, in module ``Peano.v``. It also provides a scope ``nat_scope`` gathering standard notations for common operations (``+``, ``*``) and a decimal notation for numbers, allowing for instance to write ``3`` for :g:`S (S (S O)))`. This also works on the left hand side of a ``match`` expression (see for example section :tacn:`refine`). This scope is opened by default. .. example:: The following example is not part of the standard library, but it shows the usage of the notations: .. coqtop:: in reset Fixpoint even (n:nat) : bool := match n with | 0 => true | 1 => false | S (S n) => even n end. .. 
index:: single: eq_S (term) single: pred (term) single: pred_Sn (term) single: eq_add_S (term) single: not_eq_S (term) single: IsSucc (term) single: O_S (term) single: n_Sn (term) single: plus (term) single: plus_n_O (term) single: plus_n_Sm (term) single: mult (term) single: mult_n_O (term) single: mult_n_Sm (term) Now comes the content of module ``Peano``: .. coqdoc:: Theorem eq_S : forall x y:nat, x = y -> S x = S y. Definition pred (n:nat) : nat := match n with | 0 => 0 | S u => u end. Theorem pred_Sn : forall m:nat, m = pred (S m). Theorem eq_add_S : forall n m:nat, S n = S m -> n = m. Hint Immediate eq_add_S : core. Theorem not_eq_S : forall n m:nat, n <> m -> S n <> S m. Definition IsSucc (n:nat) : Prop := match n with | 0 => False | S p => True end. Theorem O_S : forall n:nat, 0 <> S n. Theorem n_Sn : forall n:nat, n <> S n. Fixpoint plus (n m:nat) {struct n} : nat := match n with | 0 => m | S p => S (p + m) end where "n + m" := (plus n m) : nat_scope. Lemma plus_n_O : forall n:nat, n = n + 0. Lemma plus_n_Sm : forall n m:nat, S (n + m) = n + S m. Fixpoint mult (n m:nat) {struct n} : nat := match n with | 0 => 0 | S p => m + p * m end where "n * m" := (mult n m) : nat_scope. Lemma mult_n_O : forall n:nat, 0 = n * 0. Lemma mult_n_Sm : forall n m:nat, n * m + n = n * (S m). Finally, it gives the definition of the usual orderings ``le``, ``lt``, ``ge`` and ``gt``. .. index:: single: le (term) single: le_n (term) single: le_S (term) single: lt (term) single: ge (term) single: gt (term) .. This emits a notation already used warning but it won't be shown to the user. .. coqtop:: in warn Inductive le (n:nat) : nat -> Prop := | le_n : le n n | le_S : forall m:nat, n <= m -> n <= (S m) where "n <= m" := (le n m) : nat_scope. Definition lt (n m:nat) := S n <= m. Definition ge (n m:nat) := m <= n. Definition gt (n m:nat) := m < n. Properties of these relations are not initially known, but may be required by the user from modules ``Le`` and ``Lt``. Finally, ``Peano`` gives some lemmas allowing pattern matching, and a double induction principle. .. index:: single: nat_case (term) single: nat_double_ind (term) .. coqdoc:: Theorem nat_case : forall (n:nat) (P:nat -> Prop), P 0 -> (forall m:nat, P (S m)) -> P n. Theorem nat_double_ind : forall R:nat -> nat -> Prop, (forall n:nat, R 0 n) -> (forall n:nat, R (S n) 0) -> (forall n m:nat, R n m -> R (S n) (S m)) -> forall n m:nat, R n m. Well-founded recursion ~~~~~~~~~~~~~~~~~~~~~~ The basic library contains the basics of well-founded recursion and well-founded induction, in module ``Wf.v``. .. index:: single: Well foundedness single: Recursion single: Well founded induction single: Acc (term) single: Acc_inv (term) single: Acc_rect (term) single: well_founded (term) .. coqdoc:: Section Well_founded. Variable A : Type. Variable R : A -> A -> Prop. Inductive Acc (x:A) : Prop := Acc_intro : (forall y:A, R y x -> Acc y) -> Acc x. Lemma Acc_inv x : Acc x -> forall y:A, R y x -> Acc y. Definition well_founded := forall a:A, Acc a. Hypothesis Rwf : well_founded. Theorem well_founded_induction : forall P:A -> Set, (forall x:A, (forall y:A, R y x -> P y) -> P x) -> forall a:A, P a. Theorem well_founded_ind : forall P:A -> Prop, (forall x:A, (forall y:A, R y x -> P y) -> P x) -> forall a:A, P a. The automatically generated scheme ``Acc_rect`` can be used to define functions by fixpoints using well-founded relations to justify termination. Assuming extensionality of the functional used for the recursive call, the fixpoint equation can be proved. .. 
index:: single: Fix_F (term) single: Fix_eq (term) single: Fix_F_inv (term) single: Fix_F_eq (term) .. coqdoc:: Section FixPoint. Variable P : A -> Type. Variable F : forall x:A, (forall y:A, R y x -> P y) -> P x. Fixpoint Fix_F (x:A) (r:Acc x) {struct r} : P x := F x (fun (y:A) (p:R y x) => Fix_F y (Acc_inv x r y p)). Definition Fix (x:A) := Fix_F x (Rwf x). Hypothesis F_ext : forall (x:A) (f g:forall y:A, R y x -> P y), (forall (y:A) (p:R y x), f y p = g y p) -> F x f = F x g. Lemma Fix_F_eq : forall (x:A) (r:Acc x), F x (fun (y:A) (p:R y x) => Fix_F y (Acc_inv x r y p)) = Fix_F x r. Lemma Fix_F_inv : forall (x:A) (r s:Acc x), Fix_F x r = Fix_F x s. Lemma Fix_eq : forall x:A, Fix x = F x (fun (y:A) (p:R y x) => Fix y). End FixPoint. End Well_founded. Accessing the Type level ~~~~~~~~~~~~~~~~~~~~~~~~ The standard library includes ``Type`` level definitions of counterparts of some logic concepts and basic lemmas about them. The module ``Datatypes`` defines ``identity``, which is the ``Type`` level counterpart of equality: .. index:: single: identity (term) .. coqtop:: in Inductive identity (A:Type) (a:A) : A -> Type := identity_refl : identity A a a. Some properties of ``identity`` are proved in the module ``Logic_Type``, which also provides the definition of ``Type`` level negation: .. index:: single: notT (term) .. coqtop:: in Definition notT (A:Type) := A -> False. Tactics ~~~~~~~ A few tactics defined at the user level are provided in the initial state, in module ``Tactics.v``. They are listed at http://coq.inria.fr/stdlib, in paragraph ``Init``, link ``Tactics``. The standard library -------------------- Survey ~~~~~~ The rest of the standard library is structured into the following subdirectories: * **Logic** : Classical logic and dependent equality * **Arith** : Basic Peano arithmetic * **PArith** : Basic positive integer arithmetic * **NArith** : Basic binary natural number arithmetic * **ZArith** : Basic relative integer arithmetic * **Numbers** : Various approaches to natural, integer and cyclic numbers (currently axiomatically and on top of 2^31 binary words) * **Bool** : Booleans (basic functions and results) * **Lists** : Monomorphic and polymorphic lists (basic functions and results), Streams (infinite sequences defined with coinductive types) * **Sets** : Sets (classical, constructive, finite, infinite, power set, etc.) * **FSets** : Specification and implementations of finite sets and finite maps (by lists and by AVL trees) * **Reals** : Axiomatization of real numbers (classical, basic functions, integer part, fractional part, limit, derivative, Cauchy series, power series and results,...) * **Floats** : Machine implementation of floating-point arithmetic (for the binary64 format) * **Relations** : Relations (definitions and basic results) * **Sorting** : Sorted list (basic definitions and heapsort correctness) * **Strings** : 8-bits characters and strings * **Wellfounded** : Well-founded relations (basic results) These directories belong to the initial load path of the system, and the modules they provide are compiled at installation time. So they are directly accessible with the command ``Require`` (see Section :ref:`compiled-files`). The different modules of the Coq standard library are documented online at https://coq.inria.fr/stdlib. Peano’s arithmetic (nat) ~~~~~~~~~~~~~~~~~~~~~~~~ .. 
index:: single: Peano's arithmetic single: nat_scope While in the initial state, many operations and predicates of Peano's arithmetic are defined, further operations and results belong to other modules. For instance, the decidability of the basic predicates are defined here. This is provided by requiring the module ``Arith``. The following table describes the notations available in scope ``nat_scope`` : =============== =================== Notation Interpretation =============== =================== ``_ < _`` ``lt`` ``_ <= _`` ``le`` ``_ > _`` ``gt`` ``_ >= _`` ``ge`` ``x < y < z`` ``x < y /\ y < z`` ``x < y <= z`` ``x < y /\ y <= z`` ``x <= y < z`` ``x <= y /\ y < z`` ``x <= y <= z`` ``x <= y /\ y <= z`` ``_ + _`` ``plus`` ``_ - _`` ``minus`` ``_ * _`` ``mult`` =============== =================== Notations for integer arithmetic ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. index:: single: Arithmetical notations single: + (term) single: * (term) single: - (term) singel: / (term) single: <= (term) single: >= (term) single: < (term) single: > (term) single: ?= (term) single: mod (term) The following table describes the syntax of expressions for integer arithmetic. It is provided by requiring and opening the module ``ZArith`` and opening scope ``Z_scope``. It specifies how notations are interpreted and, when not already reserved, the precedence and associativity. =============== ==================== ========== ============= Notation Interpretation Precedence Associativity =============== ==================== ========== ============= ``_ < _`` ``Z.lt`` ``_ <= _`` ``Z.le`` ``_ > _`` ``Z.gt`` ``_ >= _`` ``Z.ge`` ``x < y < z`` ``x < y /\ y < z`` ``x < y <= z`` ``x < y /\ y <= z`` ``x <= y < z`` ``x <= y /\ y < z`` ``x <= y <= z`` ``x <= y /\ y <= z`` ``_ ?= _`` ``Z.compare`` 70 no ``_ + _`` ``Z.add`` ``_ - _`` ``Z.sub`` ``_ * _`` ``Z.mul`` ``_ / _`` ``Z.div`` ``_ mod _`` ``Z.modulo`` 40 no ``- _`` ``Z.opp`` ``_ ^ _`` ``Z.pow`` =============== ==================== ========== ============= .. example:: .. coqtop:: all reset Require Import ZArith. Check (2 + 3)%Z. Open Scope Z_scope. Check 2 + 3. Real numbers library ~~~~~~~~~~~~~~~~~~~~ Notations for real numbers ++++++++++++++++++++++++++ This is provided by requiring and opening the module ``Reals`` and opening scope ``R_scope``. This set of notations is very similar to the notation for integer arithmetic. The inverse function was added. =============== =================== Notation Interpretation =============== =================== ``_ < _`` ``Rlt`` ``_ <= _`` ``Rle`` ``_ > _`` ``Rgt`` ``_ >= _`` ``Rge`` ``x < y < z`` ``x < y /\ y < z`` ``x < y <= z`` ``x < y /\ y <= z`` ``x <= y < z`` ``x <= y /\ y < z`` ``x <= y <= z`` ``x <= y /\ y <= z`` ``_ + _`` ``Rplus`` ``_ - _`` ``Rminus`` ``_ * _`` ``Rmult`` ``_ / _`` ``Rdiv`` ``- _`` ``Ropp`` ``/ _`` ``Rinv`` ``_ ^ _`` ``pow`` =============== =================== .. example:: .. coqtop:: all reset Require Import Reals. Check (2 + 3)%R. Open Scope R_scope. Check 2 + 3. Some tactics for real numbers +++++++++++++++++++++++++++++ In addition to the powerful ``ring``, ``field`` and ``lra`` tactics (see Chapter :ref:`tactics`), there are also: .. tacn:: discrR Proves that two real integer constants are different. .. example:: .. coqtop:: all reset Require Import DiscrR. Open Scope R_scope. Goal 5 <> 0. discrR. .. tacn:: split_Rabs Allows unfolding the ``Rabs`` constant and splits corresponding conjunctions. .. example:: .. coqtop:: all reset Require Import Reals. Open Scope R_scope. Goal forall x:R, x <= Rabs x. 
intro; split_Rabs. .. tacn:: split_Rmult Splits a condition that a product is non-null into subgoals corresponding to the condition on each operand of the product. .. example:: .. coqtop:: all reset Require Import Reals. Open Scope R_scope. Goal forall x y z:R, x * y * z <> 0. intros; split_Rmult. These tactics has been written with the tactic language |Ltac| described in Chapter :ref:`ltac`. List library ~~~~~~~~~~~~ .. index:: single: Notations for lists single: length (term) single: head (term) single: tail (term) single: app (term) single: rev (term) single: nth (term) single: map (term) single: flat_map (term) single: fold_left (term) single: fold_right (term) Some elementary operations on polymorphic lists are defined here. They can be accessed by requiring module ``List``. It defines the following notions: * ``length`` * ``head`` : first element (with default) * ``tail`` : all but first element * ``app`` : concatenation * ``rev`` : reverse * ``nth`` : accessing n-th element (with default) * ``map`` : applying a function * ``flat_map`` : applying a function returning lists * ``fold_left`` : iterator (from head to tail) * ``fold_right`` : iterator (from tail to head) The following table shows notations available when opening scope ``list_scope``. ========== ============== ========== ============= Notation Interpretation Precedence Associativity ========== ============== ========== ============= ``_ ++ _`` ``app`` 60 right ``_ :: _`` ``cons`` 60 right ========== ============== ========== ============= .. _floats_library: Floats library ~~~~~~~~~~~~~~ The library of primitive floating-point arithmetic can be loaded by requiring module ``Floats``: .. coqtop:: in Require Import Floats. It exports the module ``PrimFloat`` that provides a primitive type named ``float``, defined in the kernel (see section :ref:`primitive-floats`), as well as two variant types ``float_comparison`` and ``float_class``: .. coqtop:: all Print float. Print float_comparison. Print float_class. It then defines the primitive operators below, using the processor floating-point operators for binary64 in rounding-to-nearest even: * ``abs`` * ``opp`` * ``sub`` * ``add`` * ``mul`` * ``div`` * ``sqrt`` * ``compare`` : compare two floats and return a ``float_comparison`` * ``classify`` : analyze a float and return a ``float_class`` * ``of_int63`` : round a primitive integer and convert it into a float * ``normfr_mantissa`` : take a float in ``[0.5; 1.0)`` and return its mantissa * ``frshiftexp`` : convert a float to fractional part in ``[0.5; 1.0)`` and integer part * ``ldshiftexp`` : multiply a float by an integral power of ``2`` * ``next_up`` : return the next float towards positive infinity * ``next_down`` : return the next float towards negative infinity For special floating-point values, the following constants are also defined: * ``zero`` * ``neg_zero`` * ``one`` * ``two`` * ``infinity`` * ``neg_infinity`` * ``nan`` : Not a Number (assumed to be unique: the "payload" of NaNs is ignored) The following table shows the notations available when opening scope ``float_scope``. =========== ============== Notation Interpretation =========== ============== ``- _`` ``opp`` ``_ - _`` ``sub`` ``_ + _`` ``add`` ``_ * _`` ``mul`` ``_ / _`` ``div`` ``_ =? _`` ``eqb`` ``_  t` can be shortened in :g:`fun x y z : A => t`). .. index:: fun .. index:: forall Functions (fun) and function types (forall) ------------------------------------------- .. insertprodn term_forall_or_fun term_forall_or_fun .. 
prodn:: term_forall_or_fun ::= forall @open_binders , @type | fun @open_binders => @term The expression :n:`fun @ident : @type => @term` defines the *abstraction* of the variable :n:`@ident`, of type :n:`@type`, over the term :n:`@term`. It denotes a function of the variable :n:`@ident` that evaluates to the expression :n:`@term` (e.g. :g:`fun x : A => x` denotes the identity function on type :g:`A`). The keyword :g:`fun` can be followed by several binders as given in Section :ref:`binders`. Functions over several variables are equivalent to an iteration of one-variable functions. For instance the expression :n:`fun {+ @ident__i } : @type => @term` denotes the same function as :n:`{+ fun @ident__i : @type => } @term`. If a let-binder occurs in the list of binders, it is expanded to a let-in definition (see Section :ref:`let-in`). The expression :n:`forall @ident : @type__1, @type__2` denotes the :gdef:`product type ` (or *product*) of the variable :n:`@ident` of type :n:`@type__1` over the type :n:`@type__2`. If :n:`@ident` is used in :n:`@type__2`, then we say the expression is a :gdef:`dependent product`. The intention behind a dependent product :g:`forall x : A, B` is twofold. It denotes either the universal quantification of the variable :g:`x` of type :g:`A` in the proposition :g:`B` or the functional dependent product from :g:`A` to :g:`B` (a construction usually written :math:`\Pi_{x:A}.B` in set theory). Non-dependent product types have a special notation: :g:`A -> B` stands for :g:`forall _ : A, B`. *Non-dependent product* is used to denote both propositional implication and function types. These terms are also useful: * `n : nat` is a :gdef:`dependent premise` of `forall n:nat, n + 0 = n` because `n` appears both in the binder of the `forall` and in the quantified statement `n + 0 = n`. * `A` and `B` are :gdef:`non-dependent premises ` (or, often, just ":gdef:`premises `") of `A -> B -> C` because they don't appear in a `forall` binder. `C` is the *conclusion* of the type, which is a second meaning for the term :term:`conclusion`. (As noted, `A -> B` is notation for the term `forall _ : A, B`; the wildcard `_` can't be referred to in the quantified statement.) As for abstractions, :g:`forall` is followed by a binder list, and products over several variables are equivalent to an iteration of one-variable products. Function application -------------------- .. insertprodn term_application arg .. prodn:: term_application ::= @term1 {+ @arg } | @ @qualid_annotated {+ @term1 } arg ::= ( @ident := @term ) | ( @natural := @term ) | @term1 :n:`@term__fun @term` denotes applying the function :n:`@term__fun` to :token:`term`. :n:`@term__fun {+ @term__i }` denotes applying :n:`@term__fun` to the arguments :n:`@term__i`. It is equivalent to :n:`( … ( @term__fun @term__1 ) … ) @term__n`: associativity is to the left. The notations :n:`(@ident := @term)` and :n:`(@natural := @term)` for arguments are used for making explicit the value of implicit arguments (see Section :ref:`explicit-applications`). .. _gallina-assumptions: Assumptions ----------- Assumptions extend the global environment with axioms, parameters, hypotheses or variables. An assumption binds an :n:`@ident` to a :n:`@type`. It is accepted by Coq only if :n:`@type` is a correct type in the global environment before the declaration and if :n:`@ident` was not previously defined in the same module. 
This :n:`@type` is considered to be the type (or specification, or statement) assumed by :n:`@ident` and we say that :n:`@ident` has type :n:`@type`. .. _Axiom: .. cmd:: @assumption_token {? Inline {? ( @natural ) } } {| {+ ( @assumpt ) } | @assumpt } :name: Axiom; Axioms; Conjecture; Conjectures; Hypothesis; Hypotheses; Parameter; Parameters; Variable; Variables .. insertprodn assumption_token of_type .. prodn:: assumption_token ::= {| Axiom | Axioms } | {| Conjecture | Conjectures } | {| Parameter | Parameters } | {| Hypothesis | Hypotheses } | {| Variable | Variables } assumpt ::= {+ @ident_decl } @of_type ident_decl ::= @ident {? @univ_decl } of_type ::= {| : | :> } @type These commands bind one or more :n:`@ident`\(s) to specified :n:`@type`\(s) as their specifications in the global environment. The fact asserted by :n:`@type` (or, equivalently, the existence of an object of this type) is accepted as a postulate. They accept the :attr:`program` attribute. :cmd:`Axiom`, :cmd:`Conjecture`, :cmd:`Parameter` and their plural forms are equivalent. They can take the :attr:`local` :term:`attribute`, which makes the defined :n:`@ident`\s accessible by :cmd:`Import` and its variants only through their fully qualified names. Similarly, :cmd:`Hypothesis`, :cmd:`Variable` and their plural forms are equivalent. Outside of a section, these are equivalent to :n:`Local Parameter`. Inside a section, the :n:`@ident`\s defined are only accessible within the section. When the current section is closed, the :n:`@ident`\(s) become undefined and every object depending on them will be explicitly parameterized (i.e., the variables are *discharged*). See Section :ref:`section-mechanism`. :n:`:>` If specified, :token:`ident_decl` is automatically declared as a coercion to the class of its type. See :ref:`coercions`. The :n:`Inline` clause is only relevant inside functors. See :cmd:`Module`. .. example:: Simple assumptions .. coqtop:: reset in Parameter X Y : Set. Parameter (R : X -> Y -> Prop) (S : Y -> X -> Prop). Axiom R_S_inv : forall x y, R x y <-> S y x. .. exn:: @ident already exists. :name: ‘ident’ already exists. (Axiom) :undocumented: .. warn:: @ident is declared as a local axiom Warning generated when using :cmd:`Variable` or its equivalent instead of :n:`Local Parameter` or its equivalent. .. note:: We advise using the commands :cmd:`Axiom`, :cmd:`Conjecture` and :cmd:`Hypothesis` (and their plural forms) for logical postulates (i.e. when the assertion :n:`@type` is of sort :g:`Prop`), and to use the commands :cmd:`Parameter` and :cmd:`Variable` (and their plural forms) in other cases (corresponding to the declaration of an abstract object of the given type). coq-8.15.0/doc/sphinx/language/core/basic.rst000066400000000000000000000506221417001151100207670ustar00rootroot00000000000000============================= Basic notions and conventions ============================= This section provides some essential notions and conventions for reading the manual. We start by explaining the syntax and lexical conventions used in the manual. Then, we present the essential vocabulary necessary to read the rest of the manual. Other terms are defined throughout the manual. The reader may refer to the :ref:`glossary index ` for a complete list of defined terms. Finally, we describe the various types of settings that Coq provides. Syntax and lexical conventions ------------------------------ .. 
_syntax-conventions: Syntax conventions ~~~~~~~~~~~~~~~~~~ The syntax described in this documentation is equivalent to that accepted by the Coq parser, but the grammar has been edited to improve readability and presentation. In the grammar presented in this manual, the terminal symbols are black (e.g. :n:`forall`), whereas the nonterminals are green, italic and hyperlinked (e.g. :n:`@term`). Some syntax is represented graphically using the following kinds of blocks: :n:`{? item }` An optional item. :n:`{+ item }` A list of one or more items. :n:`{* item }` An optional list of items. :n:`{+s item}` A list of one or more items separated by "s" (e.g. :n:`item__1 s item__2 s item__3`). :n:`{*s item}` An optional list of items separated by "s". :n:`{| item__1 | item__2 | ... }` Alternatives (either :n:`item__1` or :n:`item__2` or ...). `Precedence levels `_ that are implemented in the Coq parser are shown in the documentation by appending the level to the nonterminal name (as in :n:`@term100` or :n:`@ltac_expr3`). .. note:: Coq uses an extensible parser. Plugins and the :ref:`notation system ` can extend the syntax at run time. Some notations are defined in the :term:`prelude`, which is loaded by default. The documented grammar doesn't include these notations. Precedence levels not used by the base grammar are omitted from the documentation, even though they could still be populated by notations or plugins. Furthermore, some parsing rules are only activated in certain contexts (:ref:`proof mode `, :ref:`custom entries `...). .. warning:: Given the complexity of these parsing rules, it would be extremely difficult to create an external program that can properly parse a Coq document. Therefore, tool writers are advised to delegate parsing to Coq, by communicating with it, for instance through `SerAPI `_. .. seealso:: :cmd:`Print Grammar` .. _lexical-conventions: Lexical conventions ~~~~~~~~~~~~~~~~~~~ Blanks Space, newline and horizontal tab are considered blanks. Blanks are ignored but they separate tokens. Comments Comments are enclosed between ``(*`` and ``*)``. They can be nested. They can contain any character. However, embedded :n:`@string` literals must be correctly closed. Comments are treated as blanks. Identifiers Identifiers, written :n:`@ident`, are sequences of letters, digits, ``_`` and ``'``, that do not start with a digit or ``'``. That is, they are recognized by the following grammar (except that the string ``_`` is reserved; it is not a valid identifier): .. insertprodn ident subsequent_letter .. prodn:: ident ::= @first_letter {* @subsequent_letter } first_letter ::= {| a .. z | A .. Z | _ | @unicode_letter } subsequent_letter ::= {| @first_letter | @digit | ' | @unicode_id_part } All characters are meaningful. In particular, identifiers are case-sensitive. :production:`unicode_letter` non-exhaustively includes Latin, Greek, Gothic, Cyrillic, Arabic, Hebrew, Georgian, Hangul, Hiragana and Katakana characters, CJK ideographs, mathematical letter-like symbols and non-breaking space. :production:`unicode_id_part` non-exhaustively includes symbols for prime letters and subscripts. Numbers Numbers are sequences of digits with an optional fractional part and exponent, optionally preceded by a minus sign. Hexadecimal numbers start with ``0x`` or ``0X``. :n:`@bigint` are integers; numbers without fractional nor exponent parts. :n:`@bignat` are non-negative integers. Underscores embedded in the digits are ignored, for example ``1_000_000`` is the same as ``1000000``. .. 
insertprodn number hexdigit .. prodn:: number ::= {? - } @decnat {? . {+ {| @digit | _ } } } {? {| e | E } {? {| + | - } } @decnat } | {? - } @hexnat {? . {+ {| @hexdigit | _ } } } {? {| p | P } {? {| + | - } } @decnat } integer ::= {? - } @natural natural ::= @bignat bigint ::= {? - } @bignat bignat ::= {| @decnat | @hexnat } decnat ::= @digit {* {| @digit | _ } } digit ::= 0 .. 9 hexnat ::= {| 0x | 0X } @hexdigit {* {| @hexdigit | _ } } hexdigit ::= {| 0 .. 9 | a .. f | A .. F } :n:`@integer` and :n:`@natural` are limited to the range that fits into an OCaml integer (63-bit integers on most architectures). :n:`@bigint` and :n:`@bignat` have no range limitation. The :ref:`standard library ` provides a few :ref:`interpretations ` for :n:`@number`. Some of these interpretations support exponential notation for decimal numbers, for example ``5.02e-6`` means 5.02×10\ :sup:`-6`; and base 2 exponential notation for hexadecimal numbers denoted by ``p`` or ``P``, for example ``0xAp12`` means 10×2\ :sup:`12`. The :cmd:`Number Notation` mechanism offers the user a way to define custom parsers and printers for :n:`@number`. Strings Strings begin and end with ``"`` (double quote). Use ``""`` to represent a double quote character within a string. In the grammar, strings are identified with :production:`string`. The :cmd:`String Notation` mechanism offers the user a way to define custom parsers and printers for :token:`string`. Keywords The following character sequences are keywords defined in the main Coq grammar that cannot be used as identifiers (even when starting Coq with the `-noinit` command-line flag):: _ Axiom CoFixpoint Definition Fixpoint Hypothesis Parameter Prop SProp Set Theorem Type Variable as at cofix else end fix for forall fun if in let match return then where with The following are keywords defined in notations or plugins loaded in the :term:`prelude`:: by exists exists2 using Note that loading additional modules or plugins may expand the set of reserved keywords. Other tokens The following character sequences are tokens defined in the main Coq grammar (even when starting Coq with the `-noinit` command-line flag):: ! #[ % & ' ( () ) * + , - -> . .( .. ... / : ::= := :> ; < <+ <- <: <<: <= = => > >-> >= ? @ @{ [ ] _ `( `{ { {| | } The following character sequences are tokens defined in notations or plugins loaded in the :term:`prelude`:: ** [= |- || -> Note that loading additional modules or plugins may expand the set of defined tokens. When multiple tokens match the beginning of a sequence of characters, the longest matching token is used. Occasionally you may need to insert spaces to separate tokens. For example, if ``~`` and ``~~`` are both defined as tokens, the inputs ``~ ~`` and ``~~`` generate different tokens, whereas if `~~` is not defined, then the two inputs are equivalent. Essential vocabulary -------------------- This section presents the most essential notions to understand the rest of the Coq manual: :term:`terms ` and :term:`types ` on the one hand, :term:`commands ` and :term:`tactics ` on the other hand. .. glossary:: term Terms are the basic expressions of Coq. Terms can represent mathematical expressions, propositions and proofs, but also executable programs and program types. Here is the top-level syntax of terms. Each of the listed constructs is presented in a dedicated section. 
Some of these constructs (like :n:`@term_forall_or_fun`) are part of the core language that the kernel of Coq understands and are therefore described in :ref:`this chapter `, while others (like :n:`@term_if`) are language extensions that are presented in :ref:`the next chapter `. .. insertprodn term qualid_annotated .. prodn:: term ::= @term_forall_or_fun | @term_let | @term_if | @term_fix | @term_cofix | @term100 term100 ::= @term_cast | @term10 term10 ::= @term_application | @one_term one_term ::= @term_explicit | @term1 term1 ::= @term_projection | @term_scope | @term0 term0 ::= @qualid_annotated | @sort | @primitive_notations | @term_evar | @term_match | @term_record | @term_generalizing | [| {*; @term } %| @term {? : @type } |] {? @univ_annot } | @term_ltac | ( @term ) qualid_annotated ::= @qualid {? @univ_annot } .. note:: Many :term:`commands ` and :term:`tactics ` use :n:`@one_term` (in the syntax of their arguments) rather than :n:`@term`. The former need to be enclosed in parentheses unless they're very simple, such as a single identifier. This avoids confusing a space-separated list of terms or identifiers with a :n:`@term_application`. type To be valid and accepted by the Coq kernel, a term needs an associated type. We express this relationship by “:math:`x` *of type* :math:`T`”, which we write as “:math:`x:T`”. Informally, “:math:`x:T`” can be thought as “:math:`x` *belongs to* :math:`T`”. The Coq kernel is a type checker: it verifies that a term has the expected type by applying a set of typing rules (see :ref:`Typing-rules`). If that's indeed the case, we say that the term is :gdef:`well-typed`. A special feature of the Coq language is that types can depend on terms (we say that the language is `dependently-typed `_). Because of this, types and terms share a common syntax. All types are terms, but not all terms are types: .. insertprodn type type .. prodn:: type ::= @term Intuitively, types may be viewed as sets containing terms. We say that a type is :gdef:`inhabited` if it contains at least one term (i.e. if we can find a term which is associated with this type). We call such terms :gdef:`witnesses `. Note that deciding whether a type is inhabited is `undecidable `_. Formally, types can be used to construct logical foundations for mathematics alternative to the standard `"set theory" `_: we call such logical foundations `"type theories" `_. Coq is based on the Calculus of Inductive Constructions, which is a particular instance of type theory. sentence Coq documents are made of a series of sentences that contain :term:`commands ` or :term:`tactics `, generally terminated with a period and optionally decorated with :term:`attributes `. .. insertprodn document sentence .. prodn:: document ::= {* @sentence } sentence ::= {? @attributes } @command . | {? @attributes } {? @natural : } @query_command . | {? @attributes } {? @toplevel_selector : } @ltac_expr {| . | ... } | @control_command :n:`@ltac_expr` syntax supports both simple and compound :term:`tactics `. For example: ``split`` is a simple tactic while ``split; auto`` combines two simple tactics. command A :production:`command` can be used to modify the state of a Coq document, for instance by declaring a new object, or to get information about the current state. By convention, command names begin with uppercase letters. Commands appear in the HTML documentation in blue or gray boxes after the label "Command". In the pdf, they appear after the boldface label "Command:". Commands are listed in the :ref:`command_index`. 
Example: .. cmd:: Comments {* {| @one_term | @string | @natural } } Prints "Comments ok" and does not change the state of the document. tactic A :production:`tactic` specifies how to transform the current proof state as a step in creating a proof. They are syntactically valid only when Coq is in :term:`proof mode`, such as after a :cmd:`Theorem` command and before any subsequent proof-terminating command such as :cmd:`Qed`. See :ref:`proofhandling` for more on proof mode. By convention, tactic names begin with lowercase letters. Tactic appear in the HTML documentation in blue or gray boxes after the label "Tactic". In the pdf, they appear after the boldface label "Tactic:". Tactics are listed in the :ref:`tactic_index`. Settings -------- There are several mechanisms for changing the behavior of Coq. The :term:`attribute` mechanism is used to modify the behavior of a single :term:`sentence`. The :term:`flag`, :term:`option` and :term:`table` mechanisms are used to modify the behavior of Coq more globally in a document or project. .. _attributes: Attributes ~~~~~~~~~~ An :gdef:`attribute` modifies the behavior of a single sentence. Syntactically, most commands and tactics can be decorated with attributes (cf. :n:`@sentence`), but attributes not supported by the command or tactic will trigger :warn:`This command does not support this attribute`. .. insertprodn attributes legacy_attr .. prodn:: attributes ::= {* #[ {*, @attribute } ] } {* @legacy_attr } attribute ::= @ident {? @attr_value } attr_value ::= = @string | = @ident | ( {*, @attribute } ) legacy_attr ::= {| Local | Global } | {| Polymorphic | Monomorphic } | {| Cumulative | NonCumulative } | Private | Program The order of top-level attributes doesn't affect their meaning. ``#[foo,bar]``, ``#[bar,foo]``, ``#[foo]#[bar]`` and ``#[bar]#[foo]`` are equivalent. :gdef:`Boolean attributes ` take the form :n:`@ident__attr{? = {| yes | no } }`. When the :n:`{| yes | no }` value is omitted, the default is :n:`yes`. The legacy attributes (:n:`@legacy_attr`) provide an older, alternate syntax for certain attributes. They are equivalent to new attributes as follows: ============================= ================================ Legacy attribute New attribute ============================= ================================ `Local` :attr:`local` `Global` :attr:`global` `Polymorphic`, `Monomorphic` :attr:`universes(polymorphic)` `Cumulative`, `NonCumulative` :attr:`universes(cumulative)` `Private` :attr:`private(matching)` `Program` :attr:`program` ============================= ================================ Attributes appear in the HTML documentation in blue or gray boxes after the label "Attribute". In the pdf, they appear after the boldface label "Attribute:". Attributes are listed in the :ref:`attribute_index`. .. warn:: This command does not support this attribute: @ident. :name: This command does not support this attribute This warning is configured to behave as an error by default. You may turn it into a normal warning by using the :opt:`Warnings` option: .. coqtop:: none Set Silent. .. coqtop:: all warn Set Warnings "unsupported-attributes". #[ foo ] Comments. .. _flags-options-tables: Flags, Options and Tables ~~~~~~~~~~~~~~~~~~~~~~~~~ The following types of settings can be used to change the behavior of Coq in subsequent commands and tactics (see :ref:`set_unset_scope_qualifiers` for a more precise description of the scope of these settings): * A :gdef:`flag` has a boolean value, such as :flag:`Universe Polymorphism`. 
* An :gdef:`option` generally has a numeric or string value, such as :opt:`Firstorder Depth`. * A :gdef:`table` contains a set of :token:`string`\s or :token:`qualid`\s. * In addition, some commands provide settings, such as :cmd:`Extraction Language`. .. FIXME Convert "Extraction Language" to an option. .. insertprodn setting_name setting_name .. prodn:: setting_name ::= {+ @ident } .. Flags, options and tables are identified by a series of identifiers. By convention, each of the identifiers start with an initial capital letter. Flags, options and tables appear in the HTML documentation in blue or gray boxes after the labels "Flag", "Option" and "Table". In the pdf, they appear after a boldface label. They are listed in the :ref:`options_index`. .. cmd:: Set @setting_name {? {| @integer | @string } } If :n:`@setting_name` is a flag, no value may be provided; the flag is set to on. If :n:`@setting_name` is an option, a value of the appropriate type must be provided; the option is set to the specified value. This command supports the :attr:`local`, :attr:`global` and :attr:`export` attributes. They are described :ref:`here `. .. warn:: There is no flag or option with this name: "@setting_name". This warning message can be raised by :cmd:`Set` and :cmd:`Unset` when :n:`@setting_name` is unknown. It is a warning rather than an error because this helps library authors produce Coq code that is compatible with several Coq versions. To preserve the same behavior, they may need to set some compatibility flags or options that did not exist in previous Coq versions. .. cmd:: Unset @setting_name If :n:`@setting_name` is a flag, it is set to off. If :n:`@setting_name` is an option, it is set to its default value. This command supports the :attr:`local`, :attr:`global` and :attr:`export` attributes. They are described :ref:`here `. .. cmd:: Add @setting_name {+ {| @qualid | @string } } Adds the specified values to the table :n:`@setting_name`. .. cmd:: Remove @setting_name {+ {| @qualid | @string } } Removes the specified value from the table :n:`@setting_name`. .. cmd:: Test @setting_name {? for {+ {| @qualid | @string } } } If :n:`@setting_name` is a flag or option, prints its current value. If :n:`@setting_name` is a table: if the `for` clause is specified, reports whether the table contains each specified value, otherwise this is equivalent to :cmd:`Print Table`. The `for` clause is not valid for flags and options. .. exn:: There is no flag, option or table with this name: "@setting_name". This error message is raised when calling the :cmd:`Test` command (without the `for` clause), or the :cmd:`Print Table` command, for an unknown :n:`@setting_name`. .. exn:: There is no qualid-valued table with this name: "@setting_name". There is no string-valued table with this name: "@setting_name". These error messages are raised when calling the :cmd:`Add` or :cmd:`Remove` commands, or the :cmd:`Test` command with the `for` clause, if :n:`@setting_name` is unknown or does not have the right type. .. cmd:: Print Options Prints the current value of all flags and options, and the names of all tables. .. cmd:: Print Table @setting_name Prints the values in the table :n:`@setting_name`. .. cmd:: Print Tables A synonym for :cmd:`Print Options`. .. 
_set_unset_scope_qualifiers: Locality attributes supported by :cmd:`Set` and :cmd:`Unset` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The :cmd:`Set` and :cmd:`Unset` commands support the mutually exclusive :attr:`local`, :attr:`export` and :attr:`global` locality attributes (or the ``Local``, ``Export`` or ``Global`` prefixes). If no attribute is specified, the original value of the flag or option is restored at the end of the current module but it is *not* restored at the end of the current section. Newly opened modules and sections inherit the current settings. .. note:: We discourage using the :attr:`global` locality attribute with the :cmd:`Set` and :cmd:`Unset` commands. If your goal is to define project-wide settings, you should rather use the command-line arguments ``-set`` and ``-unset`` for setting flags and options (see :ref:`command-line-options`). coq-8.15.0/doc/sphinx/language/core/coinductive.rst000066400000000000000000000172341417001151100222240ustar00rootroot00000000000000Coinductive types and corecursive functions ============================================= .. _coinductive-types: Coinductive types ------------------ The objects of an inductive type are well-founded with respect to the constructors of the type. In other words, such objects contain only a *finite* number of constructors. Coinductive types arise from relaxing this condition, and admitting types whose objects contain an infinity of constructors. Infinite objects are introduced by a non-ending (but effective) process of construction, defined in terms of the constructors of the type. More information on coinductive definitions can be found in :cite:`Gimenez95b,Gim98,GimCas05`. .. cmd:: CoInductive @inductive_definition {* with @inductive_definition } This command introduces a coinductive type. The syntax of the command is the same as the command :cmd:`Inductive`. No principle of induction is derived from the definition of a coinductive type, since such principles only make sense for inductive types. For coinductive types, the only elimination principle is case analysis. This command supports the :attr:`universes(polymorphic)`, :attr:`universes(template)`, :attr:`universes(cumulative)`, :attr:`private(matching)`, :attr:`bypass_check(universes)`, :attr:`bypass_check(positivity)` and :attr:`using` attributes. When record syntax is used, this command also supports the :attr:`projections(primitive)` :term:`attribute`. .. example:: The type of infinite sequences of natural numbers, usually called streams, is an example of a coinductive type. .. coqtop:: in CoInductive Stream : Set := Seq : nat -> Stream -> Stream. The usual destructors on streams :g:`hd:Stream->nat` and :g:`tl:Str->Str` can be defined as follows: .. coqtop:: in Definition hd (x:Stream) := let (a,s) := x in a. Definition tl (x:Stream) := let (a,s) := x in s. Definitions of coinductive predicates and blocks of mutually coinductive definitions are also allowed. .. example:: The extensional equality on streams is an example of a coinductive type: .. coqtop:: in CoInductive EqSt : Stream -> Stream -> Prop := eqst : forall s1 s2:Stream, hd s1 = hd s2 -> EqSt (tl s1) (tl s2) -> EqSt s1 s2. In order to prove the extensional equality of two streams :g:`s1` and :g:`s2` we have to construct an infinite proof of equality, that is, an infinite object of type :g:`(EqSt s1 s2)`. We will see how to introduce infinite objects in Section :ref:`cofixpoint`. 
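For instance, anticipating the corecursive definitions presented in Section :ref:`cofixpoint`, such an infinite proof can be built by a guarded corecursion. The following is only a minimal sketch (the name :g:`EqSt_refl` is an arbitrary choice made for this illustration and does not come from any loaded library): .. coqtop:: all (* EqSt_refl: arbitrary name, illustrative sketch of a guarded corecursive proof *) CoFixpoint EqSt_refl (s:Stream) : EqSt s s := eqst s s eq_refl (EqSt_refl (tl s)).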
Caveat ~~~~~~ The ability to define coinductive types by constructors, hereafter called *positive coinductive types*, is known to break subject reduction. The story is a bit long: this is due to dependent pattern-matching which implies propositional η-equality, which itself would require full η-conversion for subject reduction to hold, but full η-conversion is not acceptable as it would make type checking undecidable. Since the introduction of primitive records in Coq 8.5, an alternative presentation is available, called *negative coinductive types*. This consists in defining a coinductive type as a primitive record type through its projections. Such a technique is akin to the *copattern* style that can be found in e.g. Agda, and preserves subject reduction. The above example can be rewritten in the following way. .. coqtop:: none Reset Stream. .. coqtop:: all Set Primitive Projections. CoInductive Stream : Set := Seq { hd : nat; tl : Stream }. CoInductive EqSt (s1 s2: Stream) : Prop := eqst { eqst_hd : hd s1 = hd s2; eqst_tl : EqSt (tl s1) (tl s2); }. Some properties that hold over positive streams are lost when going to the negative presentation, typically when they imply equality over streams. For instance, propositional η-equality is lost when going to the negative presentation. It is nonetheless logically consistent to recover it through an axiom. .. coqtop:: all Axiom Stream_eta : forall s: Stream, s = Seq (hd s) (tl s). More generally, as in the case of positive coinductive types, it is consistent to further identify extensional equality of coinductive types with propositional equality: .. coqtop:: all Axiom Stream_ext : forall (s1 s2: Stream), EqSt s1 s2 -> s1 = s2. As of Coq 8.9, it is now advised to use negative coinductive types rather than their positive counterparts. .. seealso:: :ref:`primitive_projections` for more information about negative records and primitive projections. .. index:: single: cofix Co-recursive functions: cofix ----------------------------- .. insertprodn term_cofix cofix_body .. prodn:: term_cofix ::= let cofix @cofix_body in @term | cofix @cofix_body {? {+ with @cofix_body } for @ident } cofix_body ::= @ident {* @binder } {? : @type } := @term The expression ":n:`cofix @ident__1 @binder__1 : @type__1 with … with @ident__n @binder__n : @type__n for @ident__i`" denotes the :math:`i`-th component of a block of terms defined by a mutual guarded corecursion. It is the local counterpart of the :cmd:`CoFixpoint` command. When :math:`n=1`, the ":n:`for @ident__i`" clause is omitted. .. _cofixpoint: Top-level definitions of corecursive functions ----------------------------------------------- .. cmd:: CoFixpoint @cofix_definition {* with @cofix_definition } .. insertprodn cofix_definition cofix_definition .. prodn:: cofix_definition ::= @ident_decl {* @binder } {? : @type } {? := @term } {? @decl_notations } This command introduces a method for constructing an infinite object of a coinductive type. For example, the stream containing all natural numbers can be introduced by applying the following method to the number :g:`O` (see Section :ref:`coinductive-types` for the definition of :g:`Stream`, :g:`hd` and :g:`tl`): .. coqtop:: all CoFixpoint from (n:nat) : Stream := Seq n (from (S n)). Unlike recursive definitions, there is no decreasing argument in a corecursive definition. To be admissible, a method of construction must provide at least one extra constructor of the infinite object for each iteration. 
A syntactical guard condition is imposed on corecursive definitions in order to ensure this: each recursive call in the definition must be protected by at least one constructor, and only by constructors. That is the case in the former definition, where the single recursive call of :g:`from` is guarded by an application of :g:`Seq`. On the contrary, the following recursive function does not satisfy the guard condition: .. coqtop:: all Fail CoFixpoint filter (p:nat -> bool) (s:Stream) : Stream := if p (hd s) then Seq (hd s) (filter p (tl s)) else filter p (tl s). The elimination of corecursive definition is done lazily, i.e. the definition is expanded only when it occurs at the head of an application which is the argument of a case analysis expression. In any other context, it is considered as a canonical expression which is completely evaluated. We can test this using the command :cmd:`Eval`, which computes the normal forms of a term: .. coqtop:: all Eval compute in (from 0). Eval compute in (hd (from 0)). Eval compute in (tl (from 0)). As in the :cmd:`Fixpoint` command, the :n:`with` clause allows simultaneously defining several mutual cofixpoints. If :n:`@term` is omitted, :n:`@type` is required and Coq enters proof mode. This can be used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. In this case, the proof should be terminated with :cmd:`Defined` in order to define a :term:`constant` for which the computational behavior is relevant. See :ref:`proof-editing-mode`. coq-8.15.0/doc/sphinx/language/core/conversion.rst000066400000000000000000000235551417001151100221000ustar00rootroot00000000000000.. _Conversion-rules: Conversion rules ---------------- Coq has conversion rules that can be used to determine if two terms are equal by definition in |CiC|, or :term:`convertible`. Conversion rules consist of reduction rules and expansion rules. Equality is determined by converting both terms to a normal form, then verifying they are syntactically equal (ignoring differences in the names of bound variables by :term:`alpha-conversion `). .. seealso:: :ref:`applyingconversionrules`, which describes tactics that apply these conversion rules. :gdef:`Reductions ` convert terms to something that is incrementally closer to its normal form. For example, :term:`zeta-reduction` removes :n:`let @ident := @term__1 in @term__2` constructs from a term by replacing :n:`@ident` with :n:`@term__1` wherever :n:`@ident` appears in :n:`@term__2`. The resulting term may be longer or shorter than the original. .. coqtop:: all Eval cbv zeta in let i := 1 in i + i. :gdef:`Expansions ` are reductions applied in the opposite direction, for example expanding `2 + 2` to `let i := 2 in i + i`. While applying reductions gives a unique result, the associated expansion may not be unique. For example, `2 + 2` could also be expanded to `let i := 2 in i + 2`. Reductions that have a unique inverse expansion are also referred to as :gdef:`contractions `. The normal form is defined as the result of applying a particular set of conversion rules (beta-, delta-, iota- and zeta-reduction and eta-expansion) repeatedly until it's no longer possible to apply any of them. Sometimes the result of a reduction tactic will be a simple value, for example reducing `2*3+4` with `cbv beta delta iota` to `10`, which requires applying several reduction rules repeatedly. In other cases, it may yield an expression containing variables, axioms or opaque contants that can't be reduced. 
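For instance, the arithmetic example just mentioned can be checked directly (a small illustration; here :g:`*` and :g:`+` are the standard prelude notations for :g:`Nat.mul` and :g:`Nat.add`): .. coqtop:: all Eval cbv beta delta iota in 2*3+4.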
The useful conversion rules are shown below. All of them except for eta-expansion can be applied with conversion tactics such as :tacn:`cbv`: .. list-table:: :header-rows: 1 * - Conversion name - Description * - beta-reduction - eliminates `fun` * - delta-reduction - replaces a defined variable or constant with its definition * - zeta-reduction - eliminates `let` * - eta-expansion - replaces a term `f` of type `forall a : A, B` with `fun x : A => f x` * - match-reduction - eliminates `match` * - fix-reduction - replaces a `fix` with a :term:`beta-redex`; recursive calls to the symbol are replaced with the `fix` term * - cofix-reduction - replaces a `cofix` with a :term:`beta-redex`; recursive calls to the symbol are replaced with the `cofix` term * - iota-reduction - match-, fix- and cofix-reduction together :ref:`applyingconversionrules` describes tactics that only apply conversion rules. (Other tactics may use conversion rules in addition to other changes to the proof state.) α-conversion ~~~~~~~~~~~~ Two terms are :gdef:`α-convertible ` if they are syntactically equal ignoring differences in the names of variables bound within the expression. For example `forall x, x + 0 = x` is α-convertible with `forall y, y + 0 = y`. β-reduction ~~~~~~~~~~~ :gdef:`β-reduction ` reduces a :gdef:`beta-redex`, which is a term in the form `(fun x => t) u`. (Beta-redex is short for "beta-reducible expression", a term from lambda calculus. See `Beta reduction `_ for more background.) Formally, in any :term:`global environment` :math:`E` and :term:`local context` :math:`Γ`, the beta-reduction rule is: .. inference:: Beta -------------- E[Γ] ⊢ ((λx:T.~t)~u)~\triangleright_β~\subst{t}{x}{u} We say that :math:`\subst{t}{x}{u}` is the *β-contraction* of :math:`((λx:T.~t)~u)` and, conversely, that :math:`((λ x:T.~t)~u)` is the *β-expansion* of :math:`\subst{t}{x}{u}`. .. todo: :term:`Calculus of Inductive Constructions` fails to build in CI for some reason :-() Terms of the *Calculus of Inductive Constructions* enjoy some fundamental properties such as confluence, strong normalization, subject reduction. These results are theoretically of great importance but we will not detail them here and refer the interested reader to :cite:`Coq85`. .. _delta-reduction-sect: δ-reduction ~~~~~~~~~~~ :gdef:`δ-reduction ` replaces variables defined in :term:`local contexts ` or :term:`constants ` defined in the :term:`global environment` with their values. :gdef:`Unfolding ` means to replace a constant by its definition. Formally, this is: .. inference:: Delta-Local \WFE{\Gamma} (x:=t:T) ∈ Γ -------------- E[Γ] ⊢ x~\triangleright_Δ~t .. inference:: Delta-Global \WFE{\Gamma} (c:=t:T) ∈ E -------------- E[Γ] ⊢ c~\triangleright_δ~t :term:`Delta-reduction ` only unfolds :term:`constants ` that are marked :gdef:`transparent`. :gdef:`Opaque ` is the opposite of transparent; :term:`delta-reduction` doesn't unfold opaque constants. ι-reduction ~~~~~~~~~~~ A specific conversion rule is associated with the inductive objects in the global environment. We shall give later on (see Section :ref:`Well-formed-inductive-definitions`) the precise rules but it just says that a destructor applied to an object built from a constructor behaves as expected. This reduction is called :gdef:`ι-reduction ` and is more precisely studied in :cite:`Moh93,Wer94`. ζ-reduction ~~~~~~~~~~~ :gdef:`ζ-reduction ` removes :ref:`let-in definitions ` in terms by replacing the defined variable by its value. 
One way this reduction differs from δ-reduction is that the declaration is removed from the term entirely. Formally, this is: .. inference:: Zeta \WFE{\Gamma} \WTEG{u}{U} \WTE{\Gamma::(x:=u:U)}{t}{T} -------------- E[Γ] ⊢ \letin{x}{u:U}{t}~\triangleright_ζ~\subst{t}{x}{u} .. _eta-expansion-sect: η-expansion ~~~~~~~~~~~ Another important concept is :gdef:`η-expansion `. It is legal to identify any term :math:`t` of functional type :math:`∀ x:T,~U` with its so-called η-expansion .. math:: λx:T.~(t~x) for :math:`x` an arbitrary variable name fresh in :math:`t`. .. note:: We deliberately do not define η-reduction: .. math:: λ x:T.~(t~x)~\not\triangleright_η~t This is because, in general, the type of :math:`t` need not be convertible to the type of :math:`λ x:T.~(t~x)`. E.g., if we take :math:`f` such that: .. math:: f ~:~ ∀ x:\Type(2),~\Type(1) then .. math:: λ x:\Type(1).~(f~x) ~:~ ∀ x:\Type(1),~\Type(1) We could not allow .. math:: λ x:\Type(1).~(f~x) ~\triangleright_η~ f because the type of the reduced term :math:`∀ x:\Type(2),~\Type(1)` would not be convertible to the type of the original term :math:`∀ x:\Type(1),~\Type(1)`. Examples ~~~~~~~~ .. example:: Simple delta, fix, beta and match reductions ``+`` is a :ref:`notation ` for ``Nat.add``, which is defined with a :cmd:`Fixpoint`. .. coqtop:: all abort Print Nat.add. Goal 1 + 1 = 2. cbv delta. cbv fix. cbv beta. cbv match. The term can be fully reduced with `cbv`: .. coqtop:: all abort Goal 1 + 1 = 2. cbv. .. _proof-irrelevance: Proof Irrelevance ~~~~~~~~~~~~~~~~~ It is legal to identify any two terms whose common type is a strict proposition :math:`A : \SProp`. Terms in a strict propositions are therefore called *irrelevant*. .. _convertibility: Convertibility ~~~~~~~~~~~~~~ Let us write :math:`E[Γ] ⊢ t \triangleright u` for the contextual closure of the relation :math:`t` reduces to :math:`u` in the global environment :math:`E` and local context :math:`Γ` with one of the previous reductions β, δ, ι or ζ. We say that two terms :math:`t_1` and :math:`t_2` are *βδιζη-convertible*, or simply :gdef:`convertible`, or :term:`definitionally equal `, in the global environment :math:`E` and local context :math:`Γ` iff there exist terms :math:`u_1` and :math:`u_2` such that :math:`E[Γ] ⊢ t_1 \triangleright … \triangleright u_1` and :math:`E[Γ] ⊢ t_2 \triangleright … \triangleright u_2` and either :math:`u_1` and :math:`u_2` are identical up to irrelevant subterms, or they are convertible up to η-expansion, i.e. :math:`u_1` is :math:`λ x:T.~u_1'` and :math:`u_2 x` is recursively convertible to :math:`u_1'`, or, symmetrically, :math:`u_2` is :math:`λx:T.~u_2'` and :math:`u_1 x` is recursively convertible to :math:`u_2'`. We then write :math:`E[Γ] ⊢ t_1 =_{βδιζη} t_2`. Apart from this we consider two instances of polymorphic and cumulative (see Chapter :ref:`polymorphicuniverses`) inductive types (see below) convertible .. math:: E[Γ] ⊢ t~w_1 … w_m =_{βδιζη} t~w_1' … w_m' if we have subtypings (see below) in both directions, i.e., .. math:: E[Γ] ⊢ t~w_1 … w_m ≤_{βδιζη} t~w_1' … w_m' and .. math:: E[Γ] ⊢ t~w_1' … w_m' ≤_{βδιζη} t~w_1 … w_m. Furthermore, we consider .. math:: E[Γ] ⊢ c~v_1 … v_m =_{βδιζη} c'~v_1' … v_m' convertible if .. math:: E[Γ] ⊢ v_i =_{βδιζη} v_i' and we have that :math:`c` and :math:`c'` are the same constructors of different instances of the same inductive types (differing only in universe levels) such that .. math:: E[Γ] ⊢ c~v_1 … v_m : t~w_1 … w_m and .. math:: E[Γ] ⊢ c'~v_1' … v_m' : t'~ w_1' … w_m ' and we have .. 
math:: E[Γ] ⊢ t~w_1 … w_m =_{βδιζη} t~w_1' … w_m'. The convertibility relation allows introducing a new typing rule which says that two convertible well-formed types have the same inhabitants. coq-8.15.0/doc/sphinx/language/core/definitions.rst000066400000000000000000000210641417001151100222170ustar00rootroot00000000000000Definitions =========== .. index:: let ... := ... (term) .. _let-in: Let-in definitions ------------------ .. insertprodn term_let term_let .. prodn:: term_let ::= let @name {? : @type } := @term in @term | let @name {+ @binder } {? : @type } := @term in @term | @destructuring_let :n:`let @ident := @term__1 in @term__2` represents the local binding of the variable :n:`@ident` to the value :n:`@term__1` in :n:`@term__2`. :n:`let @ident {+ @binder} := @term__1 in @term__2` is an abbreviation for :n:`let @ident := fun {+ @binder} => @term__1 in @term__2`. .. seealso:: Extensions of the `let ... in ...` syntax are described in :ref:`irrefutable-patterns`. .. index:: single: ... : ... (type cast) single: ... <: ... single: ... <<: ... .. _type-cast: Type cast --------- .. insertprodn term_cast term_cast .. prodn:: term_cast ::= @term10 : @type | @term10 <: @type | @term10 <<: @type The expression :n:`@term10 : @type` is a type cast expression. It enforces the type of :n:`@term10` to be :n:`@type`. :n:`@term10 <: @type` specifies that the virtual machine will be used to type check that :n:`@term10` has type :n:`@type` (see :tacn:`vm_compute`). :n:`@term10 <<: @type` specifies that compilation to OCaml will be used to type check that :n:`@term10` has type :n:`@type` (see :tacn:`native_compute`). .. _gallina-definitions: Top-level definitions --------------------- Definitions extend the global environment by associating names to terms. A definition can be seen as a way to give a meaning to a name or as a way to abbreviate a term. In any case, the name can later be replaced at any time by its definition. The operation of unfolding a name into its definition is called :term:`delta-reduction`. A definition is accepted by the system if and only if the defined term is well-typed in the current context of the definition and if the name is not already used. The name defined by the definition is called a :gdef:`constant` and the term it refers to is its :gdef:`body`. A definition has a type, which is the type of its :term:`body`. A formal presentation of constants and environments is given in Section :ref:`typing-rules`. .. cmd:: {| Definition | Example } @ident_decl @def_body :name: Definition; Example .. insertprodn def_body reduce .. prodn:: def_body ::= {* @binder } {? : @type } := {? @reduce } @term | {* @binder } : @type reduce ::= Eval @red_expr in These commands bind :n:`@term` to the name :n:`@ident` in the global environment, provided that :n:`@term` is well-typed. They can take the :attr:`local` :term:`attribute`, which makes the defined :n:`@ident` accessible by :cmd:`Import` and its variants only through their fully qualified names. If :n:`@reduce` is present then :n:`@ident` is bound to the result of the specified computation on :n:`@term`. These commands also support the :attr:`universes(polymorphic)`, :attr:`program` (see :ref:`program_definition`), :attr:`canonical`, :attr:`bypass_check(universes)`, :attr:`bypass_check(guard)`, and :attr:`using` attributes. If :n:`@term` is omitted, :n:`@type` is required and Coq enters proof mode. This can be used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. 
In this case, the proof should be terminated with :cmd:`Defined` in order to define a :term:`constant` for which the computational behavior is relevant. See :ref:`proof-editing-mode`. The form :n:`Definition @ident : @type := @term` checks that the type of :n:`@term` is definitionally equal to :n:`@type`, and registers :n:`@ident` as being of type :n:`@type`, and bound to value :n:`@term`. The form :n:`Definition @ident {* @binder } : @type := @term` is equivalent to :n:`Definition @ident : forall {* @binder }, @type := fun {* @binder } => @term`. .. seealso:: :cmd:`Opaque`, :cmd:`Transparent`, :tacn:`unfold`. .. exn:: @ident already exists. :name: ‘ident’ already exists. (Definition) :undocumented: .. exn:: The term @term has type @type while it is expected to have type @type'. :undocumented: .. _Assertions: Assertions and proofs --------------------- An assertion states a proposition (or a type) for which the proof (or an inhabitant of the type) is interactively built using :term:`tactics `. Assertions cause Coq to enter :term:`proof mode` (see :ref:`proofhandling`). Common tactics are described in the :ref:`writing-proofs` chapter. The basic assertion command is: .. cmd:: @thm_token @ident_decl {* @binder } : @type {* with @ident_decl {* @binder } : @type } :name: Theorem; Lemma; Fact; Remark; Corollary; Proposition; Property .. insertprodn thm_token thm_token .. prodn:: thm_token ::= Theorem | Lemma | Fact | Remark | Corollary | Proposition | Property After the statement is asserted, Coq needs a proof. Once a proof of :n:`@type` under the assumptions represented by :n:`@binder`\s is given and validated, the proof is generalized into a proof of :n:`forall {* @binder }, @type` and the theorem is bound to the name :n:`@ident` in the global environment. These commands accept the :attr:`program` attribute. See :ref:`program_lemma`. Forms using the :n:`with` clause are useful for theorems that are proved by simultaneous induction over a mutually inductive assumption, or that assert mutually dependent statements in some mutual coinductive type. It is equivalent to :cmd:`Fixpoint` or :cmd:`CoFixpoint` but using tactics to build the proof of the statements (or the :term:`body` of the specification, depending on the point of view). The inductive or coinductive types on which the induction or coinduction has to be done is assumed to be unambiguous and is guessed by the system. Like in a :cmd:`Fixpoint` or :cmd:`CoFixpoint` definition, the induction hypotheses have to be used on *structurally smaller* arguments (for a :cmd:`Fixpoint`) or be *guarded by a constructor* (for a :cmd:`CoFixpoint`). The verification that recursive proof arguments are correct is done only at the time of registering the lemma in the global environment. To know if the use of induction hypotheses is correct at some time of the interactive development of a proof, use the command :cmd:`Guarded`. This command accepts the :attr:`bypass_check(universes)`, :attr:`bypass_check(guard)`, and :attr:`using` attributes. .. exn:: The term @term has type @type which should be Set, Prop or Type. :undocumented: .. exn:: @ident already exists. :name: ‘ident’ already exists. (Theorem) The name you provided is already defined. You have then to choose another name. .. exn:: Nested proofs are discouraged and not allowed by default. This error probably means that you forgot to close the last "Proof." with "Qed." or "Defined.". \ If you really intended to use nested proofs, you can do so by turning the "Nested Proofs Allowed" flag on. 
You are asserting a new statement when you're already in proof mode. This feature, called nested proofs, is disabled by default. To activate it, turn the :flag:`Nested Proofs Allowed` flag on. Proofs start with the keyword :cmd:`Proof`. Then Coq enters the proof mode until the proof is completed. In proof mode, the user primarily enters tactics (see :ref:`writing-proofs`). The user may also enter commands to manage the proof mode (see :ref:`proofhandling`). When the proof is complete, use the :cmd:`Qed` command so the kernel verifies the proof and adds it to the global environment. .. note:: #. Several statements can be simultaneously asserted provided the :flag:`Nested Proofs Allowed` flag was turned on. #. Not only other assertions but any command can be given while in the process of proving a given assertion. In this case, the command is understood as if it would have been given before the statements still to be proved. Nonetheless, this practice is discouraged and may stop working in future versions. #. Proofs ended by :cmd:`Qed` are declared :term:`opaque`. Their content cannot be unfolded (see :ref:`applyingconversionrules`), thus realizing some form of *proof-irrelevance*. Proofs that end with :cmd:`Defined` can be unfolded. #. :cmd:`Proof` is recommended but can currently be omitted. On the opposite side, :cmd:`Qed` (or :cmd:`Defined`) is mandatory to validate a proof. #. One can also use :cmd:`Admitted` in place of :cmd:`Qed` to turn the current asserted statement into an axiom and exit proof mode. coq-8.15.0/doc/sphinx/language/core/index.rst000066400000000000000000000034351417001151100210150ustar00rootroot00000000000000.. _core-language: ============= Core language ============= At the heart of the Coq proof assistant is the Coq kernel. While users have access to a language with many convenient features such as :ref:`notations `, :ref:`implicit arguments `, etc. (presented in the :ref:`next chapter `), those features are translated into the core language (the Calculus of Inductive Constructions) that the kernel understands, which we present here. Furthermore, while users can build proofs interactively using tactics (see Chapter :ref:`writing-proofs`), the role of these tactics is to incrementally build a "proof term" which the kernel will verify. More precisely, a proof term is a :term:`term` of the Calculus of Inductive Constructions whose :term:`type` corresponds to a theorem statement. The kernel is a type checker which verifies that terms have their expected types. This separation between the kernel on one hand and the :ref:`elaboration engine ` and :ref:`tactics ` on the other follows what is known as the :gdef:`de Bruijn criterion` (keeping a small and well delimited trusted code base within a proof assistant which can be much more complex). This separation makes it necessary to trust only a smaller, critical component (the kernel) instead of the entire system. In particular, users may rely on external plugins that provide advanced and complex tactics without fear of these tactics being buggy, because the kernel will have to check their output. .. toctree:: :maxdepth: 1 basic sorts assumptions definitions conversion ../cic variants records inductive coinductive sections modules primitive ../../addendum/universe-polymorphism ../../addendum/sprop coq-8.15.0/doc/sphinx/language/core/inductive.rst000066400000000000000000001770241417001151100217060ustar00rootroot00000000000000Inductive types and recursive functions ======================================= .. 
_gallina-inductive-definitions: Inductive types --------------- .. cmd:: Inductive @inductive_definition {* with @inductive_definition } .. insertprodn inductive_definition constructor .. prodn:: inductive_definition ::= {? > } @ident {? @cumul_univ_decl } {* @binder } {? %| {* @binder } } {? : @type } {? := {? @constructors_or_record } } {? @decl_notations } constructors_or_record ::= {? %| } {+| @constructor } | {? @ident } %{ {*; @record_field } {? ; } %} constructor ::= @ident {* @binder } {? @of_type } Defines one or more inductive types and its constructors. Coq generates :gdef:`induction principles ` depending on the universe that the inductive type belongs to. The induction principles are named :n:`@ident`\ ``_rect``, :n:`@ident`\ ``_ind``, :n:`@ident`\ ``_rec`` and :n:`@ident`\ ``_sind``, which respectively correspond to on :g:`Type`, :g:`Prop`, :g:`Set` and :g:`SProp`. Their types expresses structural induction/recursion principles over objects of type :n:`@ident`. The :term:`constant` :n:`@ident`\ ``_ind`` is always generated, whereas :n:`@ident`\ ``_rec`` and :n:`@ident`\ ``_rect`` may be impossible to derive (for example, when :n:`@ident` is a proposition). This command supports the :attr:`universes(polymorphic)`, :attr:`universes(template)`, :attr:`universes(cumulative)`, :attr:`bypass_check(positivity)`, :attr:`bypass_check(universes)` and :attr:`private(matching)` attributes. When record syntax is used, this command also supports the :attr:`projections(primitive)` :term:`attribute`. Mutually inductive types can be defined by including multiple :n:`@inductive_definition`\s. The :n:`@ident`\s are simultaneously added to the global environment before the types of constructors are checked. Each :n:`@ident` can be used independently thereafter. However, the induction principles currently generated for such types are not useful. Use the :cmd:`Scheme` command to generate useful induction principles. See :ref:`mutually_inductive_types`. If the entire inductive definition is parameterized with :n:`@binder`\s, the parameters correspond to a local context in which the entire set of inductive declarations is interpreted. For this reason, the parameters must be strictly the same for each inductive type. See :ref:`parametrized-inductive-types`. Constructor :n:`@ident`\s can come with :n:`@binder`\s, in which case the actual type of the constructor is :n:`forall {* @binder }, @type`. .. exn:: Non strictly positive occurrence of @ident in @type. The types of the constructors have to satisfy a *positivity condition* (see Section :ref:`positivity`). This condition ensures the soundness of the inductive definition. Positivity checking can be disabled using the :flag:`Positivity Checking` flag or the :attr:`bypass_check(positivity)` attribute (see :ref:`controlling-typing-flags`). .. exn:: The conclusion of @type is not valid; it must be built from @ident. The conclusion of the type of the constructors must be the inductive type :n:`@ident` being defined (or :n:`@ident` applied to arguments in the case of annotated inductive types — cf. next section). The following subsections show examples of simple inductive types, simple annotated inductive types, simple parametric inductive types, mutually inductive types and private (matching) inductive types. .. _simple-inductive-types: Simple inductive types ~~~~~~~~~~~~~~~~~~~~~~ A simple inductive type belongs to a universe that is a simple :n:`@sort`. .. example:: The set of natural numbers is defined as: .. 
coqtop:: reset all Inductive nat : Set := | O : nat | S : nat -> nat. The type nat is defined as the least :g:`Set` containing :g:`O` and closed by the :g:`S` constructor. The names :g:`nat`, :g:`O` and :g:`S` are added to the global environment. This definition generates four :term:`induction principles `: :g:`nat_rect`, :g:`nat_ind`, :g:`nat_rec` and :g:`nat_sind`. The type of :g:`nat_ind` is: .. coqtop:: all Check nat_ind. This is the well known structural induction principle over natural numbers, i.e. the second-order form of Peano’s induction principle. It allows proving universal properties of natural numbers (:g:`forall n:nat, P n`) by induction on :g:`n`. The types of :g:`nat_rect`, :g:`nat_rec` and :g:`nat_sind` are similar, except that they apply to, respectively, :g:`(P:nat->Type)`, :g:`(P:nat->Set)` and :g:`(P:nat->SProp)`. They correspond to primitive induction principles (allowing dependent types) respectively over sorts ``Type``, ``Set`` and ``SProp``. In the case where inductive types don't have annotations (the next section gives an example of annotations), a constructor can be defined by giving the type of its arguments alone. .. example:: .. coqtop:: reset none Reset nat. .. coqtop:: in Inductive nat : Set := O | S (_:nat). Simple annotated inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In annotated inductive types, the universe where the inductive type is defined is no longer a simple :n:`@sort`, but what is called an arity, which is a type whose conclusion is a :n:`@sort`. .. example:: As an example of annotated inductive types, let us define the :g:`even` predicate: .. coqtop:: all Inductive even : nat -> Prop := | even_0 : even O | even_SS : forall n:nat, even n -> even (S (S n)). The type :g:`nat->Prop` means that :g:`even` is a unary predicate (inductively defined) over natural numbers. The type of its two constructors are the defining clauses of the predicate :g:`even`. The type of :g:`even_ind` is: .. coqtop:: all Check even_ind. From a mathematical point of view, this asserts that the natural numbers satisfying the predicate :g:`even` are exactly in the smallest set of naturals satisfying the clauses :g:`even_0` or :g:`even_SS`. This is why, when we want to prove any predicate :g:`P` over elements of :g:`even`, it is enough to prove it for :g:`O` and to prove that if any natural number :g:`n` satisfies :g:`P` its double successor :g:`(S (S n))` satisfies also :g:`P`. This is analogous to the structural induction principle we got for :g:`nat`. .. _parametrized-inductive-types: Parameterized inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the previous example, each constructor introduces a different instance of the predicate :g:`even`. In some cases, all the constructors introduce the same generic instance of the inductive definition, in which case, instead of an annotation, we use a context of parameters which are :n:`@binder`\s shared by all the constructors of the definition. Parameters differ from inductive type annotations in that the conclusion of each type of constructor invokes the inductive type with the same parameter values of its specification. .. example:: A typical example is the definition of polymorphic lists: .. coqtop:: all Inductive list (A:Set) : Set := | nil : list A | cons : A -> list A -> list A. In the type of :g:`nil` and :g:`cons`, we write ":g:`list A`" and not just ":g:`list`". The constructors :g:`nil` and :g:`cons` have these types: .. coqtop:: all Check nil. Check cons. 
Observe that the induction principles are also quantified with :g:`(A:Set)`, for example: .. coqtop:: all Check list_ind. Once again, the names of the constructor arguments and the type of the conclusion can be omitted: .. coqtop:: none Reset list. .. coqtop:: in Inductive list (A:Set) : Set := nil | cons (_:A) (_:list A). .. note:: + The constructor type can recursively invoke the inductive definition on an argument which is not the parameter itself. One can define : .. coqtop:: all Inductive list2 (A:Set) : Set := | nil2 : list2 A | cons2 : A -> list2 (A*A) -> list2 A. that can also be written by specifying only the type of the arguments: .. coqtop:: all reset Inductive list2 (A:Set) : Set := | nil2 | cons2 (_:A) (_:list2 (A*A)). But the following definition will give an error: .. coqtop:: all Fail Inductive listw (A:Set) : Set := | nilw : listw (A*A) | consw : A -> listw (A*A) -> listw (A*A). because the conclusion of the type of constructors should be :g:`listw A` in both cases. + A parameterized inductive definition can be defined using annotations instead of parameters but it will sometimes give a different (bigger) sort for the inductive definition and will produce a less convenient rule for case elimination. .. flag:: Uniform Inductive Parameters When this :term:`flag` is set (it is off by default), inductive definitions are abstracted over their parameters before type checking constructors, allowing to write: .. coqtop:: all Set Uniform Inductive Parameters. Inductive list3 (A:Set) : Set := | nil3 : list3 | cons3 : A -> list3 -> list3. This behavior is essentially equivalent to starting a new section and using :cmd:`Context` to give the uniform parameters, like so (cf. :ref:`section-mechanism`): .. coqtop:: all reset Section list3. Context (A:Set). Inductive list3 : Set := | nil3 : list3 | cons3 : A -> list3 -> list3. End list3. For finer control, you can use a ``|`` between the uniform and the non-uniform parameters: .. coqtop:: in reset Inductive Acc {A:Type} (R:A->A->Prop) | (x:A) : Prop := Acc_in : (forall y, R y x -> Acc y) -> Acc x. The flag can then be seen as deciding whether the ``|`` is at the beginning (when the flag is unset) or at the end (when it is set) of the parameters when not explicitly given. .. seealso:: Section :ref:`inductive-definitions` and the :tacn:`induction` tactic. .. _mutually_inductive_types: Mutually defined inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. todo: combine with the very similar tree/forest example in reasoning-inductives.rst The induction principles currently generated for mutually defined types are not useful. Use the :cmd:`Scheme` command to generate a useful induction principle. .. example:: Mutually defined inductive types A typical example of mutually inductive data types is trees and forests. We assume two types :g:`A` and :g:`B` that are given as variables. The types can be declared like this: .. coqtop:: in Parameters A B : Set. Inductive tree : Set := node : A -> forest -> tree with forest : Set := | leaf : B -> forest | cons : tree -> forest -> forest. This declaration automatically generates eight induction principles. They are not the most general principles, but they correspond to each inductive part seen as a single inductive definition. To illustrate this point on our example, here are the types of :g:`tree_rec` and :g:`forest_rec`. .. coqtop:: all Check tree_rec. Check forest_rec. 
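As mentioned above, the :cmd:`Scheme` command can generate a genuinely simultaneous induction principle for such a block. For instance, a sketch (the names :g:`tree_forest_rec` and :g:`forest_tree_rec` are arbitrary choices for this illustration): .. coqtop:: all (* arbitrary scheme names, shown only as an illustration *) Scheme tree_forest_rec := Induction for tree Sort Set with forest_tree_rec := Induction for forest Sort Set. Check tree_forest_rec.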
Assume we want to parameterize our mutual inductive definitions with the two type variables :g:`A` and :g:`B`, the declaration should be done as follows: .. coqdoc:: Inductive tree (A B:Set) : Set := node : A -> forest A B -> tree A B with forest (A B:Set) : Set := | leaf : B -> forest A B | cons : tree A B -> forest A B -> forest A B. Assume we define an inductive definition inside a section (cf. :ref:`section-mechanism`). When the section is closed, the variables declared in the section and occurring free in the declaration are added as parameters to the inductive definition. .. seealso:: A generic command :cmd:`Scheme` is useful to build automatically various mutual induction principles. .. index:: single: fix Recursive functions: fix ------------------------ .. insertprodn term_fix fixannot .. prodn:: term_fix ::= let fix @fix_decl in @term | fix @fix_decl {? {+ with @fix_decl } for @ident } fix_decl ::= @ident {* @binder } {? @fixannot } {? : @type } := @term fixannot ::= %{ struct @ident %} | %{ wf @one_term @ident %} | %{ measure @one_term {? @ident } {? @one_term } %} The expression ":n:`fix @ident__1 @binder__1 : @type__1 := @term__1 with … with @ident__n @binder__n : @type__n := @term__n for @ident__i`" denotes the :math:`i`-th component of a block of functions defined by mutual structural recursion. It is the local counterpart of the :cmd:`Fixpoint` command. When :math:`n=1`, the ":n:`for @ident__i`" clause is omitted. The association of a single fixpoint and a local definition have a special syntax: :n:`let fix @ident {* @binder } := @term in` stands for :n:`let @ident := fix @ident {* @binder } := @term in`. The same applies for cofixpoints. Some options of :n:`@fixannot` are only supported in specific constructs. :n:`fix` and :n:`let fix` only support the :n:`struct` option, while :n:`wf` and :n:`measure` are only supported in commands such as :cmd:`Fixpoint` (with the :attr:`program` attribute) and :cmd:`Function`. .. todo explanation of struct: see text above at the Fixpoint command, also see https://github.com/coq/coq/pull/12936#discussion_r510716268 and above. Consider whether to move the grammar for fixannot elsewhere .. _Fixpoint: Top-level recursive functions ----------------------------- This section describes the primitive form of definition by recursion over inductive objects. See the :cmd:`Function` command for more advanced constructions. .. cmd:: Fixpoint @fix_definition {* with @fix_definition } .. insertprodn fix_definition fix_definition .. prodn:: fix_definition ::= @ident_decl {* @binder } {? @fixannot } {? : @type } {? := @term } {? @decl_notations } Allows defining functions by pattern matching over inductive objects using a fixed point construction. The meaning of this declaration is to define :n:`@ident` as a recursive function with arguments specified by the :n:`@binder`\s such that :n:`@ident` applied to arguments corresponding to these :n:`@binder`\s has type :n:`@type`, and is equivalent to the expression :n:`@term`. The type of :n:`@ident` is consequently :n:`forall {* @binder }, @type` and its value is equivalent to :n:`fun {* @binder } => @term`. This command accepts the :attr:`program`, :attr:`bypass_check(universes)`, and :attr:`bypass_check(guard)` attributes. To be accepted, a :cmd:`Fixpoint` definition has to satisfy syntactical constraints on a special argument called the decreasing argument. They are needed to ensure that the :cmd:`Fixpoint` definition always terminates. 
The point of the :n:`{struct @ident}` annotation (see :n:`@fixannot`) is to let the user tell the system which argument decreases along the recursive calls. The :n:`{struct @ident}` annotation may be left implicit, in which case the system successively tries arguments from left to right until it finds one that satisfies the decreasing condition. :cmd:`Fixpoint` without the :attr:`program` attribute does not support the :n:`wf` or :n:`measure` clauses of :n:`@fixannot`. See :ref:`program_fixpoint`. The :n:`with` clause allows simultaneously defining several mutual fixpoints. It is especially useful when defining functions over mutually defined inductive types. Example: :ref:`Mutual Fixpoints`. If :n:`@term` is omitted, :n:`@type` is required and Coq enters proof mode. This can be used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. In this case, the proof should be terminated with :cmd:`Defined` in order to define a :term:`constant` for which the computational behavior is relevant. See :ref:`proof-editing-mode`. This command accepts the :attr:`using` attribute. .. note:: + Some fixpoints may have several arguments that fit as decreasing arguments, and this choice influences the reduction of the fixpoint. Hence an explicit annotation must be used if the leftmost decreasing argument is not the desired one. Writing explicit annotations can also speed up type checking of large mutual fixpoints. + In order to keep the strong normalization property, the fixed point reduction will only be performed when the argument in position of the decreasing argument (which type should be in an inductive definition) starts with a constructor. .. example:: One can define the addition function as : .. coqtop:: all Fixpoint add (n m:nat) {struct n} : nat := match n with | O => m | S p => S (add p m) end. The match operator matches a value (here :g:`n`) with the various constructors of its (inductive) type. The remaining arguments give the respective values to be returned, as functions of the parameters of the corresponding constructor. Thus here when :g:`n` equals :g:`O` we return :g:`m`, and when :g:`n` equals :g:`(S p)` we return :g:`(S (add p m))`. The match operator is formally described in Section :ref:`match-construction`. The system recognizes that in the inductive call :g:`(add p m)` the first argument actually decreases because it is a *pattern variable* coming from :g:`match n with`. .. example:: The following definition is not correct and generates an error message: .. coqtop:: all Fail Fixpoint wrongplus (n m:nat) {struct n} : nat := match m with | O => n | S p => S (wrongplus n p) end. because the declared decreasing argument :g:`n` does not actually decrease in the recursive call. The function computing the addition over the second argument should rather be written: .. coqtop:: all Fixpoint plus (n m:nat) {struct m} : nat := match m with | O => n | S p => S (plus n p) end. .. example:: The recursive call may not only be on direct subterms of the recursive variable :g:`n` but also on a deeper subterm and we can directly write the function :g:`mod2` which gives the remainder modulo 2 of a natural number. .. coqtop:: all Fixpoint mod2 (n:nat) : nat := match n with | O => O | S p => match p with | O => S O | S q => mod2 q end end. .. _example_mutual_fixpoints: .. example:: Mutual fixpoints The size of trees and forests can be defined the following way: .. 
coqtop:: all Fixpoint tree_size (t:tree) : nat := match t with | node a f => S (forest_size f) end with forest_size (f:forest) : nat := match f with | leaf b => 1 | cons t f' => (tree_size t + forest_size f') end. .. extracted from CIC chapter .. _inductive-definitions: Theory of inductive definitions ------------------------------- Formally, we can represent any *inductive definition* as :math:`\ind{p}{Γ_I}{Γ_C}` where: + :math:`Γ_I` determines the names and types of inductive types; + :math:`Γ_C` determines the names and types of constructors of these inductive types; + :math:`p` determines the number of parameters of these inductive types. These inductive definitions, together with global assumptions and global definitions, then form the global environment. Additionally, for any :math:`p` there always exists :math:`Γ_P =[a_1 :A_1 ;~…;~a_p :A_p ]` such that each :math:`T` in :math:`(t:T)∈Γ_I \cup Γ_C` can be written as: :math:`∀Γ_P , T'` where :math:`Γ_P` is called the *context of parameters*. Furthermore, we must have that each :math:`T` in :math:`(t:T)∈Γ_I` can be written as: :math:`∀Γ_P,∀Γ_{\mathit{Arr}(t)}, S` where :math:`Γ_{\mathit{Arr}(t)}` is called the *Arity* of the inductive type :math:`t` and :math:`S` is called the sort of the inductive type :math:`t` (not to be confused with :math:`\Sort` which is the set of sorts). .. example:: The declaration for parameterized lists is: .. math:: \ind{1}{[\List:\Set→\Set]}{\left[\begin{array}{rcl} \Nil & : & ∀ A:\Set,~\List~A \\ \cons & : & ∀ A:\Set,~A→ \List~A→ \List~A \end{array} \right]} which corresponds to the result of the Coq declaration: .. coqtop:: in reset Inductive list (A:Set) : Set := | nil : list A | cons : A -> list A -> list A. .. example:: The declaration for a mutual inductive definition of tree and forest is: .. math:: \ind{0}{\left[\begin{array}{rcl}\tree&:&\Set\\\forest&:&\Set\end{array}\right]} {\left[\begin{array}{rcl} \node &:& \forest → \tree\\ \emptyf &:& \forest\\ \consf &:& \tree → \forest → \forest\\ \end{array}\right]} which corresponds to the result of the Coq declaration: .. coqtop:: in Inductive tree : Set := | node : forest -> tree with forest : Set := | emptyf : forest | consf : tree -> forest -> forest. .. example:: The declaration for a mutual inductive definition of even and odd is: .. math:: \ind{0}{\left[\begin{array}{rcl}\even&:&\nat → \Prop \\ \odd&:&\nat → \Prop \end{array}\right]} {\left[\begin{array}{rcl} \evenO &:& \even~0\\ \evenS &:& ∀ n,~\odd~n → \even~(\nS~n)\\ \oddS &:& ∀ n,~\even~n → \odd~(\nS~n) \end{array}\right]} which corresponds to the result of the Coq declaration: .. coqtop:: in Inductive even : nat -> Prop := | even_O : even 0 | even_S : forall n, odd n -> even (S n) with odd : nat -> Prop := | odd_S : forall n, even n -> odd (S n). .. _Types-of-inductive-objects: Types of inductive objects ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We have to give the type of constants in a global environment :math:`E` which contains an inductive definition. .. inference:: Ind \WFE{Γ} \ind{p}{Γ_I}{Γ_C} ∈ E (a:A)∈Γ_I --------------------- E[Γ] ⊢ a : A .. inference:: Constr \WFE{Γ} \ind{p}{Γ_I}{Γ_C} ∈ E (c:C)∈Γ_C --------------------- E[Γ] ⊢ c : C .. example:: Provided that our global environment :math:`E` contains inductive definitions we showed before, these two inference rules above enable us to conclude that: .. 
math:: \begin{array}{l} E[Γ] ⊢ \even : \nat→\Prop\\ E[Γ] ⊢ \odd : \nat→\Prop\\ E[Γ] ⊢ \evenO : \even~\nO\\ E[Γ] ⊢ \evenS : ∀ n:\nat,~\odd~n → \even~(\nS~n)\\ E[Γ] ⊢ \oddS : ∀ n:\nat,~\even~n → \odd~(\nS~n) \end{array} .. _Well-formed-inductive-definitions: Well-formed inductive definitions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We cannot accept any inductive definition because some of them lead to inconsistent systems. We restrict ourselves to definitions which satisfy a syntactic criterion of positivity. Before giving the formal rules, we need a few definitions: Arity of a given sort +++++++++++++++++++++ A type :math:`T` is an *arity of sort* :math:`s` if it converts to the sort :math:`s` or to a product :math:`∀ x:T,~U` with :math:`U` an arity of sort :math:`s`. .. example:: :math:`A→\Set` is an arity of sort :math:`\Set`. :math:`∀ A:\Prop,~A→ \Prop` is an arity of sort :math:`\Prop`. Arity +++++ A type :math:`T` is an *arity* if there is a :math:`s∈ \Sort` such that :math:`T` is an arity of sort :math:`s`. .. example:: :math:`A→ \Set` and :math:`∀ A:\Prop,~A→ \Prop` are arities. .. Convention in describing inductive types: k is the number of inductive types (I_i : forall params, A_i) n is the number of constructors in the whole block (c_i : forall params, C_i) r is the number of parameters l is the size of the context of parameters (p_i : P_i) m is the number of recursively non-uniform parameters among parameters s is the number of indices q = r+s is the number of parameters and indices Type of constructor +++++++++++++++++++ We say that :math:`T` is a *type of constructor of* :math:`I` in one of the following two cases: + :math:`T` is :math:`(I~t_1 … t_q )` + :math:`T` is :math:`∀ x:U,~T'` where :math:`T'` is also a type of constructor of :math:`I` .. example:: :math:`\nat` and :math:`\nat→\nat` are types of constructor of :math:`\nat`. :math:`∀ A:\Type,~\List~A` and :math:`∀ A:\Type,~A→\List~A→\List~A` are types of constructor of :math:`\List`. .. _positivity: Positivity Condition ++++++++++++++++++++ The type of constructor :math:`T` will be said to *satisfy the positivity condition* for a set of constants :math:`X_1 … X_k` in the following cases: + :math:`T=(X_j~t_1 … t_q )` for some :math:`j` and no :math:`X_1 … X_k` occur free in any :math:`t_i` + :math:`T=∀ x:U,~V` and :math:`X_1 … X_k` occur only strictly positively in :math:`U` and the type :math:`V` satisfies the positivity condition for :math:`X_1 … X_k`. Strict positivity +++++++++++++++++ The constants :math:`X_1 … X_k` *occur strictly positively* in :math:`T` in the following cases: + no :math:`X_1 … X_k` occur in :math:`T` + :math:`T` converts to :math:`(X_j~t_1 … t_q )` for some :math:`j` and no :math:`X_1 … X_k` occur in any of :math:`t_i` + :math:`T` converts to :math:`∀ x:U,~V` and :math:`X_1 … X_k` occur strictly positively in type :math:`V` but none of them occur in :math:`U` + :math:`T` converts to :math:`(I~a_1 … a_r~t_1 … t_s )` where :math:`I` is the name of an inductive definition of the form .. 
math:: \ind{r}{I:A}{c_1 :∀ p_1 :P_1 ,… ∀p_r :P_r ,~C_1 ;~…;~c_n :∀ p_1 :P_1 ,… ∀p_r :P_r ,~C_n} (in particular, it is not mutually defined and it has :math:`r` parameters) and no :math:`X_1 … X_k` occur in any of the :math:`t_i` nor in any of the :math:`a_j` for :math:`m < j ≤ r` where :math:`m ≤ r` is the number of recursively uniform parameters, and the (instantiated) types of constructor :math:`\subst{C_i}{p_j}{a_j}_{j=1… m}` of :math:`I` satisfy the nested positivity condition for :math:`X_1 … X_k` Nested Positivity +++++++++++++++++ If :math:`I` is a non-mutual inductive type with :math:`r` parameters, then, the type of constructor :math:`T` of :math:`I` *satisfies the nested positivity condition* for a set of constants :math:`X_1 … X_k` in the following cases: + :math:`T=(I~b_1 … b_r~u_1 … u_s)` and no :math:`X_1 … X_k` occur in any :math:`u_i` nor in any of the :math:`b_j` for :math:`m < j ≤ r` where :math:`m ≤ r` is the number of recursively uniform parameters + :math:`T=∀ x:U,~V` and :math:`X_1 … X_k` occur only strictly positively in :math:`U` and the type :math:`V` satisfies the nested positivity condition for :math:`X_1 … X_k` .. example:: For instance, if one considers the following variant of a tree type branching over the natural numbers: .. coqtop:: in Inductive nattree (A:Type) : Type := | leaf : nattree A | natnode : A -> (nat -> nattree A) -> nattree A. Then every instantiated constructor of ``nattree A`` satisfies the nested positivity condition for ``nattree``: + Type ``nattree A`` of constructor ``leaf`` satisfies the positivity condition for ``nattree`` because ``nattree`` does not appear in any (real) arguments of the type of that constructor (primarily because ``nattree`` does not have any (real) arguments) ... (bullet 1) + Type ``A → (nat → nattree A) → nattree A`` of constructor ``natnode`` satisfies the positivity condition for ``nattree`` because: - ``nattree`` occurs only strictly positively in ``A`` ... (bullet 1) - ``nattree`` occurs only strictly positively in ``nat → nattree A`` ... (bullet 3 + 2) - ``nattree`` satisfies the positivity condition for ``nattree A`` ... (bullet 1) .. _Correctness-rules: Correctness rules +++++++++++++++++ We shall now describe the rules allowing the introduction of a new inductive definition. Let :math:`E` be a global environment and :math:`Γ_P`, :math:`Γ_I`, :math:`Γ_C` be contexts such that :math:`Γ_I` is :math:`[I_1 :∀ Γ_P ,A_1 ;~…;~I_k :∀ Γ_P ,A_k]`, and :math:`Γ_C` is :math:`[c_1:∀ Γ_P ,C_1 ;~…;~c_n :∀ Γ_P ,C_n ]`. Then .. inference:: W-Ind \WFE{Γ_P} (E[Γ_I ;Γ_P ] ⊢ C_i : s_{q_i} )_{i=1… n} ------------------------------------------ \WF{E;~\ind{l}{Γ_I}{Γ_C}}{} provided that the following side conditions hold: + :math:`k>0` and all of :math:`I_j` and :math:`c_i` are distinct names for :math:`j=1… k` and :math:`i=1… n`, + :math:`l` is the size of :math:`Γ_P` which is called the context of parameters, + for :math:`j=1… k` we have that :math:`A_j` is an arity of sort :math:`s_j` and :math:`I_j ∉ E`, + for :math:`i=1… n` we have that :math:`C_i` is a type of constructor of :math:`I_{q_i}` which satisfies the positivity condition for :math:`I_1 … I_k` and :math:`c_i ∉ E`. 
One can remark that there is a constraint between the sort of the arity of the inductive type and the sort of the type of its constructors, which will always be satisfied for the impredicative sorts :math:`\SProp` and :math:`\Prop` but may fail when defining an inductive type in sort :math:`\Set`, and may generate constraints between universes for inductive types in the Type hierarchy. .. example:: It is well known that the existential quantifier can be encoded as an inductive definition. The following declaration introduces the second-order existential quantifier :math:`∃ X.P(X)`. .. coqtop:: in Inductive exProp (P:Prop->Prop) : Prop := | exP_intro : forall X:Prop, P X -> exProp P. The same definition on :math:`\Set` is not allowed and fails: .. coqtop:: all Fail Inductive exSet (P:Set->Prop) : Set := exS_intro : forall X:Set, P X -> exSet P. It is possible to declare the same inductive definition in the universe :math:`\Type`. The :g:`exType` inductive definition has type :math:`(\Type(i)→\Prop)→\Type(j)` with the constraint that the parameter :math:`X` of :math:`\kw{exT}_{\kw{intro}}` has type :math:`\Type(k)` with :math:`k<j` and :math:`k≤i`. .. coqtop:: in Inductive exType (P:Type->Prop) : Type := exT_intro : forall X:Type, P X -> exType P. .. example:: Negative occurrence (first example) The following inductive definition is rejected because it does not satisfy the positivity condition: .. coqtop:: all Fail Inductive I : Prop := not_I_I (not_I : I -> False) : I. If we were to accept such a definition, we could derive a contradiction from it (we can test this by disabling the :flag:`Positivity Checking` flag): .. coqtop:: none #[bypass_check(positivity)] Inductive I : Prop := not_I_I (not_I : I -> False) : I. .. coqtop:: all Definition I_not_I : I -> ~ I := fun i => match i with not_I_I not_I => not_I end. .. coqtop:: in Lemma contradiction : False. Proof. enough (I /\ ~ I) as [] by contradiction. split. - apply not_I_I. intro. now apply I_not_I. - intro. now apply I_not_I. Qed. .. example:: Negative occurrence (second example) Here is another example of an inductive definition which is rejected because it does not satisfy the positivity condition: .. coqtop:: all Fail Inductive Lam := lam (_ : Lam -> Lam). Again, if we were to accept it, we could derive a contradiction (this time through a non-terminating recursive function): .. coqtop:: none #[bypass_check(positivity)] Inductive Lam := lam (_ : Lam -> Lam). .. coqtop:: all Fixpoint infinite_loop l : False := match l with lam x => infinite_loop (x l) end. Check infinite_loop (lam (@id Lam)) : False. .. example:: Non strictly positive occurrence It is less obvious why inductive type definitions with occurrences that are positive but not strictly positive are harmful. We will see that in the presence of an impredicative type they are unsound: .. coqtop:: all Fail Inductive A: Type := introA: ((A -> Prop) -> Prop) -> A. If we were to accept this definition, we could derive a contradiction by creating an injective function from :math:`A → \Prop` to :math:`A`. This function is defined by composing the injective constructor of the type :math:`A` with the function :math:`λx. λz. z = x` injecting any type :math:`T` into :math:`T → \Prop`. .. coqtop:: none #[bypass_check(positivity)] Inductive A: Type := introA: ((A -> Prop) -> Prop) -> A. .. coqtop:: all Definition f (x: A -> Prop): A := introA (fun z => z = x). .. coqtop:: in Lemma f_inj: forall x y, f x = f y -> x = y. Proof. unfold f; intros ? ? H; injection H. set (F := fun z => z = y); intro HF. symmetry; replace (y = x) with (F y). + unfold F; reflexivity.
+ rewrite <- HF; reflexivity. Qed. The type :math:`A → \Prop` can be understood as the powerset of the type :math:`A`. To derive a contradiction from the injective function :math:`f` we use Cantor's classic diagonal argument. .. coqtop:: all Definition d: A -> Prop := fun x => exists s, x = f s /\ ~s x. Definition fd: A := f d. .. coqtop:: in Lemma cantor: (d fd) <-> ~(d fd). Proof. split. + intros [s [H1 H2]]; unfold fd in H1. replace d with s. * assumption. * apply f_inj; congruence. + intro; exists d; tauto. Qed. Lemma bad: False. Proof. pose cantor; tauto. Qed. This derivation was first presented by Thierry Coquand and Christine Paulin in :cite:`CP90`. .. _Template-polymorphism: Template polymorphism +++++++++++++++++++++ Inductive types can be made polymorphic over the universes introduced by their parameters in :math:`\Type`, if the minimal inferred sort of the inductive declarations either mention some of those parameter universes or is computed to be :math:`\Prop` or :math:`\Set`. If :math:`A` is an arity of some sort and :math:`s` is a sort, we write :math:`A_{/s}` for the arity obtained from :math:`A` by replacing its sort with :math:`s`. Especially, if :math:`A` is well-typed in some global environment and local context, then :math:`A_{/s}` is typable by typability of all products in the Calculus of Inductive Constructions. The following typing rule is added to the theory. Let :math:`\ind{p}{Γ_I}{Γ_C}` be an inductive definition. Let :math:`Γ_P = [p_1 :P_1 ;~…;~p_p :P_p ]` be its context of parameters, :math:`Γ_I = [I_1:∀ Γ_P ,A_1 ;~…;~I_k :∀ Γ_P ,A_k ]` its context of definitions and :math:`Γ_C = [c_1 :∀ Γ_P ,C_1 ;~…;~c_n :∀ Γ_P ,C_n]` its context of constructors, with :math:`c_i` a constructor of :math:`I_{q_i}`. Let :math:`m ≤ p` be the length of the longest prefix of parameters such that the :math:`m` first arguments of all occurrences of all :math:`I_j` in all :math:`C_k` (even the occurrences in the hypotheses of :math:`C_k`) are exactly applied to :math:`p_1 … p_m` (:math:`m` is the number of *recursively uniform parameters* and the :math:`p−m` remaining parameters are the *recursively non-uniform parameters*). Let :math:`q_1 , …, q_r`, with :math:`0≤ r≤ m`, be a (possibly) partial instantiation of the recursively uniform parameters of :math:`Γ_P`. We have: .. inference:: Ind-Family \left\{\begin{array}{l} \ind{p}{Γ_I}{Γ_C} \in E\\ (E[] ⊢ q_l : P'_l)_{l=1\ldots r}\\ (E[] ⊢ P'_l ≤_{βδιζη} \subst{P_l}{p_u}{q_u}_{u=1\ldots l-1})_{l=1\ldots r}\\ 1 \leq j \leq k \end{array} \right. ----------------------------- E[] ⊢ I_j~q_1 … q_r :∀ [p_{r+1} :P_{r+1} ;~…;~p_p :P_p], (A_j)_{/s_j} provided that the following side conditions hold: + :math:`Γ_{P′}` is the context obtained from :math:`Γ_P` by replacing each :math:`P_l` that is an arity with :math:`P_l'` for :math:`1≤ l ≤ r` (notice that :math:`P_l` arity implies :math:`P_l'` arity since :math:`E[] ⊢ P_l' ≤_{βδιζη} \subst{P_l}{p_u}{q_u}_{u=1\ldots l-1}`); + there are sorts :math:`s_i`, for :math:`1 ≤ i ≤ k` such that, for :math:`Γ_{I'} = [I_1 :∀ Γ_{P'} ,(A_1)_{/s_1} ;~…;~I_k :∀ Γ_{P'} ,(A_k)_{/s_k}]` we have :math:`(E[Γ_{I′} ;Γ_{P′}] ⊢ C_i : s_{q_i})_{i=1… n}` ; + the sorts :math:`s_i` are all introduced by the inductive declaration and have no universe constraints beside being greater than or equal to :math:`\Prop`, and such that all eliminations, to :math:`\Prop`, :math:`\Set` and :math:`\Type(j)`, are allowed (see Section :ref:`Destructors`). 
Notice that if :math:`I_j~q_1 … q_r` is typable using the rules **Ind-Const** and **App**, then it is typable using the rule **Ind-Family**. Conversely, the extended theory is not stronger than the theory without **Ind-Family**. We get an equiconsistency result by mapping each :math:`\ind{p}{Γ_I}{Γ_C}` occurring in a given derivation into as many different inductive types and constructors as the number of different (partial) replacements of sorts, needed for this derivation, in the parameters that are arities (this is possible because :math:`\ind{p}{Γ_I}{Γ_C}` well-formed implies that :math:`\ind{p}{Γ_{I'}}{Γ_{C'}}` is well-formed and has the same allowed eliminations, where :math:`Γ_{I′}` is defined as above and :math:`Γ_{C′} = [c_1 :∀ Γ_{P′} ,C_1 ;~…;~c_n :∀ Γ_{P′} ,C_n ]`). That is, the changes in the types of each partial instance :math:`q_1 … q_r` can be characterized by the ordered sets of arity sorts among the types of parameters, and to each signature is associated a new inductive definition with fresh names. Conversion is preserved as any (partial) instance :math:`I_j~q_1 … q_r` or :math:`C_i~q_1 … q_r` is mapped to the names chosen in the specific instance of :math:`\ind{p}{Γ_I}{Γ_C}`. .. warning:: The restriction that sorts are introduced by the inductive declaration prevents inductive types declared in sections from being template-polymorphic on universes introduced previously in the section: they cannot parameterize over the universes introduced with section variables that become parameters at section closing time, as these may be shared with other definitions from the same section which can impose constraints on them. .. flag:: Auto Template Polymorphism This :term:`flag`, enabled by default, makes every inductive type declared at level :math:`\Type` (without annotations or hiding it behind a definition) template polymorphic if possible. This can be prevented using the :attr:`universes(template=no) <universes(template)>` attribute. Template polymorphism and full universe polymorphism (see Chapter :ref:`polymorphicuniverses`) are incompatible, so if the latter is enabled (through the :flag:`Universe Polymorphism` flag or the :attr:`universes(polymorphic)` attribute) it will prevail over automatic template polymorphism. .. warn:: Automatically declaring @ident as template polymorphic. Warning ``auto-template`` can be used (it is off by default) to find which types are implicitly declared template polymorphic by :flag:`Auto Template Polymorphism`. An inductive type can be forced to be template polymorphic using the :attr:`universes(template)` attribute: in this case, the warning is not emitted. .. attr:: universes(template{? = {| yes | no } }) :name: universes(template) This :term:`boolean attribute` can be used to explicitly declare an inductive type as template polymorphic, whether the :flag:`Auto Template Polymorphism` flag is on or off. .. exn:: template and polymorphism not compatible This attribute cannot be used in a full universe polymorphic context, i.e. if the :flag:`Universe Polymorphism` flag is on or if the :attr:`universes(polymorphic)` attribute is used. .. exn:: Ill-formed template inductive declaration: not polymorphic on any universe. The attribute was used but the inductive definition does not satisfy the criterion to be template polymorphic. When ``universes(template=no)`` is used, it prevents an inductive type from being template polymorphic, even if the :flag:`Auto Template Polymorphism` flag is on.
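As a small sketch of how these attributes are attached to a declaration (the type names ``tpair`` and ``tbox`` are arbitrary and not part of the standard library):

.. coqdoc::

   (* Explicitly request template polymorphism; this fails with
      "Ill-formed template inductive declaration" if the criterion is not met. *)
   #[universes(template)]
   Inductive tpair (A B : Type) : Type := TPair (_ : A) (_ : B).

   (* Opt out of automatic template polymorphism for this type only. *)
   #[universes(template=no)]
   Inductive tbox (A : Type) : Type := TBox (_ : A).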
In practice, the rule **Ind-Family** is used by Coq only when all the inductive types of the inductive definition are declared with an arity whose sort is in the Type hierarchy. Then, the polymorphism is over the parameters whose type is an arity of sort in the Type hierarchy. The sorts :math:`s_j` are chosen canonically so that each :math:`s_j` is minimal with respect to the hierarchy :math:`\Prop ⊂ \Set_p ⊂ \Type` where :math:`\Set_p` is predicative :math:`\Set`. More precisely, an empty or small singleton inductive definition (i.e. an inductive definition of which all inductive types are singleton – see Section :ref:`Destructors`) is set in :math:`\Prop`, a small non-singleton inductive type is set in :math:`\Set` (even in case :math:`\Set` is impredicative – see :ref:`The-Calculus-of-Inductive-Construction-with-impredicative-Set`), and otherwise in the Type hierarchy. Note that the side-condition about allowed elimination sorts in the rule **Ind-Family** avoids to recompute the allowed elimination sorts at each instance of a pattern matching (see Section :ref:`Destructors`). As an example, let us consider the following definition: .. example:: .. coqtop:: in Inductive option (A:Type) : Type := | None : option A | Some : A -> option A. As the definition is set in the Type hierarchy, it is used polymorphically over its parameters whose types are arities of a sort in the Type hierarchy. Here, the parameter :math:`A` has this property, hence, if :g:`option` is applied to a type in :math:`\Set`, the result is in :math:`\Set`. Note that if :g:`option` is applied to a type in :math:`\Prop`, then, the result is not set in :math:`\Prop` but in :math:`\Set` still. This is because :g:`option` is not a singleton type (see Section :ref:`Destructors`) and it would lose the elimination to :math:`\Set` and :math:`\Type` if set in :math:`\Prop`. .. example:: .. coqtop:: all Check (fun A:Set => option A). Check (fun A:Prop => option A). Here is another example. .. example:: .. coqtop:: in Inductive prod (A B:Type) : Type := pair : A -> B -> prod A B. As :g:`prod` is a singleton type, it will be in :math:`\Prop` if applied twice to propositions, in :math:`\Set` if applied twice to at least one type in :math:`\Set` and none in :math:`\Type`, and in :math:`\Type` otherwise. In all cases, the three kind of eliminations schemes are allowed. .. example:: .. coqtop:: all Check (fun A:Set => prod A). Check (fun A:Prop => prod A A). Check (fun (A:Prop) (B:Set) => prod A B). Check (fun (A:Type) (B:Prop) => prod A B). .. note:: Template polymorphism used to be called “sort-polymorphism of inductive types” before universe polymorphism (see Chapter :ref:`polymorphicuniverses`) was introduced. .. _Destructors: Destructors ~~~~~~~~~~~~~~~~~ The specification of inductive definitions with arities and constructors is quite natural. But we still have to say how to use an object in an inductive type. This problem is rather delicate. There are actually several different ways to do that. Some of them are logically equivalent but not always equivalent from the computational point of view or from the user point of view. From the computational point of view, we want to be able to define a function whose domain is an inductively defined type by using a combination of case analysis over the possible constructors of the object and recursion. Because we need to keep a consistent theory and also we prefer to keep a strongly normalizing reduction, we cannot accept any sort of recursion (even terminating). 
So the basic idea is to restrict ourselves to primitive recursive functions and functionals. For instance, assuming a parameter :math:`A:\Set` exists in the local context, we want to build a function :math:`\length` of type :math:`\List~A → \nat` which computes the length of the list, such that :math:`(\length~(\Nil~A)) = \nO` and :math:`(\length~(\cons~A~a~l)) = (\nS~(\length~l))`. We want these equalities to be recognized implicitly and taken into account in the conversion rule. From the logical point of view, we have built a type family by giving a set of constructors. We want to capture the fact that we do not have any other way to build an object in this type. So when trying to prove a property about an object :math:`m` in an inductive type it is enough to enumerate all the cases where :math:`m` starts with a different constructor. In case the inductive definition is effectively a recursive one, we want to capture the extra property that we have built the smallest fixed point of this recursive equation. This says that we are only manipulating finite objects. This analysis provides induction principles. For instance, in order to prove :math:`∀ l:\List~A,~(\kw{has}\_\kw{length}~A~l~(\length~l))` it is enough to prove: + :math:`(\kw{has}\_\kw{length}~A~(\Nil~A)~(\length~(\Nil~A)))` + :math:`∀ a:A,~∀ l:\List~A,~(\kw{has}\_\kw{length}~A~l~(\length~l)) →` :math:`(\kw{has}\_\kw{length}~A~(\cons~A~a~l)~(\length~(\cons~A~a~l)))` which given the conversion equalities satisfied by :math:`\length` is the same as proving: + :math:`(\kw{has}\_\kw{length}~A~(\Nil~A)~\nO)` + :math:`∀ a:A,~∀ l:\List~A,~(\kw{has}\_\kw{length}~A~l~(\length~l)) →` :math:`(\kw{has}\_\kw{length}~A~(\cons~A~a~l)~(\nS~(\length~l)))` One conceptually simple way to do that, following the basic scheme proposed by Martin-Löf in his Intuitionistic Type Theory, is to introduce for each inductive definition an elimination operator. At the logical level it is a proof of the usual induction principle and at the computational level it implements a generic operator for doing primitive recursion over the structure. But this operator is rather tedious to implement and use. We choose in this version of Coq to factorize the operator for primitive recursion into two more primitive operations as was first suggested by Th. Coquand in :cite:`Coq92`. One is the definition by pattern matching. The second one is a definition by guarded fixpoints. .. _match-construction: The match ... with ... end construction +++++++++++++++++++++++++++++++++++++++ The basic idea of this operator is that we have an object :math:`m` in an inductive type :math:`I` and we want to prove a property which possibly depends on :math:`m`. For this, it is enough to prove the property for :math:`m = (c_i~u_1 … u_{p_i} )` for each constructor of :math:`I`. The Coq term for this proof will be written: .. math:: \Match~m~\with~(c_1~x_{11} ... x_{1p_1} ) ⇒ f_1 | … | (c_n~x_{n1} ... x_{np_n} ) ⇒ f_n~\kwend In this expression, if :math:`m` eventually happens to evaluate to :math:`(c_i~u_1 … u_{p_i})` then the expression will behave as specified in its :math:`i`-th branch and it will reduce to :math:`f_i` where the :math:`x_{i1} …x_{ip_i}` are replaced by the :math:`u_1 … u_{p_i}` according to the ι-reduction. Actually, for type checking a :math:`\Match…\with…\kwend` expression we also need to know the predicate :math:`P` to be proved by case analysis. 
In the general case where :math:`I` is an inductively defined :math:`n`-ary relation, :math:`P` is a predicate over :math:`n+1` arguments: the :math:`n` first ones correspond to the arguments of :math:`I` (parameters excluded), and the last one corresponds to object :math:`m`. Coq can sometimes infer this predicate but sometimes not. The concrete syntax for describing this predicate uses the :math:`\as…\In…\return` construction. For instance, let us assume that :math:`I` is an unary predicate with one parameter and one argument. The predicate is made explicit using the syntax: .. math:: \Match~m~\as~x~\In~I~\_~a~\return~P~\with~ (c_1~x_{11} ... x_{1p_1} ) ⇒ f_1 | … | (c_n~x_{n1} ... x_{np_n} ) ⇒ f_n~\kwend The :math:`\as` part can be omitted if either the result type does not depend on :math:`m` (non-dependent elimination) or :math:`m` is a variable (in this case, :math:`m` can occur in :math:`P` where it is considered a bound variable). The :math:`\In` part can be omitted if the result type does not depend on the arguments of :math:`I`. Note that the arguments of :math:`I` corresponding to parameters *must* be :math:`\_`, because the result type is not generalized to all possible values of the parameters. The other arguments of :math:`I` (sometimes called indices in the literature) have to be variables (:math:`a` above) and these variables can occur in :math:`P`. The expression after :math:`\In` must be seen as an *inductive type pattern*. Notice that expansion of implicit arguments and notations apply to this pattern. For the purpose of presenting the inference rules, we use a more compact notation: .. math:: \case(m,(λ a x . P), λ x_{11} ... x_{1p_1} . f_1~| … |~λ x_{n1} ...x_{np_n} . f_n ) .. _Allowed-elimination-sorts: **Allowed elimination sorts.** An important question for building the typing rule for :math:`\Match` is what can be the type of :math:`λ a x . P` with respect to the type of :math:`m`. If :math:`m:I` and :math:`I:A` and :math:`λ a x . P : B` then by :math:`[I:A|B]` we mean that one can use :math:`λ a x . P` with :math:`m` in the above match-construct. .. _cic_notations: **Notations.** The :math:`[I:A|B]` is defined as the smallest relation satisfying the following rules: We write :math:`[I|B]` for :math:`[I:A|B]` where :math:`A` is the type of :math:`I`. The case of inductive types in sorts :math:`\Set` or :math:`\Type` is simple. There is no restriction on the sort of the predicate to be eliminated. .. inference:: Prod [(I~x):A′|B′] ----------------------- [I:∀ x:A,~A′|∀ x:A,~B′] .. inference:: Set & Type s_1 ∈ \{\Set,\Type(j)\} s_2 ∈ \Sort ---------------- [I:s_1 |I→ s_2 ] The case of Inductive definitions of sort :math:`\Prop` is a bit more complicated, because of our interpretation of this sort. The only harmless allowed eliminations, are the ones when predicate :math:`P` is also of sort :math:`\Prop` or is of the morally smaller sort :math:`\SProp`. .. inference:: Prop s ∈ \{\SProp,\Prop\} -------------------- [I:\Prop|I→s] :math:`\Prop` is the type of logical propositions, the proofs of properties :math:`P` in :math:`\Prop` could not be used for computation and are consequently ignored by the extraction mechanism. Assume :math:`A` and :math:`B` are two propositions, and the logical disjunction :math:`A ∨ B` is defined inductively by: .. example:: .. coqtop:: in Inductive or (A B:Prop) : Prop := or_introl : A -> or A B | or_intror : B -> or A B. The following definition which computes a boolean value by case over the proof of :g:`or A B` is not accepted: .. 
example:: .. coqtop:: all Fail Definition choice (A B: Prop) (x:or A B) := match x with or_introl _ _ a => true | or_intror _ _ b => false end. From the computational point of view, the structure of the proof of :g:`(or A B)` in this term is needed for computing the boolean value. In general, if :math:`I` has type :math:`\Prop` then :math:`P` cannot have type :math:`I→\Set`, because it will mean to build an informative proof of type :math:`(P~m)` doing a case analysis over a non-computational object that will disappear in the extracted program. But the other way is safe with respect to our interpretation we can have :math:`I` a computational object and :math:`P` a non-computational one, it just corresponds to proving a logical property of a computational object. In the same spirit, elimination on :math:`P` of type :math:`I→\Type` cannot be allowed because it trivially implies the elimination on :math:`P` of type :math:`I→ \Set` by cumulativity. It also implies that there are two proofs of the same property which are provably different, contradicting the proof-irrelevance property which is sometimes a useful axiom: .. example:: .. coqtop:: all Axiom proof_irrelevance : forall (P : Prop) (x y : P), x=y. The elimination of an inductive type of sort :math:`\Prop` on a predicate :math:`P` of type :math:`I→ \Type` leads to a paradox when applied to impredicative inductive definition like the second-order existential quantifier :g:`exProp` defined above, because it gives access to the two projections on this type. .. _Empty-and-singleton-elimination: **Empty and singleton elimination.** There are special inductive definitions in :math:`\Prop` for which more eliminations are allowed. .. inference:: Prop-extended I~\kw{is an empty or singleton definition} s ∈ \Sort ------------------------------------- [I:\Prop|I→ s] A *singleton definition* has only one constructor and all the arguments of this constructor have type :math:`\Prop`. In that case, there is a canonical way to interpret the informative extraction on an object in that type, such that the elimination on any sort :math:`s` is legal. Typical examples are the conjunction of non-informative propositions and the equality. If there is a hypothesis :math:`h:a=b` in the local context, it can be used for rewriting not only in logical propositions but also in any type. .. example:: .. coqtop:: all Print eq_rec. Require Extraction. Extraction eq_rec. An empty definition has no constructors, in that case also, elimination on any sort is allowed. .. _Eliminaton-for-SProp: Inductive types in :math:`\SProp` must have no constructors (i.e. be empty) to be eliminated to produce relevant values. Note that thanks to proof irrelevance elimination functions can be produced for other types, for instance the elimination for a unit type is the identity. .. _Type-of-branches: **Type of branches.** Let :math:`c` be a term of type :math:`C`, we assume :math:`C` is a type of constructor for an inductive type :math:`I`. Let :math:`P` be a term that represents the property to be proved. We assume :math:`r` is the number of parameters and :math:`s` is the number of arguments. We define a new type :math:`\{c:C\}^P` which represents the type of the branch corresponding to the :math:`c:C` constructor. .. math:: \begin{array}{ll} \{c:(I~q_1\ldots q_r\ t_1 \ldots t_s)\}^P &\equiv (P~t_1\ldots ~t_s~c) \\ \{c:∀ x:T,~C\}^P &\equiv ∀ x:T,~\{(c~x):C\}^P \end{array} We write :math:`\{c\}^P` for :math:`\{c:C\}^P` with :math:`C` the type of :math:`c`. .. 
example:: The following term in concrete syntax:: match t as l return P' with | nil _ => t1 | cons _ hd tl => t2 end can be represented in abstract syntax as .. math:: \case(t,P,f_1 | f_2 ) where .. math:: :nowrap: \begin{eqnarray*} P & = & λ l.~P^\prime\\ f_1 & = & t_1\\ f_2 & = & λ (hd:\nat).~λ (tl:\List~\nat).~t_2 \end{eqnarray*} According to the definition: .. math:: \{(\Nil~\nat)\}^P ≡ \{(\Nil~\nat) : (\List~\nat)\}^P ≡ (P~(\Nil~\nat)) .. math:: \begin{array}{rl} \{(\cons~\nat)\}^P & ≡\{(\cons~\nat) : (\nat→\List~\nat→\List~\nat)\}^P \\ & ≡∀ n:\nat,~\{(\cons~\nat~n) : (\List~\nat→\List~\nat)\}^P \\ & ≡∀ n:\nat,~∀ l:\List~\nat,~\{(\cons~\nat~n~l) : (\List~\nat)\}^P \\ & ≡∀ n:\nat,~∀ l:\List~\nat,~(P~(\cons~\nat~n~l)). \end{array} Given some :math:`P` then :math:`\{(\Nil~\nat)\}^P` represents the expected type of :math:`f_1`, and :math:`\{(\cons~\nat)\}^P` represents the expected type of :math:`f_2`. .. _Typing-rule: **Typing rule.** Our very general destructor for inductive definitions has the following typing rule .. inference:: match \begin{array}{l} E[Γ] ⊢ c : (I~q_1 … q_r~t_1 … t_s ) \\ E[Γ] ⊢ P : B \\ [(I~q_1 … q_r)|B] \\ (E[Γ] ⊢ f_i : \{(c_{p_i}~q_1 … q_r)\}^P)_{i=1… l} \end{array} ------------------------------------------------ E[Γ] ⊢ \case(c,P,f_1 |… |f_l ) : (P~t_1 … t_s~c) provided :math:`I` is an inductive type in a definition :math:`\ind{r}{Γ_I}{Γ_C}` with :math:`Γ_C = [c_1 :C_1 ;~…;~c_n :C_n ]` and :math:`c_{p_1} … c_{p_l}` are the only constructors of :math:`I`. .. example:: Below is a typing rule for the term shown in the previous example: .. inference:: list example \begin{array}{l} E[Γ] ⊢ t : (\List ~\nat) \\ E[Γ] ⊢ P : B \\ [(\List ~\nat)|B] \\ E[Γ] ⊢ f_1 : \{(\Nil ~\nat)\}^P \\ E[Γ] ⊢ f_2 : \{(\cons ~\nat)\}^P \end{array} ------------------------------------------------ E[Γ] ⊢ \case(t,P,f_1 |f_2 ) : (P~t) .. _Definition-of-ι-reduction: **Definition of ι-reduction.** We still have to define the ι-reduction in the general case. An ι-redex is a term of the following form: .. math:: \case((c_{p_i}~q_1 … q_r~a_1 … a_m ),P,f_1 |… |f_l ) with :math:`c_{p_i}` the :math:`i`-th constructor of the inductive type :math:`I` with :math:`r` parameters. The ι-contraction of this term is :math:`(f_i~a_1 … a_m )` leading to the general reduction rule: .. math:: \case((c_{p_i}~q_1 … q_r~a_1 … a_m ),P,f_1 |… |f_l ) \triangleright_ι (f_i~a_1 … a_m ) .. _Fixpoint-definitions: Fixpoint definitions ~~~~~~~~~~~~~~~~~~~~ The second operator for elimination is fixpoint definition. This fixpoint may involve several mutually recursive definitions. The basic concrete syntax for a recursive set of mutually recursive declarations is (with :math:`Γ_i` contexts): .. math:: \fix~f_1 (Γ_1 ) :A_1 :=t_1~\with … \with~f_n (Γ_n ) :A_n :=t_n The terms are obtained by projections from this set of declarations and are written .. math:: \fix~f_1 (Γ_1 ) :A_1 :=t_1~\with … \with~f_n (Γ_n ) :A_n :=t_n~\for~f_i In the inference rules, we represent such a term by .. math:: \Fix~f_i\{f_1 :A_1':=t_1' … f_n :A_n':=t_n'\} with :math:`t_i'` (resp. :math:`A_i'`) representing the term :math:`t_i` abstracted (resp. generalized) with respect to the bindings in the context :math:`Γ_i`, namely :math:`t_i'=λ Γ_i . t_i` and :math:`A_i'=∀ Γ_i , A_i`. Typing rule +++++++++++ The typing rule is the expected one for a fixpoint. .. 
inference:: Fix (E[Γ] ⊢ A_i : s_i )_{i=1… n} (E[Γ;~f_1 :A_1 ;~…;~f_n :A_n ] ⊢ t_i : A_i )_{i=1… n} ------------------------------------------------------- E[Γ] ⊢ \Fix~f_i\{f_1 :A_1 :=t_1 … f_n :A_n :=t_n \} : A_i Any fixpoint definition cannot be accepted because non-normalizing terms allow proofs of absurdity. The basic scheme of recursion that should be allowed is the one needed for defining primitive recursive functionals. In that case the fixpoint enjoys a special syntactic restriction, namely one of the arguments belongs to an inductive type, the function starts with a case analysis and recursive calls are done on variables coming from patterns and representing subterms. For instance in the case of natural numbers, a proof of the induction principle of type .. math:: ∀ P:\nat→\Prop,~(P~\nO)→(∀ n:\nat,~(P~n)→(P~(\nS~n)))→ ∀ n:\nat,~(P~n) can be represented by the term: .. math:: \begin{array}{l} λ P:\nat→\Prop.~λ f:(P~\nO).~λ g:(∀ n:\nat,~(P~n)→(P~(\nS~n))).\\ \Fix~h\{h:∀ n:\nat,~(P~n):=λ n:\nat.~\case(n,P,f | λp:\nat.~(g~p~(h~p)))\} \end{array} Before accepting a fixpoint definition as being correctly typed, we check that the definition is “guarded”. A precise analysis of this notion can be found in :cite:`Gim94`. The first stage is to precise on which argument the fixpoint will be decreasing. The type of this argument should be an inductive type. For doing this, the syntax of fixpoints is extended and becomes .. math:: \Fix~f_i\{f_1/k_1 :A_1:=t_1 … f_n/k_n :A_n:=t_n\} where :math:`k_i` are positive integers. Each :math:`k_i` represents the index of parameter of :math:`f_i`, on which :math:`f_i` is decreasing. Each :math:`A_i` should be a type (reducible to a term) starting with at least :math:`k_i` products :math:`∀ y_1 :B_1 ,~… ∀ y_{k_i} :B_{k_i} ,~A_i'` and :math:`B_{k_i}` an inductive type. Now in the definition :math:`t_i`, if :math:`f_j` occurs then it should be applied to at least :math:`k_j` arguments and the :math:`k_j`-th argument should be syntactically recognized as structurally smaller than :math:`y_{k_i}`. The definition of being structurally smaller is a bit technical. One needs first to define the notion of *recursive arguments of a constructor*. For an inductive definition :math:`\ind{r}{Γ_I}{Γ_C}`, if the type of a constructor :math:`c` has the form :math:`∀ p_1 :P_1 ,~… ∀ p_r :P_r,~∀ x_1:T_1,~… ∀ x_m :T_m,~(I_j~p_1 … p_r~t_1 … t_s )`, then the recursive arguments will correspond to :math:`T_i` in which one of the :math:`I_l` occurs. The main rules for being structurally smaller are the following. Given a variable :math:`y` of an inductively defined type in a declaration :math:`\ind{r}{Γ_I}{Γ_C}` where :math:`Γ_I` is :math:`[I_1 :A_1 ;~…;~I_k :A_k]`, and :math:`Γ_C` is :math:`[c_1 :C_1 ;~…;~c_n :C_n ]`, the terms structurally smaller than :math:`y` are: + :math:`(t~u)` and :math:`λ x:U .~t` when :math:`t` is structurally smaller than :math:`y`. + :math:`\case(c,P,f_1 … f_n)` when each :math:`f_i` is structurally smaller than :math:`y`. If :math:`c` is :math:`y` or is structurally smaller than :math:`y`, its type is an inductive type :math:`I_p` part of the inductive definition corresponding to :math:`y`. Each :math:`f_i` corresponds to a type of constructor :math:`C_q ≡ ∀ p_1 :P_1 ,~…,∀ p_r :P_r ,~∀ y_1 :B_1 ,~… ∀ y_m :B_m ,~(I_p~p_1 … p_r~t_1 … t_s )` and can consequently be written :math:`λ y_1 :B_1' .~… λ y_m :B_m'.~g_i`. 
(:math:`B_i'` is obtained from :math:`B_i` by substituting parameters for variables) the variables :math:`y_j` occurring in :math:`g_i` corresponding to recursive arguments :math:`B_i` (the ones in which one of the :math:`I_l` occurs) are structurally smaller than :math:`y`. The following definitions are correct, we enter them using the :cmd:`Fixpoint` command and show the internal representation. .. example:: .. coqtop:: all Fixpoint plus (n m:nat) {struct n} : nat := match n with | O => m | S p => S (plus p m) end. Print plus. Fixpoint lgth (A:Set) (l:list A) {struct l} : nat := match l with | nil _ => O | cons _ a l' => S (lgth A l') end. Print lgth. Fixpoint sizet (t:tree) : nat := let (f) := t in S (sizef f) with sizef (f:forest) : nat := match f with | emptyf => O | consf t f => plus (sizet t) (sizef f) end. Print sizet. .. _Reduction-rule: Reduction rule ++++++++++++++ Let :math:`F` be the set of declarations: :math:`f_1 /k_1 :A_1 :=t_1 …f_n /k_n :A_n:=t_n`. The reduction for fixpoints is: .. math:: (\Fix~f_i \{F\}~a_1 …a_{k_i}) ~\triangleright_ι~ \subst{t_i}{f_k}{\Fix~f_k \{F\}}_{k=1… n} ~a_1 … a_{k_i} when :math:`a_{k_i}` starts with a constructor. This last restriction is needed in order to keep strong normalization and corresponds to the reduction for primitive recursive operators. The following reductions are now possible: .. math:: :nowrap: \begin{eqnarray*} \plus~(\nS~(\nS~\nO))~(\nS~\nO)~& \trii & \nS~(\plus~(\nS~\nO)~(\nS~\nO))\\ & \trii & \nS~(\nS~(\plus~\nO~(\nS~\nO)))\\ & \trii & \nS~(\nS~(\nS~\nO))\\ \end{eqnarray*} .. _Mutual-induction: **Mutual induction** The principles of mutual induction can be automatically generated using the Scheme command described in Section :ref:`proofschemes-induction-principles`. coq-8.15.0/doc/sphinx/language/core/modules.rst000066400000000000000000001056001417001151100213530ustar00rootroot00000000000000.. _themodulesystem: The Module System ================= The module system extends the Calculus of Inductive Constructions providing a convenient way to structure large developments as well as a means of massive abstraction. Modules and module types ---------------------------- **Access path.** An access path is denoted by :math:`p` and can be either a module variable :math:`X` or, if :math:`p′` is an access path and :math:`id` an identifier, then :math:`p′.id` is an access path. **Structure element.** A structure element is denoted by :math:`e` and is either a definition of a :term:`constant`, an assumption, a definition of an inductive, a definition of a module, an alias of a module or a module type abbreviation. **Structure expression.** A structure expression is denoted by :math:`S` and can be: + an access path :math:`p` + a plain structure :math:`\Struct~e ; … ; e~\End` + a functor :math:`\Functor(X:S)~S′`, where :math:`X` is a module variable, :math:`S` and :math:`S′` are structure expressions + an application :math:`S~p`, where :math:`S` is a structure expression and :math:`p` an access path + a refined structure :math:`S~\with~p := p`′ or :math:`S~\with~p := t:T` where :math:`S` is a structure expression, :math:`p` and :math:`p′` are access paths, :math:`t` is a term and :math:`T` is the type of :math:`t`. **Module definition.** A module definition is written :math:`\Mod{X}{S}{S'}` and consists of a module variable :math:`X`, a module type :math:`S` which can be any structure expression and optionally a module implementation :math:`S′` which can be any structure expression except a refined structure. 
**Module alias.** A module alias is written :math:`\ModA{X}{p}` and consists of a module variable :math:`X` and a module path :math:`p`. **Module type abbreviation.** A module type abbreviation is written :math:`\ModType{Y}{S}`, where :math:`Y` is an identifier and :math:`S` is any structure expression . .. extracted from Gallina extensions chapter Using modules ------------- The module system provides a way of packaging related elements together, as well as a means of massive abstraction. .. cmd:: Module {? {| Import | Export } } @ident {* @module_binder } {? @of_module_type } {? := {+<+ @module_expr_inl } } .. insertprodn module_binder module_expr_inl .. prodn:: module_binder ::= ( {? {| Import | Export } } {+ @ident } : @module_type_inl ) module_type_inl ::= ! @module_type | @module_type {? @functor_app_annot } functor_app_annot ::= [ inline at level @natural ] | [ no inline ] module_type ::= @qualid | ( @module_type ) | @module_type @module_expr_atom | @module_type with @with_declaration with_declaration ::= Definition @qualid {? @univ_decl } := @term | Module @qualid := @qualid module_expr_atom ::= @qualid | ( {+ @module_expr_atom } ) of_module_type ::= : @module_type_inl | {* <: @module_type_inl } module_expr_inl ::= ! {+ @module_expr_atom } | {+ @module_expr_atom } {? @functor_app_annot } Defines a module named :token:`ident`. See the examples :ref:`here`. The :n:`Import` and :n:`Export` flags specify whether the module should be automatically imported or exported. Specifying :n:`{* @module_binder }` starts a functor with parameters given by the :n:`@module_binder`\s. (A *functor* is a function from modules to modules.) :n:`@of_module_type` specifies the module type. :n:`{+ <: @module_type_inl }` starts a module that satisfies each :n:`@module_type_inl`. .. todo: would like to find a better term than "interactive", not very descriptive :n:`:= {+<+ @module_expr_inl }` specifies the body of a module or functor definition. If it's not specified, then the module is defined *interactively*, meaning that the module is defined as a series of commands terminated with :cmd:`End` instead of in a single :cmd:`Module` command. Interactively defining the :n:`@module_expr_inl`\s in a series of :cmd:`Include` commands is equivalent to giving them all in a single non-interactive :cmd:`Module` command. The ! prefix indicates that any assumption command (such as :cmd:`Axiom`) with an :n:`Inline` clause in the type of the functor arguments will be ignored. .. todo: What is an Inline directive? sb command but still unclear. Maybe referring to the "inline" in functor_app_annot? or assumption_token Inline assum_list? .. cmd:: Module Type @ident {* @module_binder } {* <: @module_type_inl } {? := {+<+ @module_type_inl } } Defines a module type named :n:`@ident`. See the example :ref:`here`. Specifying :n:`{* @module_binder }` starts a functor type with parameters given by the :n:`@module_binder`\s. :n:`:= {+<+ @module_type_inl }` specifies the body of a module or functor type definition. If it's not specified, then the module type is defined *interactively*, meaning that the module type is defined as a series of commands terminated with :cmd:`End` instead of in a single :cmd:`Module Type` command. Interactively defining the :n:`@module_type_inl`\s in a series of :cmd:`Include` commands is equivalent to giving them all in a single non-interactive :cmd:`Module Type` command. .. 
_terminating_module: **Terminating an interactive module or module type definition** Interactive modules are terminated with the :cmd:`End` command, which is also used to terminate :ref:`Sections`. :n:`End @ident` closes the interactive module or module type :token:`ident`. If the module type was given, the command verifies that the content of the module matches the module type. If the module is not a functor, its components (:term:`constants `, inductive types, submodules etc.) are now available through the dot notation. .. exn:: No such label @ident. :undocumented: .. exn:: Signature components for label @ident do not match. :undocumented: .. exn:: The field @ident is missing in @qualid. :undocumented: .. |br| raw:: html
.. note:: #. Interactive modules and module types can be nested. #. Interactive modules and module types can't be defined inside of :ref:`sections`. Sections can be defined inside of interactive modules and module types. #. Hints and notations (the ``Hint`` and :cmd:`Notation` commands) can also appear inside interactive modules and module types. Note that with module definitions like: :n:`Module @ident__1 : @module_type := @ident__2.` or :n:`Module @ident__1 : @module_type.` |br| :n:`Include @ident__2.` |br| :n:`End @ident__1.` hints and the like valid for :n:`@ident__1` are the ones defined in :n:`@module_type` rather than those defined in :n:`@ident__2` (or the module body). #. Within an interactive module type definition, the :cmd:`Parameter` command declares a :term:`constant` instead of defining a new axiom (which it does when not in a module type definition). #. Assumptions such as :cmd:`Axiom` that include the :n:`Inline` clause will be automatically expanded when the functor is applied, except when the functor application is prefixed by ``!``. .. cmd:: Include @module_type_inl {* <+ @module_expr_inl } Includes the content of module(s) in the current interactive module. Here :n:`@module_type_inl` can be a module expression or a module type expression. If it is a higher-order module or module type expression then the system tries to instantiate :n:`@module_type_inl` with the current interactive module. Including multiple modules in a single :cmd:`Include` is equivalent to including each module in a separate :cmd:`Include` command. .. cmd:: Include Type {+<+ @module_type_inl } .. deprecated:: 8.3 Use :cmd:`Include` instead. .. cmd:: Declare Module {? {| Import | Export } } @ident {* @module_binder } : @module_type_inl Declares a module :token:`ident` of type :token:`module_type_inl`. If :n:`@module_binder`\s are specified, declares a functor with parameters given by the list of :token:`module_binder`\s. .. cmd:: Import {? @import_categories } {+ @filtered_import } .. insertprodn import_categories filtered_import .. prodn:: import_categories ::= {? - } ( {+, @qualid } ) filtered_import ::= @qualid {? ( {+, @qualid {? ( .. ) } } ) } If :token:`qualid` denotes a valid basic module (i.e. its module type is a signature), makes its components available by their short names. .. example:: .. coqtop:: reset in Module Mod. Definition T:=nat. Check T. End Mod. Check Mod.T. .. coqtop:: all Fail Check T. Import Mod. Check T. Some features defined in modules are activated only when a module is imported. This is for instance the case of notations (see :ref:`Notations`). Declarations made with the :attr:`local` attribute are never imported by the :cmd:`Import` command. Such declarations are only accessible through their fully qualified name. .. example:: .. coqtop:: in Module A. Module B. Local Definition T := nat. End B. End A. Import A. .. coqtop:: all fail Check B.T. Appending a parenthesized list of names to a module name makes only those names available with their short names; it does not make other names defined in the module available, nor does it activate other features. The names to import may be :term:`constants <constant>`, inductive types and constructors, and notation aliases (for instance, Ltac definitions cannot be selectively imported). If they come from a module nested inside the one being imported, they must be prefixed by the inner path. The name of an inductive type may also be followed by ``(..)`` to import it, its constructors and its eliminators if they exist.
For this purpose "eliminator" means a :term:`constant` in the same module whose name is the inductive type's name suffixed by one of ``_sind``, ``_ind``, ``_rec`` or ``_rect``. .. example:: .. coqtop:: reset in Module A. Module B. Inductive T := C. Definition U := nat. End B. Definition Z := Prop. End A. Import A(B.T(..), Z). .. coqtop:: all Check B.T. Check B.C. Check Z. Fail Check B.U. Check A.B.U. .. warn:: Cannot import local constant, it will be ignored. This warning is printed when a name in the list of names to import was declared as a local constant, and the name is not imported. Putting a list of :n:`@import_categories` after ``Import`` will restrict activation of features according to those categories. Currently supported categories are: - ``coercions`` corresponding to :cmd:`Coercion`. - ``hints`` corresponding to the `Hint` commands (e.g. :cmd:`Hint Resolve` or :cmd:`Hint Rewrite`) and :ref:`typeclass ` instances. - ``canonicals`` corresponding to :cmd:`Canonical Structure`. - ``notations`` corresponding to :cmd:`Notation` (including :cmd:`Reserved Notation`), scope controls (:cmd:`Delimit Scope`, :cmd:`Bind Scope`, :cmd:`Open Scope`) and :ref:`Abbreviations`. - ``ltac.notations`` corresponding to :cmd:`Tactic Notation`. - ``ltac2.notations`` corresponding to :cmd:`Ltac2 Notation` (including Ltac2 abbreviations). Plugins may define their own categories. .. cmd:: Export {? @import_categories } {+ @filtered_import } Similar to :cmd:`Import`, except that when the module containing this command is imported, the :n:`{+ @qualid }` are imported as well. The selective import syntax also works with Export. .. exn:: @qualid is not a module. :undocumented: .. warn:: Trying to mask the absolute name @qualid! :undocumented: .. cmd:: Print Module @qualid Prints the module type and (optionally) the body of the module :n:`@qualid`. .. cmd:: Print Module Type @qualid Prints the module type corresponding to :n:`@qualid`. .. flag:: Short Module Printing This :term:`flag` (off by default) disables the printing of the types of fields, leaving only their names, for the commands :cmd:`Print Module` and :cmd:`Print Module Type`. .. _module_examples: Examples ~~~~~~~~ .. example:: Defining a simple module interactively .. coqtop:: in Module M. Definition T := nat. Definition x := 0. .. coqtop:: all Definition y : bool. exact true. .. coqtop:: in Defined. End M. Inside a module one can define :term:`constants `, prove theorems and do anything else that can be done in the toplevel. Components of a closed module can be accessed using the dot notation: .. coqtop:: all Print M.x. .. _example_def_simple_module_type: .. example:: Defining a simple module type interactively .. coqtop:: in Module Type SIG. Parameter T : Set. Parameter x : T. End SIG. .. _example_filter_module: .. example:: Creating a new module that omits some items from an existing module Since :n:`SIG`, the type of the new module :n:`N`, doesn't define :n:`y` or give the body of :n:`x`, which are not included in :n:`N`. .. coqtop:: all Module N : SIG with Definition T := nat := M. Print N.T. Print N.x. Fail Print N.y. .. reset to remove N (undo in last coqtop block doesn't seem to do that), invisibly redefine M, SIG .. coqtop:: none reset Module M. Definition T := nat. Definition x := 0. Definition y : bool. exact true. Defined. End M. Module Type SIG. Parameter T : Set. Parameter x : T. End SIG. The definition of :g:`N` using the module type expression :g:`SIG` with :g:`Definition T := nat` is equivalent to the following one: .. 
coqtop:: in Module Type SIG'. Definition T : Set := nat. Parameter x : T. End SIG'. Module N : SIG' := M. If we just want to be sure that our implementation satisfies a given module type without restricting the interface, we can use a transparent constraint .. coqtop:: in Module P <: SIG := M. .. coqtop:: all Print P.y. .. example:: Creating a functor (a module with parameters) .. coqtop:: in Module Two (X Y: SIG). Definition T := (X.T * Y.T)%type. Definition x := (X.x, Y.x). End Two. and apply it to our modules and do some computations: .. coqtop:: in Module Q := Two M N. .. coqtop:: all Eval compute in (fst Q.x + snd Q.x). .. example:: A module type with two sub-modules, sharing some fields .. coqtop:: in Module Type SIG2. Declare Module M1 : SIG. Module M2 <: SIG. Definition T := M1.T. Parameter x : T. End M2. End SIG2. .. coqtop:: in Module Mod <: SIG2. Module M1. Definition T := nat. Definition x := 1. End M1. Module M2 := M. End Mod. Notice that ``M`` is a correct body for the component ``M2`` since its ``T`` component is ``nat`` as specified for ``M1.T``. Typing Modules ------------------ In order to introduce the typing system we first slightly extend the syntactic class of terms and environments given in section :ref:`The-terms`. The environments, apart from definitions of :term:`constants ` and inductive types now also hold any other structure elements. Terms, apart from variables, :term:`constants ` and complex terms, also include access paths. We also need additional typing judgments: + :math:`\WFT{E}{S}`, denoting that a structure :math:`S` is well-formed, + :math:`\WTM{E}{p}{S}`, denoting that the module pointed by :math:`p` has type :math:`S` in the global environment :math:`E`. + :math:`\WEV{E}{S}{\ovl{S}}`, denoting that a structure :math:`S` is evaluated to a structure :math:`\ovl{S}` in weak head normal form. + :math:`\WS{E}{S_1}{S_2}` , denoting that a structure :math:`S_1` is a subtype of a structure :math:`S_2`. + :math:`\WS{E}{e_1}{e_2}` , denoting that a structure element :math:`e_1` is more precise than a structure element :math:`e_2`. The rules for forming structures are the following: .. inference:: WF-STR \WF{E;E′}{} ------------------------ \WFT{E}{ \Struct~E′ ~\End} .. inference:: WF-FUN \WFT{E; \ModS{X}{S}}{ \ovl{S′} } -------------------------- \WFT{E}{ \Functor(X:S)~S′} Evaluation of structures to weak head normal form: .. inference:: WEVAL-APP \begin{array}{c} \WEV{E}{S}{\Functor(X:S_1 )~S_2}~~~~~\WEV{E}{S_1}{\ovl{S_1}} \\ \WTM{E}{p}{S_3}~~~~~ \WS{E}{S_3}{\ovl{S_1}} \end{array} -------------------------- \WEV{E}{S~p}{S_2 \{p/X,t_1 /p_1 .c_1 ,…,t_n /p_n.c_n \}} In the last rule, :math:`\{t_1 /p_1 .c_1 ,…,t_n /p_n .c_n \}` is the resulting substitution from the inlining mechanism. We substitute in :math:`S` the inlined fields :math:`p_i .c_i` from :math:`\ModS{X}{S_1 }` by the corresponding delta-reduced term :math:`t_i` in :math:`p`. .. inference:: WEVAL-WITH-MOD \begin{array}{c} E[] ⊢ S \lra \Struct~e_1 ;…;e_i ; \ModS{X}{S_1 };e_{i+2} ;… ;e_n ~\End \\ E;e_1 ;…;e_i [] ⊢ S_1 \lra \ovl{S_1} ~~~~~~ E[] ⊢ p : S_2 \\ E;e_1 ;…;e_i [] ⊢ S_2 <: \ovl{S_1} \end{array} ---------------------------------- \begin{array}{c} \WEV{E}{S~\with~X := p}{}\\ \Struct~e_1 ;…;e_i ; \ModA{X}{p};e_{i+2} \{p/X\} ;…;e_n \{p/X\} ~\End \end{array} .. 
inference:: WEVAL-WITH-MOD-REC \begin{array}{c} \WEV{E}{S}{\Struct~e_1 ;…;e_i ; \ModS{X_1}{S_1 };e_{i+2} ;… ;e_n ~\End} \\ \WEV{E;e_1 ;…;e_i }{S_1~\with~p := p_1}{\ovl{S_2}} \end{array} -------------------------- \begin{array}{c} \WEV{E}{S~\with~X_1.p := p_1}{} \\ \Struct~e_1 ;…;e_i ; \ModS{X}{\ovl{S_2}};e_{i+2} \{p_1 /X_1.p\} ;…;e_n \{p_1 /X_1.p\} ~\End \end{array} .. inference:: WEVAL-WITH-DEF \begin{array}{c} \WEV{E}{S}{\Struct~e_1 ;…;e_i ;\Assum{}{c}{T_1};e_{i+2} ;… ;e_n ~\End} \\ \WS{E;e_1 ;…;e_i }{\Def{}{c}{t}{T})}{\Assum{}{c}{T_1}} \end{array} -------------------------- \begin{array}{c} \WEV{E}{S~\with~c := t:T}{} \\ \Struct~e_1 ;…;e_i ;\Def{}{c}{t}{T};e_{i+2} ;… ;e_n ~\End \end{array} .. inference:: WEVAL-WITH-DEF-REC \begin{array}{c} \WEV{E}{S}{\Struct~e_1 ;…;e_i ; \ModS{X_1 }{S_1 };e_{i+2} ;… ;e_n ~\End} \\ \WEV{E;e_1 ;…;e_i }{S_1~\with~p := p_1}{\ovl{S_2}} \end{array} -------------------------- \begin{array}{c} \WEV{E}{S~\with~X_1.p := t:T}{} \\ \Struct~e_1 ;…;e_i ; \ModS{X}{\ovl{S_2} };e_{i+2} ;… ;e_n ~\End \end{array} .. inference:: WEVAL-PATH-MOD1 \begin{array}{c} \WEV{E}{p}{\Struct~e_1 ;…;e_i ; \Mod{X}{S}{S_1};e_{i+2} ;… ;e_n ~\End} \\ \WEV{E;e_1 ;…;e_i }{S}{\ovl{S}} \end{array} -------------------------- E[] ⊢ p.X \lra \ovl{S} .. inference:: WEVAL-PATH-MOD2 \WF{E}{} \Mod{X}{S}{S_1}∈ E \WEV{E}{S}{\ovl{S}} -------------------------- \WEV{E}{X}{\ovl{S}} .. inference:: WEVAL-PATH-ALIAS1 \begin{array}{c} \WEV{E}{p}{~\Struct~e_1 ;…;e_i ; \ModA{X}{p_1};e_{i+2} ;… ;e_n ~\End} \\ \WEV{E;e_1 ;…;e_i }{p_1}{\ovl{S}} \end{array} -------------------------- \WEV{E}{p.X}{\ovl{S}} .. inference:: WEVAL-PATH-ALIAS2 \WF{E}{} \ModA{X}{p_1 }∈ E \WEV{E}{p_1}{\ovl{S}} -------------------------- \WEV{E}{X}{\ovl{S}} .. inference:: WEVAL-PATH-TYPE1 \begin{array}{c} \WEV{E}{p}{~\Struct~e_1 ;…;e_i ; \ModType{Y}{S};e_{i+2} ;… ;e_n ~\End} \\ \WEV{E;e_1 ;…;e_i }{S}{\ovl{S}} \end{array} -------------------------- \WEV{E}{p.Y}{\ovl{S}} .. inference:: WEVAL-PATH-TYPE2 \WF{E}{} \ModType{Y}{S}∈ E \WEV{E}{S}{\ovl{S}} -------------------------- \WEV{E}{Y}{\ovl{S}} Rules for typing module: .. inference:: MT-EVAL \WEV{E}{p}{\ovl{S}} -------------------------- E[] ⊢ p : \ovl{S} .. inference:: MT-STR E[] ⊢ p : S -------------------------- E[] ⊢ p : S/p The last rule, called strengthening is used to make all module fields manifestly equal to themselves. The notation :math:`S/p` has the following meaning: + if :math:`S\lra~\Struct~e_1 ;…;e_n ~\End` then :math:`S/p=~\Struct~e_1 /p;…;e_n /p ~\End` where :math:`e/p` is defined as follows (note that opaque definitions are processed as assumptions): + :math:`\Def{}{c}{t}{T}/p = \Def{}{c}{t}{T}` + :math:`\Assum{}{c}{U}/p = \Def{}{c}{p.c}{U}` + :math:`\ModS{X}{S}/p = \ModA{X}{p.X}` + :math:`\ModA{X}{p′}/p = \ModA{X}{p′}` + :math:`\Ind{}{Γ_P}{Γ_C}{Γ_I}/p = \Indp{}{Γ_P}{Γ_C}{Γ_I}{p}` + :math:`\Indpstr{}{Γ_P}{Γ_C}{Γ_I}{p'}{p} = \Indp{}{Γ_P}{Γ_C}{Γ_I}{p'}` + if :math:`S \lra \Functor(X:S′)~S″` then :math:`S/p=S` The notation :math:`\Indp{}{Γ_P}{Γ_C}{Γ_I}{p}` denotes an inductive definition that is definitionally equal to the inductive definition in the module denoted by the path :math:`p`. All rules which have :math:`\Ind{}{Γ_P}{Γ_C}{Γ_I}` as premises are also valid for :math:`\Indp{}{Γ_P}{Γ_C}{Γ_I}{p}`. We give the formation rule for :math:`\Indp{}{Γ_P}{Γ_C}{Γ_I}{p}` below as well as the equality rules on inductive types and constructors. The module subtyping rules: .. 
inference:: MSUB-STR \begin{array}{c} \WS{E;e_1 ;…;e_n }{e_{σ(i)}}{e'_i ~\for~ i=1..m} \\ σ : \{1… m\} → \{1… n\} ~\injective \end{array} -------------------------- \WS{E}{\Struct~e_1 ;…;e_n ~\End}{~\Struct~e'_1 ;…;e'_m ~\End} .. inference:: MSUB-FUN \WS{E}{\ovl{S_1'}}{\ovl{S_1}} \WS{E; \ModS{X}{S_1'}}{\ovl{S_2}}{\ovl{S_2'}} -------------------------- E[] ⊢ \Functor(X:S_1 ) S_2 <: \Functor(X:S_1') S_2' Structure element subtyping rules: .. inference:: ASSUM-ASSUM E[] ⊢ T_1 ≤_{βδιζη} T_2 -------------------------- \WS{E}{\Assum{}{c}{T_1 }}{\Assum{}{c}{T_2 }} .. inference:: DEF-ASSUM E[] ⊢ T_1 ≤_{βδιζη} T_2 -------------------------- \WS{E}{\Def{}{c}{t}{T_1 }}{\Assum{}{c}{T_2 }} .. inference:: ASSUM-DEF E[] ⊢ T_1 ≤_{βδιζη} T_2 E[] ⊢ c =_{βδιζη} t_2 -------------------------- \WS{E}{\Assum{}{c}{T_1 }}{\Def{}{c}{t_2 }{T_2 }} .. inference:: DEF-DEF E[] ⊢ T_1 ≤_{βδιζη} T_2 E[] ⊢ t_1 =_{βδιζη} t_2 -------------------------- \WS{E}{\Def{}{c}{t_1 }{T_1 }}{\Def{}{c}{t_2 }{T_2 }} .. inference:: IND-IND E[] ⊢ Γ_P =_{βδιζη} Γ_P' E[Γ_P ] ⊢ Γ_C =_{βδιζη} Γ_C' E[Γ_P ;Γ_C ] ⊢ Γ_I =_{βδιζη} Γ_I' -------------------------- \WS{E}{\ind{Γ_P}{Γ_C}{Γ_I}}{\ind{Γ_P'}{Γ_C'}{Γ_I'}} .. inference:: INDP-IND E[] ⊢ Γ_P =_{βδιζη} Γ_P' E[Γ_P ] ⊢ Γ_C =_{βδιζη} Γ_C' E[Γ_P ;Γ_C ] ⊢ Γ_I =_{βδιζη} Γ_I' -------------------------- \WS{E}{\Indp{}{Γ_P}{Γ_C}{Γ_I}{p}}{\ind{Γ_P'}{Γ_C'}{Γ_I'}} .. inference:: INDP-INDP \begin{array}{c} E[] ⊢ Γ_P =_{βδιζη} Γ_P' E[Γ_P ] ⊢ Γ_C =_{βδιζη} Γ_C' \\ E[Γ_P ;Γ_C ] ⊢ Γ_I =_{βδιζη} Γ_I' E[] ⊢ p =_{βδιζη} p' \end{array} -------------------------- \WS{E}{\Indp{}{Γ_P}{Γ_C}{Γ_I}{p}}{\Indp{}{Γ_P'}{Γ_C'}{Γ_I'}{p'}} .. inference:: MOD-MOD \WS{E}{S_1}{S_2} -------------------------- \WS{E}{\ModS{X}{S_1 }}{\ModS{X}{S_2 }} .. inference:: ALIAS-MOD E[] ⊢ p : S_1 \WS{E}{S_1}{S_2} -------------------------- \WS{E}{\ModA{X}{p}}{\ModS{X}{S_2 }} .. inference:: MOD-ALIAS E[] ⊢ p : S_2 \WS{E}{S_1}{S_2} E[] ⊢ X =_{βδιζη} p -------------------------- \WS{E}{\ModS{X}{S_1 }}{\ModA{X}{p}} .. inference:: ALIAS-ALIAS E[] ⊢ p_1 =_{βδιζη} p_2 -------------------------- \WS{E}{\ModA{X}{p_1 }}{\ModA{X}{p_2 }} .. inference:: MODTYPE-MODTYPE \WS{E}{S_1}{S_2} \WS{E}{S_2}{S_1} -------------------------- \WS{E}{\ModType{Y}{S_1 }}{\ModType{Y}{S_2 }} New environment formation rules .. inference:: WF-MOD1 \WF{E}{} \WFT{E}{S} -------------------------- \WF{E; \ModS{X}{S}}{} .. inference:: WF-MOD2 \WS{E}{S_2}{S_1} \WF{E}{} \WFT{E}{S_1} \WFT{E}{S_2} -------------------------- \WF{E; \ModImp{X}{S_1}{S_2}}{} .. inference:: WF-ALIAS \WF{E}{} E[] ⊢ p : S -------------------------- \WF{E; \ModA{X}{p}}{} .. inference:: WF-MODTYPE \WF{E}{} \WFT{E}{S} -------------------------- \WF{E; \ModType{Y}{S}}{} .. inference:: WF-IND \begin{array}{c} \WF{E;\ind{Γ_P}{Γ_C}{Γ_I}}{} \\ E[] ⊢ p:~\Struct~e_1 ;…;e_n ;\ind{Γ_P'}{Γ_C'}{Γ_I'};… ~\End \\ E[] ⊢ \ind{Γ_P'}{Γ_C'}{Γ_I'} <: \ind{Γ_P}{Γ_C}{Γ_I} \end{array} -------------------------- \WF{E; \Indp{}{Γ_P}{Γ_C}{Γ_I}{p} }{} Component access rules .. inference:: ACC-TYPE1 E[Γ] ⊢ p :~\Struct~e_1 ;…;e_i ;\Assum{}{c}{T};… ~\End -------------------------- E[Γ] ⊢ p.c : T .. inference:: ACC-TYPE2 E[Γ] ⊢ p :~\Struct~e_1 ;…;e_i ;\Def{}{c}{t}{T};… ~\End -------------------------- E[Γ] ⊢ p.c : T Notice that the following rule extends the delta rule defined in section :ref:`Conversion-rules` .. 
inference:: ACC-DELTA E[Γ] ⊢ p :~\Struct~e_1 ;…;e_i ;\Def{}{c}{t}{U};… ~\End -------------------------- E[Γ] ⊢ p.c \triangleright_δ t In the rules below we assume :math:`Γ_P` is :math:`[p_1 :P_1 ;…;p_r :P_r ]`, :math:`Γ_I` is :math:`[I_1 :A_1 ;…;I_k :A_k ]`, and :math:`Γ_C` is :math:`[c_1 :C_1 ;…;c_n :C_n ]`. .. inference:: ACC-IND1 E[Γ] ⊢ p :~\Struct~e_1 ;…;e_i ;\ind{Γ_P}{Γ_C}{Γ_I};… ~\End -------------------------- E[Γ] ⊢ p.I_j : (p_1 :P_1 )…(p_r :P_r )A_j .. inference:: ACC-IND2 E[Γ] ⊢ p :~\Struct~e_1 ;…;e_i ;\ind{Γ_P}{Γ_C}{Γ_I};… ~\End -------------------------- E[Γ] ⊢ p.c_m : (p_1 :P_1 )…(p_r :P_r )C_m I_j (I_j~p_1 …p_r )_{j=1… k} .. inference:: ACC-INDP1 E[] ⊢ p :~\Struct~e_1 ;…;e_i ; \Indp{}{Γ_P}{Γ_C}{Γ_I}{p'} ;… ~\End -------------------------- E[] ⊢ p.I_i \triangleright_δ p'.I_i .. inference:: ACC-INDP2 E[] ⊢ p :~\Struct~e_1 ;…;e_i ; \Indp{}{Γ_P}{Γ_C}{Γ_I}{p'} ;… ~\End -------------------------- E[] ⊢ p.c_i \triangleright_δ p'.c_i .. extracted from Gallina extensions chapter Libraries and qualified names --------------------------------- .. _names-of-libraries: Names of libraries ~~~~~~~~~~~~~~~~~~ The theories developed in Coq are stored in *library files* which are hierarchically classified into *libraries* and *sublibraries*. To express this hierarchy, library names are represented by qualified identifiers qualid, i.e. as list of identifiers separated by dots (see :ref:`qualified-names`). For instance, the library file ``Mult`` of the standard Coq library ``Arith`` is named ``Coq.Arith.Mult``. The identifier that starts the name of a library is called a *library root*. All library files of the standard library of Coq have the reserved root Coq but library filenames based on other roots can be obtained by using Coq commands (coqc, coqtop, coqdep, …) options ``-Q`` or ``-R`` (see :ref:`command-line-options`). Also, when an interactive Coq session starts, a library of root ``Top`` is started, unless option ``-top`` or ``-notop`` is set (see :ref:`command-line-options`). .. _qualified-names: Qualified identifiers ~~~~~~~~~~~~~~~~~~~~~ .. insertprodn qualid field_ident .. prodn:: qualid ::= @ident {* @field_ident } field_ident ::= .@ident Library files are modules which possibly contain submodules which eventually contain constructions (axioms, parameters, definitions, lemmas, theorems, remarks or facts). The *absolute name*, or *full name*, of a construction in some library file is a qualified identifier starting with the logical name of the library file, followed by the sequence of submodules names encapsulating the construction and ended by the proper name of the construction. Typically, the absolute name ``Coq.Init.Logic.eq`` denotes Leibniz’ equality defined in the module Logic in the sublibrary ``Init`` of the standard library of Coq. The proper name that ends the name of a construction is the short name (or sometimes base name) of the construction (for instance, the short name of ``Coq.Init.Logic.eq`` is ``eq``). Any partial suffix of the absolute name is a *partially qualified name* (e.g. ``Logic.eq`` is a partially qualified name for ``Coq.Init.Logic.eq``). Especially, the short name of a construction is its shortest partially qualified name. Coq does not accept two constructions (definition, theorem, …) with the same absolute name but different constructions can have the same short name (or even same partially qualified names as soon as the full names are different). Notice that the notion of absolute, partially qualified and short names also applies to library filenames. 
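.. example:: Referring to a constant through partially qualified names

   Assuming the standard prelude is loaded (so that the library file
   ``Coq.Init.Logic`` has been required), Leibniz equality can be referred to by
   its absolute name, by partially qualified names, or by its short name, all of
   which denote the same constant:

   .. coqtop:: all

      Check Coq.Init.Logic.eq.
      Check Init.Logic.eq.
      Check Logic.eq.
      Check eq.
      Locate eq.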
**Visibility** Coq maintains a table called the name table which maps partially qualified names of constructions to absolute names. This table is updated by the commands :cmd:`Require`, :cmd:`Import` and :cmd:`Export` and also each time a new declaration is added to the context. An absolute name is called visible from a given short or partially qualified name when this latter name is enough to denote it. This means that the short or partially qualified name is mapped to the absolute name in Coq name table. Definitions with the :attr:`local` attribute are only accessible with their fully qualified name (see :ref:`gallina-definitions`). It may happen that a visible name is hidden by the short name or a qualified name of another construction. In this case, the name that has been hidden must be referred to using one more level of qualification. To ensure that a construction always remains accessible, absolute names can never be hidden. .. example:: .. coqtop:: all Check 0. Definition nat := bool. Check 0. Check Datatypes.nat. Locate nat. .. seealso:: Commands :cmd:`Locate`. .. _libraries-and-filesystem: Libraries and filesystem ~~~~~~~~~~~~~~~~~~~~~~~~ Compiled files (``.vo`` and ``.vio``) store sub-libraries. In order to refer to them inside Coq, a translation from file-system names to Coq names is needed. In this translation, names in the file system are called *physical* paths while Coq names are contrastingly called *logical* names. A logical prefix Lib can be associated with a physical path using either the command line option ``-Q`` `path` ``Lib`` or the command line option ``-R`` `path` ``Lib``. All subfolders of path are recursively associated with the logical path ``Lib`` extended with the corresponding suffix coming from the physical path. For instance, the folder ``path/Foo/Bar`` maps to ``Lib.Foo.Bar``. Subdirectories corresponding to invalid Coq identifiers are skipped, and, by convention, subdirectories named ``CVS`` or ``_darcs`` are skipped too. Thanks to this mechanism, ``.vo`` files are made available through the logical name of the folder they are in, extended with their own basename. For example, the name associated with the file ``path/Foo/Bar/File.vo`` is ``Lib.Foo.Bar.File``. The same caveat applies for invalid identifiers. When compiling a source file, the ``.vo`` file stores its logical name, so that an error is issued if it is loaded with the wrong loadpath afterwards. Some folders have a special status and are automatically put in the path. Coq commands automatically associate a logical path to files in the repository tree rooted at the directory from where the command is launched, ``coqlib/user-contrib/``, the directories listed in the ``$COQPATH``, ``${XDG_DATA_HOME}/coq/`` and ``${XDG_DATA_DIRS}/coq/`` environment variables (see `XDG base directory specification `_) with the same physical-to-logical translation and with an empty logical prefix. .. todo: Needs a more better explanation of COQPATH and XDG* with example(s) and suggest best practices for their use The choice between ``-Q`` and ``-R`` impacts how ambiguous names are resolved in :cmd:`Require` (see :ref:`compiled-files`). There also exists another independent loadpath mechanism attached to OCaml object files (``.cmo`` or ``.cmxs``) rather than Coq object files as described above. The OCaml loadpath is managed using the option ``-I`` `path` (in the OCaml world, there is neither a notion of logical name prefix nor a way to access files in subdirectories of path). 
See the command :cmd:`Declare ML Module` in :ref:`compiled-files` to understand the need of the OCaml loadpath. See :ref:`command-line-options` for a more general view over the Coq command line options. .. _controlling-locality-of-commands: Controlling the scope of commands with locality attributes ---------------------------------------------------------- Many commands have effects that apply only within a specific scope, typically the section or the module in which the command was called. Locality :term:`attributes ` can alter the scope of the effect. Below, we give the semantics of each locality attribute while noting a few exceptional commands for which :attr:`local` and :attr:`global` attributes are interpreted differently. .. attr:: local This :term:`attribute` limits the effect of the command to the current scope (section or module). The ``Local`` prefix is an alternative syntax for the :attr:`local` attribute (see :n:`@legacy_attr`). .. note:: - For some commands, this is the only locality supported within sections (e.g., for :cmd:`Notation`, :cmd:`Ltac` and :ref:`Hint ` commands). - For some commands, this is the default locality within sections even though other locality attributes are supported as well (e.g., for the :cmd:`Arguments` command). .. warning:: **Exception:** when :attr:`local` is applied to :cmd:`Definition`, :cmd:`Theorem` or their variants, its semantics are different: it makes the defined objects available only through their fully-qualified names rather than their unqualified names after an :cmd:`Import`. .. attr:: export This :term:`attribute` makes the effect of the command persist when the section is closed and applies the effect when the module containing the command is imported. Commands supporting this attribute include :cmd:`Set`, :cmd:`Unset` and the :ref:`Hint ` commands, although the latter don't support it within sections. .. attr:: global This :term:`attribute` makes the effect of the command persist even when the current section or module is closed. Loading the file containing the command (possibly transitively) applies the effect of the command. The ``Global`` prefix is an alternative syntax for the :attr:`global` attribute (see :n:`@legacy_attr`). .. warning:: **Exception:** for a few commands (like :cmd:`Notation` and :cmd:`Ltac`), this attribute behaves like :attr:`export`. .. warning:: We strongly discourage using the :attr:`global` locality attribute because the transitive nature of file loading gives the user little control. We recommend using the :attr:`export` locality attribute where it is supported. coq-8.15.0/doc/sphinx/language/core/primitive.rst000066400000000000000000000164561417001151100217250ustar00rootroot00000000000000Primitive objects ================= .. _primitive-integers: Primitive Integers ------------------ The language of terms features 63-bit machine integers as values. The type of such a value is *axiomatized*; it is declared through the following sentence (excerpt from the :g:`PrimInt63` module): .. coqdoc:: Primitive int := #int63_type. This type can be understood as representing either unsigned or signed integers, depending on which module is imported or, more generally, which scope is open. :g:`Uint63` and :g:`uint63_scope` refer to the unsigned version, while :g:`Sint63` and :g:`sint63_scope` refer to the signed one. The :g:`PrimInt63` module declares the available operators for this type. 
For instance, equality of two unsigned primitive integers can be determined using the :g:`Uint63.eqb` function, declared and specified as follows: .. coqdoc:: Primitive eqb := #int63_eq. Notation "m '==' n" := (eqb m n) (at level 70, no associativity) : uint63_scope. Axiom eqb_correct : forall i j, (i == j)%uint63 = true -> i = j. The complete set of such operators can be found in the :g:`PrimInt63` module. The specifications and notations are in the :g:`Uint63` and :g:`Sint63` modules. These primitive declarations are regular axioms. As such, they must be trusted and are listed by the :g:`Print Assumptions` command, as in the following example. .. coqtop:: in reset From Coq Require Import Uint63. Lemma one_minus_one_is_zero : (1 - 1 = 0)%uint63. Proof. apply eqb_correct; vm_compute; reflexivity. Qed. .. coqtop:: all Print Assumptions one_minus_one_is_zero. The reduction machines implement dedicated, efficient rules to reduce the applications of these primitive operations. The extraction of these primitives can be customized similarly to the extraction of regular axioms (see :ref:`extraction`). Nonetheless, the :g:`ExtrOCamlInt63` module can be used when extracting to OCaml: it maps the Coq primitives to types and functions of a :g:`Uint63` module (including signed functions for :g:`Sint63` despite the name). That OCaml module is not produced by extraction. Instead, it has to be provided by the user (if they want to compile or execute the extracted code). For instance, an implementation of this module can be taken from the kernel of Coq. Literal values (at type :g:`Uint63.int`) are extracted to literal OCaml values wrapped into the :g:`Uint63.of_int` (resp. :g:`Uint63.of_int64`) constructor on 64-bit (resp. 32-bit) platforms. Currently, this cannot be customized (see the function :g:`Uint63.compile` from the kernel). .. _primitive-floats: Primitive Floats ---------------- The language of terms features Binary64 floating-point numbers as values. The type of such a value is *axiomatized*; it is declared through the following sentence (excerpt from the :g:`PrimFloat` module): .. coqdoc:: Primitive float := #float64_type. This type is equipped with a few operators, that must be similarly declared. For instance, the product of two primitive floats can be computed using the :g:`PrimFloat.mul` function, declared and specified as follows: .. coqdoc:: Primitive mul := #float64_mul. Notation "x * y" := (mul x y) : float_scope. Axiom mul_spec : forall x y, Prim2SF (x * y)%float = SF64mul (Prim2SF x) (Prim2SF y). where :g:`Prim2SF` is defined in the :g:`FloatOps` module. The set of such operators is described in section :ref:`floats_library`. These primitive declarations are regular axioms. As such, they must be trusted, and are listed by the :g:`Print Assumptions` command. The reduction machines (:tacn:`vm_compute`, :tacn:`native_compute`) implement dedicated, efficient rules to reduce the applications of these primitive operations, using the floating-point processor operators that are assumed to comply with the IEEE 754 standard for floating-point arithmetic. The extraction of these primitives can be customized similarly to the extraction of regular axioms (see :ref:`extraction`). Nonetheless, the :g:`ExtrOCamlFloats` module can be used when extracting to OCaml: it maps the Coq primitives to types and functions of a :g:`Float64` module. Said OCaml module is not produced by extraction. Instead, it has to be provided by the user (if they want to compile or execute the extracted code). 
For instance, an implementation of this module can be taken from the kernel of Coq. Literal values (of type :g:`Float64.t`) are extracted to literal OCaml values (of type :g:`float`) written in hexadecimal notation and wrapped into the :g:`Float64.of_float` constructor, e.g.: :g:`Float64.of_float (0x1p+0)`. .. _primitive-arrays: Primitive Arrays ---------------- The language of terms features persistent arrays as values. The type of such a value is *axiomatized*; it is declared through the following sentence (excerpt from the :g:`PArray` module): .. coqdoc:: Primitive array := #array_type. This type is equipped with a few operators, that must be similarly declared. For instance, elements in an array can be accessed and updated using the :g:`PArray.get` and :g:`PArray.set` functions, declared and specified as follows: .. coqdoc:: Primitive get := #array_get. Primitive set := #array_set. Notation "t .[ i ]" := (get t i). Notation "t .[ i <- a ]" := (set t i a). Axiom get_set_same : forall A t i (a:A), (i < length t) = true -> t.[i<-a].[i] = a. Axiom get_set_other : forall A t i j (a:A), i <> j -> t.[i<-a].[j] = t.[j]. The rest of these operators can be found in the :g:`PArray` module. These primitive declarations are regular axioms. As such, they must be trusted and are listed by the :g:`Print Assumptions` command. The reduction machines (:tacn:`vm_compute`, :tacn:`native_compute`) implement dedicated, efficient rules to reduce the applications of these primitive operations. The extraction of these primitives can be customized similarly to the extraction of regular axioms (see :ref:`extraction`). Nonetheless, the :g:`ExtrOCamlPArray` module can be used when extracting to OCaml: it maps the Coq primitives to types and functions of a :g:`Parray` module. Said OCaml module is not produced by extraction. Instead, it has to be provided by the user (if they want to compile or execute the extracted code). For instance, an implementation of this module can be taken from the kernel of Coq (see ``kernel/parray.ml``). Coq's primitive arrays are persistent data structures. Semantically, a set operation ``t.[i <- a]`` represents a new array that has the same values as ``t``, except at position ``i`` where its value is ``a``. The array ``t`` still exists, can still be used and its values were not modified. Operationally, the implementation of Coq's primitive arrays is optimized so that the new array ``t.[i <- a]`` does not copy all of ``t``. The details are in section 2.3 of :cite:`ConchonFilliatre07wml`. In short, the implementation keeps one version of ``t`` as an OCaml native array and other versions as lists of modifications to ``t``. Accesses to the native array version are constant time operations. However, accesses to versions where all the cells of the array are modified have O(n) access time, the same as a list. The version that is kept as the native array changes dynamically upon each get and set call: the current list of modifications is applied to the native array and the lists of modifications of the other versions are updated so that they still represent the same values. coq-8.15.0/doc/sphinx/language/core/records.rst000066400000000000000000000325531417001151100213520ustar00rootroot00000000000000.. _record-types: Record types ---------------- The :cmd:`Record` construction is a :term:`command` allowing the definition of records as is done in many programming languages. Its syntax is described in the grammar below. 
In fact, the :cmd:`Record` :term:`command` is more general than the usual record types, since it allows also for “manifest”expressions. In this sense, the :cmd:`Record` construction allows defining“signatures”. .. _record_grammar: .. cmd:: {| Record | Structure } @record_definition {* with @record_definition } :name: Record; Structure .. insertprodn record_definition field_def .. prodn:: record_definition ::= {? > } @ident_decl {* @binder } {? : @sort } {? := {? @ident } %{ {*; @record_field } {? ; } %} } record_field ::= {* #[ {*, @attribute } ] } @name {? @field_body } {? %| @natural } {? @decl_notations } field_body ::= {* @binder } @of_type | {* @binder } @of_type := @term | {* @binder } := @term term_record ::= %{%| {*; @field_def } {? ; } %|%} field_def ::= @qualid {* @binder } := @term Each :n:`@record_definition` defines a record named by :n:`@ident_decl`. The constructor name is given by :n:`@ident`. If the constructor name is not specified, then the default name :n:`Build_@ident` is used, where :n:`@ident` is the record name. If :token:`sort` is omitted, the default sort is Type. Notice that the type of an identifier can depend on a previously-given identifier. Thus the order of the fields is important. :n:`@binder` parameters may be applied to the record as a whole or to individual fields. .. todo "Record foo2:Prop := { a }." gives the error "Cannot infer this placeholder of type "Type", while "Record foo2:Prop := { a:Type }." gives the output "foo2 is defined. a cannot be defined because it is informative and foo2 is not." Your thoughts? :n:`{? > }` If provided, the constructor name is automatically declared as a coercion from the class of the last field type to the record name (this may fail if the uniform inheritance condition is not satisfied). See :ref:`coercions`. Notations can be attached to fields using the :n:`@decl_notations` annotation. :cmd:`Record` and :cmd:`Structure` are synonyms. This command supports the :attr:`universes(polymorphic)`, :attr:`universes(template)`, :attr:`universes(cumulative)`, :attr:`private(matching)` and :attr:`projections(primitive)` attributes. More generally, a record may have explicitly defined (a.k.a. manifest) fields. For instance, we might have: :n:`Record @ident {* @binder } : @sort := { @ident__1 : @type__1 ; @ident__2 := @term__2 ; @ident__3 : @type__3 }`. in which case the correctness of :n:`@type__3` may rely on the instance :n:`@term__2` of :n:`@ident__2` and :n:`@term__2` may in turn depend on :n:`@ident__1`. .. example:: The set of rational numbers may be defined as: .. coqtop:: reset all Record Rat : Set := mkRat { sign : bool ; top : nat ; bottom : nat ; Rat_bottom_cond : 0 <> bottom ; Rat_irred_cond : forall x y z:nat, (x * y) = top /\ (x * z) = bottom -> x = 1 }. Note here that the fields ``Rat_bottom_cond`` depends on the field ``bottom`` and ``Rat_irred_cond`` depends on both ``top`` and ``bottom``. Let us now see the work done by the :cmd:`Record` command. First the command generates a variant type definition with just one constructor: :n:`Variant @ident {* @binder } : @sort := @ident__0 {* @binder }`. To build an object of type :token:`ident`, provide the constructor :n:`@ident__0` with the appropriate number of terms filling the fields of the record. .. example:: Let us define the rational :math:`1/2`: .. coqtop:: in Theorem one_two_irred : forall x y z:nat, x * y = 1 /\ x * z = 2 -> x = 1. Admitted. Definition half := mkRat true 1 2 (O_S 1) one_two_irred. Check half. 
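The record and the type of its generated constructor ``mkRat``, which takes one
argument per field in the declared order, can be inspected with :cmd:`Print`
and :cmd:`Check`:

.. coqtop:: all

   Print Rat.
   Check mkRat.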
Alternatively, the following syntax allows creating objects by using named fields, as shown in this grammar. The fields do not have to be in any particular order, nor do they have to be all present if the missing ones can be inferred or prompted for (see :ref:`programs`). .. coqtop:: all Definition half' := {| sign := true; Rat_bottom_cond := O_S 1; Rat_irred_cond := one_two_irred |}. The following settings let you control the display format for types: .. flag:: Printing Records When this :term:`flag` is on (this is the default), use the record syntax (shown above) as the default display format. You can override the display format for specified types by adding entries to these tables: .. table:: Printing Record @qualid This :term:`table` specifies a set of qualids which are displayed as records. Use the :cmd:`Add` and :cmd:`Remove` commands to update the set of qualids. .. table:: Printing Constructor @qualid This :term:`table` specifies a set of qualids which are displayed as constructors. Use the :cmd:`Add` and :cmd:`Remove` commands to update the set of qualids. This syntax can also be used for pattern matching. .. coqtop:: all Eval compute in ( match half with | {| sign := true; top := n |} => n | _ => 0 end). The :term:`command` generates also, when it is possible, the projection functions for destructuring an object of type :token:`ident`. These projection functions are given the names of the corresponding fields. If a field is named `_` then no projection is built for it. In our example: .. coqtop:: all Eval compute in top half. Eval compute in bottom half. Eval compute in Rat_bottom_cond half. An alternative syntax for projections based on a dot notation is available: .. coqtop:: all Eval compute in half.(top). .. flag:: Printing Projections This :term:`flag` activates the dot notation for printing. .. example:: .. coqtop:: all Set Printing Projections. Check top half. .. FIXME: move this to the main grammar in the spec chapter .. _record_projections_grammar: Syntax of Record Projections .. insertprodn term_projection term_projection .. prodn:: term_projection ::= @term0 .( @qualid {? @univ_annot } {* @arg } ) | @term0 .( @ @qualid {? @univ_annot } {* @term1 } ) The corresponding grammar rules are given in the preceding grammar. When :token:`qualid` denotes a projection, the syntax :n:`@term0.(@qualid)` is equivalent to :n:`@qualid @term0`, the syntax :n:`@term0.(@qualid {+ @arg })` to :n:`@qualid {+ @arg } @term0`. and the syntax :n:`@term0.(@@qualid {+ @term0 })` to :n:`@@qualid {+ @term0 } @term0`. In each case, :token:`term0` is the projected object and the other arguments are the parameters of the inductive type. Since the projected object is part of the notation, it is always considered an explicit argument of :token:`qualid`, even if it is formally declared as implicit (see :ref:`ImplicitArguments`), .. note:: Records defined with the :cmd:`Record` command are not allowed to be recursive (references to the record's name in the type of its field raises an error). To define recursive records, one can use the :cmd:`Inductive` and :cmd:`CoInductive` commands, resulting in an inductive or coinductive record. Definition of mutually inductive or coinductive records are also allowed, as long as all of the types in the block are records. .. note:: Induction schemes are automatically generated for inductive records. 
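For instance, with a parameterized record (the record ``box``, its constructor
``Box`` and its field ``contents`` below are purely illustrative and not part of
the standard library; the parameter is made implicit here with the
:cmd:`Arguments` command, see :ref:`ArgumentsCommand`), both forms of the dot
notation can be used, the second one passing the parameter explicitly:

.. coqtop:: in

   Record box (A : Type) : Type := Box { contents : A }.
   Arguments contents {A}.
   Definition b0 : box nat := {| contents := 0 |}.

.. coqtop:: all

   Check b0.(contents).
   Check b0.(@contents nat).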
Automatic generation of induction schemes for non-recursive records defined with the :cmd:`Record` command can be activated with the :flag:`Nonrecursive Elimination Schemes` flag (see :ref:`proofschemes-induction-principles`). .. warn:: @ident cannot be defined. It can happen that the definition of a projection is impossible. This message is followed by an explanation of this impossibility. There may be three reasons: #. The name :token:`ident` already exists in the global environment (see :cmd:`Axiom`). #. The :term:`body` of :token:`ident` uses an incorrect elimination for :token:`ident` (see :cmd:`Fixpoint` and :ref:`Destructors`). #. The type of the projections :token:`ident` depends on previous projections which themselves could not be defined. .. exn:: Records declared with the keyword Record or Structure cannot be recursive. The record name :token:`ident` appears in the type of its fields, but uses the :cmd:`Record` command. Use the :cmd:`Inductive` or :cmd:`CoInductive` command instead. .. exn:: Cannot handle mutually (co)inductive records. Records cannot be defined as part of mutually inductive (or coinductive) definitions, whether with records only or mixed with standard definitions. During the definition of the one-constructor inductive definition, all the errors of inductive definitions, as described in Section :ref:`gallina-inductive-definitions`, may also occur. .. seealso:: Coercions and records in section :ref:`coercions-classes-as-records` of the chapter devoted to coercions. .. _primitive_projections: Primitive Projections ~~~~~~~~~~~~~~~~~~~~~ When the :flag:`Primitive Projections` flag is on or the :attr:`projections(primitive)` attribute is supplied for a :n:`Record` definition, its :g:`match` construct is disabled. To eliminate the record type, one must use its defined primitive projections. For compatibility, the parameters still appear when printing terms even though they are absent in the actual AST manipulated by the kernel. This can be changed by unsetting the :flag:`Printing Primitive Projection Parameters` flag. There are currently two ways to introduce primitive records types: #. Through the :cmd:`Record` command, in which case the type has to be non-recursive. The defined type enjoys eta-conversion definitionally, that is the generalized form of surjective pairing for records: `r` ``= Build_``\ `R` ``(``\ `r`\ ``.(``\ |p_1|\ ``) …`` `r`\ ``.(``\ |p_n|\ ``))``. Eta-conversion allows to define dependent elimination for these types as well. #. Through the :cmd:`Inductive` and :cmd:`CoInductive` commands, when the :term:`body` of the definition is a record declaration of the form ``Build_``\ `R` ``{`` |p_1| ``:`` |t_1|\ ``; … ;`` |p_n| ``:`` |t_n| ``}``. In this case the types can be recursive and eta-conversion is disallowed. Dependent elimination is not available for such types; you must use non-dependent case analysis for these. For both cases the :flag:`Primitive Projections` :term:`flag` must be set or the :attr:`projections(primitive)` :term:`attribute` must be supplied. .. flag:: Primitive Projections This :term:`flag` turns on the use of primitive projections when defining subsequent records (even through the :cmd:`Inductive` and :cmd:`CoInductive` commands). Primitive projections extend the Calculus of Inductive Constructions with a new binary term constructor `r.(p)` representing a primitive projection `p` applied to a record object `r` (i.e., primitive projections are always applied). 
Even if the record type has parameters, these do not appear in the internal representation of applications of the projection, considerably reducing the sizes of terms when manipulating parameterized records and type checking time. On the user level, primitive projections can be used as a replacement for the usual defined ones, although there are a few notable differences. .. attr:: projections(primitive{? = {| yes | no } }) :name: projections(primitive) This :term:`boolean attribute` can be used to override the value of the :flag:`Primitive Projections` :term:`flag` for the record type being defined. .. flag:: Printing Primitive Projection Parameters This compatibility :term:`flag` reconstructs internally omitted parameters at printing time (even though they are absent in the actual AST manipulated by the kernel). Reduction +++++++++ The basic reduction rule of a primitive projection is |p_i| ``(Build_``\ `R` |t_1| … |t_n|\ ``)`` :math:`{\rightarrow_{\iota}}` |t_i|. However, to take the δ flag into account, projections can be in two states: folded or unfolded. An unfolded primitive projection application obeys the rule above, while the folded version delta-reduces to the unfolded version. This allows to precisely mimic the usual unfolding rules of :term:`constants `. Projections obey the usual ``simpl`` flags of the :cmd:`Arguments` command in particular. There is currently no way to input unfolded primitive projections at the user-level, and there is no way to display unfolded projections differently from folded ones. Compatibility Projections and :g:`match` ++++++++++++++++++++++++++++++++++++++++ To ease compatibility with ordinary record types, each primitive projection is also defined as an ordinary :term:`constant` taking parameters and an object of the record type as arguments, and whose :term:`body` is an application of the unfolded primitive projection of the same name. These constants are used when elaborating partial applications of the projection. One can distinguish them from applications of the primitive projection if the :flag:`Printing Primitive Projection Parameters` flag is off: For a primitive projection application, parameters are printed as underscores while for the compatibility projections they are printed as usual. Additionally, user-written :g:`match` constructs on primitive records are desugared into substitution of the projections, they cannot be printed back as :g:`match` constructs. coq-8.15.0/doc/sphinx/language/core/sections.rst000066400000000000000000000077231417001151100215410ustar00rootroot00000000000000.. _section-mechanism: Section mechanism ----------------- Sections are naming scopes that permit creating section-local declarations that can be used by other declarations in the section. Declarations made with :cmd:`Variable`, :cmd:`Hypothesis`, :cmd:`Context`, :cmd:`Let`, :cmd:`Let Fixpoint` and :cmd:`Let CoFixpoint` (or the plural variants of the first two) within sections are local to the section. In proofs done within the section, section-local declarations are included in the :term:`local context` of the initial goal of the proof. They are also accessible in definitions made with the :cmd:`Definition` command. Sections are opened by the :cmd:`Section` command, and closed by :cmd:`End`. Sections can be nested. When a section is closed, its local declarations are no longer available. Global declarations that refer to them will be adjusted so they're still usable outside the section as shown in this :ref:`example `. .. 
cmd:: Section @ident Opens the section named :token:`ident`. Section names do not need to be unique. .. cmd:: End @ident Closes the section or module named :token:`ident`. See :ref:`Terminating an interactive module or module type definition ` for a description of its use with modules. After closing the section, the local declarations (variables and local definitions, see :cmd:`Variable`) are *discharged*, meaning that they stop being visible and that all global objects defined in the section are generalized with respect to the variables and local definitions they each depended on in the section. .. exn:: There is nothing to end. :undocumented: .. exn:: Last block to end has name @ident. :undocumented: .. note:: Most commands, such as the :ref:`Hint ` commands, :cmd:`Notation` and option management commands that appear inside a section are canceled when the section is closed. .. cmd:: Let @ident_decl @def_body Let Fixpoint @fix_definition {* with @fix_definition } Let CoFixpoint @cofix_definition {* with @cofix_definition } :name: Let; Let Fixpoint; Let CoFixpoint These are similar to :cmd:`Definition`, :cmd:`Fixpoint` and :cmd:`CoFixpoint`, except that the declared :term:`constant` is local to the current section. When the section is closed, all persistent definitions and theorems within it that depend on the constant will be wrapped with a :n:`@term_let` with the same declaration. As for :cmd:`Definition`, :cmd:`Fixpoint` and :cmd:`CoFixpoint`, if :n:`@term` is omitted, :n:`@type` is required and Coq enters proof mode. This can be used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. In this case, the proof should be terminated with :cmd:`Defined` in order to define a constant for which the computational behavior is relevant. See :ref:`proof-editing-mode`. .. cmd:: Context {+ @binder } Declare variables in the context of the current section, like :cmd:`Variable`, but also allowing implicit variables, :ref:`implicit-generalization`, and let-binders. .. coqdoc:: Context {A : Type} (a b : A). Context `{EqDec A}. Context (b' := b). .. seealso:: Section :ref:`binders`. Section :ref:`contexts` in chapter :ref:`typeclasses`. .. _section_local_declarations: .. example:: Section-local declarations .. coqtop:: all Section s1. .. coqtop:: all Variables x y : nat. The command :cmd:`Let` introduces section-wide :ref:`let-in`. These definitions won't persist when the section is closed, and all persistent definitions which depend on `y'` will be prefixed with `let y' := y in`. .. coqtop:: in Let y' := y. Definition x' := S x. Definition x'' := x' + y'. .. coqtop:: all Print x'. Print x''. End s1. Print x'. Print x''. Notice the difference between the value of :g:`x'` and :g:`x''` inside section :g:`s1` and outside. coq-8.15.0/doc/sphinx/language/core/sorts.rst000066400000000000000000000102001417001151100210440ustar00rootroot00000000000000.. index:: single: Set (sort) single: SProp single: Prop single: Type .. _sorts: Sorts ~~~~~~~~~~~ .. insertprodn sort universe_expr .. prodn:: sort ::= Set | Prop | SProp | Type | Type @%{ _ %} | Type @%{ @universe %} universe ::= max ( {+, @universe_expr } ) | @universe_expr universe_expr ::= @universe_name {? + @natural } The types of types are called :gdef:`sorts `. All sorts have a type and there is an infinite well-founded typing hierarchy of sorts whose base sorts are :math:`\SProp`, :math:`\Prop` and :math:`\Set`. The sort :math:`\Prop` intends to be the type of logical propositions. 
If :math:`M` is a logical proposition then it denotes the class of terms representing proofs of :math:`M`. An object :math:`m` belonging to :math:`M` :term:`witnesses ` the fact that :math:`M` is provable. An object of type :math:`\Prop` is called a :gdef:`proposition`. We denote propositions by :n:`@form`. This constitutes a semantic subclass of the syntactic class :n:`@term`. The sort :math:`\SProp` is like :math:`\Prop` but the propositions in :math:`\SProp` are known to have irrelevant proofs (all proofs are equal). Objects of type :math:`\SProp` are called :gdef:`strict propositions `. See :ref:`sprop` for information about using :math:`\SProp`, and :cite:`Gilbert:POPL2019` for meta theoretical considerations. The sort :math:`\Set` intends to be the type of small sets. This includes data types such as booleans and naturals, but also products, subsets, and function types over these data types. We denote specifications (program types) by :n:`@specif`. This constitutes a semantic subclass of the syntactic class :n:`@term`. :math:`\SProp`, :math:`\Prop` and :math:`\Set` themselves can be manipulated as ordinary terms. Consequently they also have a type. Because assuming simply that :math:`\Set` has type :math:`\Set` leads to an inconsistent theory :cite:`Coq86`, the language of |Cic| has infinitely many sorts. There are, in addition to the base sorts, a hierarchy of universes :math:`\Type(i)` for any integer :math:`i ≥ 1`. Like :math:`\Set`, all of the sorts :math:`\Type(i)` contain small sets such as booleans, natural numbers, as well as products, subsets and function types over small sets. But, unlike :math:`\Set`, they also contain large sets, namely the sorts :math:`\Set` and :math:`\Type(j)` for :math:`j`_. .. example:: .. coqtop:: all Module Foo. #[ private(matching) ] Inductive my_nat := my_O : my_nat | my_S : my_nat -> my_nat. Check (fun x : my_nat => match x with my_O => true | my_S _ => false end). End Foo. Import Foo. Fail Check (fun x : my_nat => match x with my_O => true | my_S _ => false end). .. index:: match ... with ... .. _match_term: Definition by cases: match -------------------------- Objects of inductive types can be destructured by a case-analysis construction called *pattern matching* expression. A pattern matching expression is used to analyze the structure of an inductive object and to apply specific treatments accordingly. .. insertprodn term_match pattern0 .. prodn:: term_match ::= match {+, @case_item } {? return @term100 } with {? %| } {*| @eqn } end case_item ::= @term100 {? as @name } {? in @pattern } eqn ::= {+| {+, @pattern } } => @term pattern ::= @pattern10 : @term | @pattern10 pattern10 ::= @pattern1 as @name | @pattern1 {* @pattern1 } | @ @qualid {* @pattern1 } pattern1 ::= @pattern0 % @scope_key | @pattern0 pattern0 ::= @qualid | %{%| {* @qualid := @pattern } %|%} | _ | ( {+| @pattern } ) | @number | @string Note that the :n:`@pattern ::= @pattern10 : @term` production is not supported in :n:`match` patterns. Trying to use it will give this error: .. exn:: Casts are not supported in this pattern. :undocumented: This paragraph describes the basic form of pattern matching. See Section :ref:`Mult-match` and Chapter :ref:`extendedpatternmatching` for the description of the general form. The basic form of pattern matching is characterized by a single :n:`@case_item` expression, an :n:`@eqn` restricted to a single :n:`@pattern` and :n:`@pattern` restricted to the form :n:`@qualid {* @ident}`. The expression :n:`match @term {? 
return @term100 } with {+| @pattern__i => @term__i } end` denotes a *pattern matching* over the term :n:`@term` (expected to be of an inductive type :math:`I`). The :n:`@term__i` are the *branches* of the pattern matching expression. Each :n:`@pattern__i` has the form :n:`@qualid @ident` where :n:`@qualid` must denote a constructor. There should be exactly one branch for every constructor of :math:`I`. The :n:`return @term100` clause gives the type returned by the whole match expression. There are several cases. In the *non-dependent* case, all branches have the same type, and the :n:`return @term100` specifies that type. In this case, :n:`return @term100` can usually be omitted as it can be inferred from the type of the branches [1]_. In the *dependent* case, there are three subcases. In the first subcase, the type in each branch may depend on the exact value being matched in the branch. In this case, the whole pattern matching itself depends on the term being matched. This dependency of the term being matched in the return type is expressed with an :n:`@ident` clause where :n:`@ident` is dependent in the return type. For instance, in the following example: .. coqtop:: in Inductive bool : Type := true : bool | false : bool. Inductive eq (A:Type) (x:A) : A -> Prop := eq_refl : eq A x x. Inductive or (A:Prop) (B:Prop) : Prop := | or_introl : A -> or A B | or_intror : B -> or A B. Definition bool_case (b:bool) : or (eq bool b true) (eq bool b false) := match b as x return or (eq bool x true) (eq bool x false) with | true => or_introl (eq bool true true) (eq bool true false) (eq_refl bool true) | false => or_intror (eq bool false true) (eq bool false false) (eq_refl bool false) end. the branches have respective types ":g:`or (eq bool true true) (eq bool true false)`" and ":g:`or (eq bool false true) (eq bool false false)`" while the whole pattern matching expression has type ":g:`or (eq bool b true) (eq bool b false)`", the identifier :g:`b` being used to represent the dependency. .. note:: When the term being matched is a variable, the ``as`` clause can be omitted and the term being matched can serve itself as binding name in the return type. For instance, the following alternative definition is accepted and has the same meaning as the previous one. .. coqtop:: none Reset bool_case. .. coqtop:: in Definition bool_case (b:bool) : or (eq bool b true) (eq bool b false) := match b return or (eq bool b true) (eq bool b false) with | true => or_introl (eq bool true true) (eq bool true false) (eq_refl bool true) | false => or_intror (eq bool false true) (eq bool false false) (eq_refl bool false) end. The second subcase is only relevant for annotated inductive types such as the equality predicate (see Section :ref:`coq-equality`), the order predicate on natural numbers or the type of lists of a given length (see Section :ref:`matching-dependent`). In this configuration, the type of each branch can depend on the type dependencies specific to the branch and the whole pattern matching expression has a type determined by the specific dependencies in the type of the term being matched. This dependency of the return type in the annotations of the inductive type is expressed with a clause in the form :n:`in @qualid {+ _ } {+ @pattern }`, where - :n:`@qualid` is the inductive type of the term being matched; - the holes :n:`_` match the parameters of the inductive type: the return type is not dependent on them. 
- each :n:`@pattern` matches the annotations of the inductive type: the return type is dependent on them - in the basic case which we describe below, each :n:`@pattern` is a name :n:`@ident`; see :ref:`match-in-patterns` for the general case For instance, in the following example: .. coqtop:: in Definition eq_sym (A:Type) (x y:A) (H:eq A x y) : eq A y x := match H in eq _ _ z return eq A z x with | eq_refl _ _ => eq_refl A x end. the type of the branch is :g:`eq A x x` because the third argument of :g:`eq` is :g:`x` in the type of the pattern :g:`eq_refl`. On the contrary, the type of the whole pattern matching expression has type :g:`eq A y x` because the third argument of eq is y in the type of H. This dependency of the case analysis in the third argument of :g:`eq` is expressed by the identifier :g:`z` in the return type. Finally, the third subcase is a combination of the first and second subcase. In particular, it only applies to pattern matching on terms in a type with annotations. For this third subcase, both the clauses ``as`` and ``in`` are available. There are specific notations for case analysis on types with one or two constructors: ``if … then … else …`` and ``let (…,…) := … in …`` (see Sections :ref:`if-then-else` and :ref:`irrefutable-patterns`). .. [1] Except if the inductive type is empty in which case there is no equation that can be used to infer the return type. coq-8.15.0/doc/sphinx/language/extensions/000077500000000000000000000000001417001151100204165ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/language/extensions/arguments-command.rst000066400000000000000000000407711417001151100246020ustar00rootroot00000000000000.. _ArgumentsCommand: Setting properties of a function's arguments ++++++++++++++++++++++++++++++++++++++++++++ .. cmd:: Arguments @reference {* @arg_specs } {* , {* @implicits_alt } } {? : {+, @args_modifier } } .. insertprodn argument_spec args_modifier .. prodn:: argument_spec ::= {? ! } @name {? % @scope_key } arg_specs ::= @argument_spec | / | & | ( {+ @argument_spec } ) {? % @scope_key } | [ {+ @argument_spec } ] {? % @scope_key } | %{ {+ @argument_spec } %} {? % @scope_key } implicits_alt ::= @name | [ {+ @name } ] | %{ {+ @name } %} args_modifier ::= simpl nomatch | simpl never | default implicits | clear implicits | clear scopes | clear bidirectionality hint | rename | assert | extra scopes | clear scopes and implicits | clear implicits and scopes Specifies properties of the arguments of a function after the function has already been defined. It gives fine-grained control over the elaboration process (i.e. the translation of Gallina language extensions into the core language used by the kernel). The command's effects include: * Making arguments implicit. Afterward, implicit arguments must be omitted in any expression that applies :token:`reference`. * Declaring that some arguments of a given function should be interpreted in a given scope. * Affecting when the :tacn:`simpl` and :tacn:`cbn` tactics unfold the function. See :ref:`Args_effect_on_unfolding`. * Providing bidirectionality hints. See :ref:`bidirectionality_hints`. This command supports the :attr:`local` and :attr:`global` attributes. Default behavior is to limit the effect to the current section but also to extend their effect outside the current module or library file. Applying :attr:`local` limits the effect of the command to the current module if it's not in a section. 
Applying :attr:`global` within a section extends the effect outside the current sections and current module in which the command appears. `/` the function will be unfolded only if it's applied to at least the arguments appearing before the `/`. See :ref:`Args_effect_on_unfolding`. .. exn:: The / modifier may only occur once. :undocumented: `&` tells the type checking algorithm to first type check the arguments before the `&` and then to propagate information from that typing context to type check the remaining arguments. See :ref:`bidirectionality_hints`. .. exn:: The & modifier may only occur once. :undocumented: :n:`( ... ) {? % @scope }` :n:`(@name__1 @name__2 ...)%@scope` is shorthand for :n:`@name__1%@scope @name__2%@scope ...` :n:`[ ... ] {? % @scope }` declares the enclosed names as implicit, non-maximally inserted. :n:`[@name__1 @name__2 ... ]%@scope` is equivalent to :n:`[@name__1]%@scope [@name__2]%@scope ...` :n:`%{ ... %} {? % @scope }` declares the enclosed names as implicit, maximally inserted. :n:`%{@name__1 @name__2 ... %}%@scope` is equivalent to :n:`%{@name__1%}%@scope %{@name__2%}%@scope ...` `!` the function will be unfolded only if all the arguments marked with `!` evaluate to constructors. See :ref:`Args_effect_on_unfolding`. :n:`@name {? % @scope }` a *formal parameter* of the function :n:`@reference` (i.e. the parameter name used in the function definition). Unless `rename` is specified, the list of :n:`@name`\s must be a prefix of the formal parameters, including all implicit arguments. `_` can be used to skip over a formal parameter. The construct :n:`@name {? % @scope }` declares :n:`@name` as non-implicit if `clear implicits` is specified or at least one other name is declared implicit in the same list of :n:`@name`\s. :token:`scope` can be either a scope name or its delimiting key. See :ref:`binding_to_scope`. .. exn:: To rename arguments the 'rename' flag must be specified. :undocumented: .. exn:: Flag 'rename' expected to rename @name into @name. :undocumented: .. exn:: Arguments of section variables such as @name may not be renamed. :undocumented: `clear implicits` makes all implicit arguments into explicit arguments .. exn:: The 'clear implicits' flag must be omitted if implicit annotations are given. :undocumented: `default implicits` automatically determine the implicit arguments of the object. See :ref:`auto_decl_implicit_args`. .. exn:: The 'default implicits' flag is incompatible with implicit annotations. :undocumented: `rename` rename implicit arguments for the object. See the example :ref:`here `. `assert` assert that the object has the expected number of arguments with the expected names. See the example here: :ref:`renaming_implicit_arguments`. .. warn:: This command is just asserting the names of arguments of @qualid. If this is what you want, add ': assert' to silence the warning. If you want to clear implicit arguments, add ': clear implicits'. If you want to clear notation scopes, add ': clear scopes' :undocumented: `clear scopes` clears argument scopes of :n:`@reference` `extra scopes` defines extra argument scopes, to be used in case of coercion to ``Funclass`` (see :ref:`coercions`) or with a computed type. `simpl nomatch` prevents performing a simplification step for :n:`@reference` that would expose a match construct in the head position. See :ref:`Args_effect_on_unfolding`. `simpl never` prevents performing a simplification step for :n:`@reference`. See :ref:`Args_effect_on_unfolding`. 
`clear bidirectionality hint` removes the bidirectionality hint, the `&` :n:`@implicits_alt` use to specify alternative implicit argument declarations for functions that can only be applied to a fixed number of arguments (excluding, for instance, functions whose type is polymorphic). For parsing, the longest list of implicit arguments matching the function application is used to select which implicit arguments are inserted. For printing, the alternative with the most implicit arguments is used; the implict arguments will be omitted if :flag:`Printing Implicit` is not set. See the example :ref:`here`. .. todo the above feature seems a bit unnatural and doesn't play well with partial application. See https://github.com/coq/coq/pull/11718#discussion_r408841762 Use :cmd:`About` to view the current implicit arguments setting for a :token:`reference`. Or use the :cmd:`Print Implicit` command to see the implicit arguments of an object (see :ref:`displaying-implicit-args`). Manual declaration of implicit arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. example:: .. coqtop:: reset all Inductive list (A : Type) : Type := | nil : list A | cons : A -> list A -> list A. Check (cons nat 3 (nil nat)). Arguments cons [A] _ _. Arguments nil {A}. Check (cons 3 nil). Fixpoint map (A B : Type) (f : A -> B) (l : list A) : list B := match l with nil => nil | cons a t => cons (f a) (map A B f t) end. Fixpoint length (A : Type) (l : list A) : nat := match l with nil => 0 | cons _ m => S (length A m) end. Arguments map [A B] f l. Arguments length {A} l. (* A has to be maximally inserted *) Check (fun l:list (list nat) => map length l). .. _example_more_implicits: .. example:: Multiple alternatives with :n:`@implicits_alt` .. coqtop:: all Arguments map [A B] f l, [A] B f l, A B f l. Check (fun l => map length l = map (list nat) nat length l). .. _auto_decl_implicit_args: Automatic declaration of implicit arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ":n:`default implicits`" :token:`args_modifier` clause tells Coq to automatically determine the implicit arguments of the object. Auto-detection is governed by flags specifying whether strict, contextual, or reversible-pattern implicit arguments must be considered or not (see :ref:`controlling-strict-implicit-args`, :ref:`controlling-contextual-implicit-args`, :ref:`controlling-rev-pattern-implicit-args` and also :ref:`controlling-insertion-implicit-args`). .. example:: Default implicits .. coqtop:: reset all Inductive list (A:Set) : Set := | nil : list A | cons : A -> list A -> list A. Arguments cons : default implicits. Print Implicit cons. Arguments nil : default implicits. Print Implicit nil. Set Contextual Implicit. Arguments nil : default implicits. Print Implicit nil. The computation of implicit arguments takes account of the unfolding of :term:`constants `. For instance, the variable ``p`` below has type ``(Transitivity R)`` which is reducible to ``forall x,y:U, R x y -> forall z:U, R y z -> R x z``. As the variables ``x``, ``y`` and ``z`` appear strictly in the :term:`body` of the type, they are implicit. .. coqtop:: all Parameter X : Type. Definition Relation := X -> X -> Prop. Definition Transitivity (R:Relation) := forall x y:X, R x y -> forall z:X, R y z -> R x z. Parameters (R : Relation) (p : Transitivity R). Arguments p : default implicits. Print p. Print Implicit p. Parameters (a b c : X) (r1 : R a b) (r2 : R b c). Check (p r1 r2). .. _renaming_implicit_arguments: Renaming implicit arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. 
example:: (continued) Renaming implicit arguments .. coqtop:: all Arguments p [s t] _ [u] _: rename. Check (p r1 (u:=c)). Check (p (s:=a) (t:=b) r1 (u:=c) r2). Fail Arguments p [s t] _ [w] _ : assert. .. _binding_to_scope: Binding arguments to a scope ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The following command declares that the first two arguments of :g:`plus_fct` are in the :token:`scope` delimited by the key ``F`` (``Rfun_scope``) and the third argument is in the scope delimited by the key ``R`` (``R_scope``). .. coqdoc:: Arguments plus_fct (f1 f2)%F x%R. When interpreting a term, if some of the arguments of :token:`reference` are built from a notation, then this notation is interpreted in the scope stack extended by the scope bound (if any) to this argument. The effect of the scope is limited to the argument itself. It does not propagate to subterms but the subterms that, after interpretation of the notation, turn to be themselves arguments of a reference are interpreted accordingly to the argument scopes bound to this reference. .. note:: In notations, the subterms matching the identifiers of the notations are interpreted in the scope in which the identifiers occurred at the time of the declaration of the notation. Here is an example: .. coqtop:: all Parameter g : bool -> bool. Declare Scope mybool_scope. Notation "@@" := true (only parsing) : bool_scope. Notation "@@" := false (only parsing): mybool_scope. Bind Scope bool_scope with bool. Notation "# x #" := (g x) (at level 40). Check # @@ #. Arguments g _%mybool_scope. Check # @@ #. Delimit Scope mybool_scope with mybool. Check # @@%mybool #. .. _Args_effect_on_unfolding: Effects of :cmd:`Arguments` on unfolding ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + `simpl never` indicates that a :term:`constant` should never be unfolded by :tacn:`cbn`, :tacn:`simpl` or :tacn:`hnf`: .. example:: .. coqtop:: all Arguments minus n m : simpl never. After that command an expression like :g:`(minus (S x) y)` is left untouched by the tactics :tacn:`cbn` and :tacn:`simpl`. + A :term:`constant` can be marked to be unfolded only if it's applied to at least the arguments appearing before the `/` in a :cmd:`Arguments` command. .. example:: .. coqtop:: all Definition fcomp A B C f (g : A -> B) (x : A) : C := f (g x). Arguments fcomp {A B C} f g x /. Notation "f \o g" := (fcomp f g) (at level 50). After that command the expression :g:`(f \o g)` is left untouched by :tacn:`simpl` while :g:`((f \o g) t)` is reduced to :g:`(f (g t))`. The same mechanism can be used to make a :term:`constant` volatile, i.e. always unfolded. .. example:: .. coqtop:: all Definition volatile := fun x : nat => x. Arguments volatile / x. + A :term:`constant` can be marked to be unfolded only if an entire set of arguments evaluates to a constructor. The ``!`` symbol can be used to mark such arguments. .. example:: .. coqtop:: all Arguments minus !n !m. After that command, the expression :g:`(minus (S x) y)` is left untouched by :tacn:`simpl`, while :g:`(minus (S x) (S y))` is reduced to :g:`(minus x y)`. + `simpl nomatch` indicates that a :term:`constant` should not be unfolded if it would expose a `match` construct in the head position. This affects the :tacn:`cbn`, :tacn:`simpl` and :tacn:`hnf` tactics. .. example:: .. coqtop:: all Arguments minus n m : simpl nomatch. In this case, :g:`(minus (S (S x)) (S y))` is simplified to :g:`(minus (S x) y)` even if an extra simplification is possible. In detail: the tactic :tacn:`simpl` first applies βι-reduction. 
Then, it expands transparent :term:`constants ` and tries to reduce further using βι-reduction. But, when no ι rule is applied after unfolding then δ-reductions are not applied. For instance trying to use :tacn:`simpl` on :g:`(plus n O) = n` changes nothing. .. _bidirectionality_hints: Bidirectionality hints ~~~~~~~~~~~~~~~~~~~~~~ When type-checking an application, Coq normally does not use information from the context to infer the types of the arguments. It only checks after the fact that the type inferred for the application is coherent with the expected type. Bidirectionality hints make it possible to specify that after type-checking the first arguments of an application, typing information should be propagated from the context to help inferring the types of the remaining arguments. .. todo the following text is a start on better wording but not quite complete. See https://github.com/coq/coq/pull/11718#discussion_r410219992 .. Two common methods to determine the type of a construct are: * *type checking*, which is verifying that a construct matches a known type, and * *type inference*, with is inferring the type of a construct by analyzing the construct. Methods that combine these approaches are known as *bidirectional typing*. Coq normally uses only the first approach to infer the types of arguments, then later verifies that the inferred type is consistent with the expected type. *Bidirectionality hints* specify to use both methods: after type checking the first arguments of an application (appearing before the `&` in :cmd:`Arguments`), typing information from them is propagated to the remaining arguments to help infer their types. An :cmd:`Arguments` command containing :n:`@arg_specs__1 & @arg_specs__2` provides bidirectionality hints. It tells the typechecking algorithm, when type checking applications of :n:`@qualid`, to first type check the arguments in :n:`@arg_specs__1` and then propagate information from the typing context to type check the remaining arguments (in :n:`@arg_specs__2`). .. example:: Bidirectionality hints In a context where a coercion was declared from ``bool`` to ``nat``: .. coqtop:: in reset Definition b2n (b : bool) := if b then 1 else 0. Coercion b2n : bool >-> nat. Coq cannot automatically coerce existential statements over ``bool`` to statements over ``nat``, because the need for inserting a coercion is known only from the expected type of a subterm: .. coqtop:: all Fail Check (ex_intro _ true _ : exists n : nat, n > 0). However, a suitable bidirectionality hint makes the example work: .. coqtop:: all Arguments ex_intro _ _ & _ _. Check (ex_intro _ true _ : exists n : nat, n > 0). Coq will attempt to produce a term which uses the arguments you provided, but in some cases involving Program mode the arguments after the bidirectionality starts may be replaced by convertible but syntactically different terms. coq-8.15.0/doc/sphinx/language/extensions/canonical.rst000066400000000000000000000503151417001151100231030ustar00rootroot00000000000000.. _canonicalstructures: Canonical Structures ====================== :Authors: Assia Mahboubi and Enrico Tassi This chapter explains the basics of canonical structures and how they can be used to overload notations and build a hierarchy of algebraic structures. The examples are taken from :cite:`CSwcu`. We invite the interested reader to refer to this paper for all the details that are omitted here for brevity. 
The interested reader shall also find in :cite:`CSlessadhoc` a detailed description of another, complementary, use of canonical structures: advanced proof search. This latter paper also presents many techniques one can employ to tune the inference of canonical structures. .. extracted from implicit arguments section .. _canonical-structure-declaration: Declaration of canonical structures ----------------------------------- A canonical structure is an instance of a record/structure type that can be used to solve unification problems involving a projection applied to an unknown structure instance (an implicit argument) and a value. The complete documentation of canonical structures can be found in :ref:`canonicalstructures`; here only a simple example is given. .. cmd:: Canonical {? Structure } @reference Canonical {? Structure } @ident_decl @def_body :name: Canonical Structure; _ The first form of this command declares an existing :n:`@reference` as a canonical instance of a structure (a record). The second form defines a new :term:`constant` as if the :cmd:`Definition` command had been used, then declares it as a canonical instance as if the first form had been used on the defined object. This command supports the :attr:`local` attribute. When used, the structure is canonical only within the :cmd:`Section` containing it. :token:`qualid` (in :token:`reference`) denotes an object :n:`(Build_struct c__1 … c__n)` in the structure :g:`struct` for which the fields are :n:`x__1, …, x__n`. Then, each time an equation of the form :n:`(x__i _)` |eq_beta_delta_iota_zeta| :n:`c__i` has to be solved during the type checking process, :token:`qualid` is used as a solution. In other words, :token:`qualid` is canonically used to extend the field :n:`x__i` into a complete structure built on :n:`c__i` when :n:`c__i` unifies with :n:`(x__i _)`. The following kinds of terms are supported for the fields :n:`c__i` of :token:`qualid`: * :term:`Constants ` and section variables of an active section, applied to zero or more arguments. * :token:`sort`\s. * Literal functions: `fun … => …`. * Literal, (possibly dependent) function types: `… -> …` and `forall …, …`. * Variables bound in :token:`qualid`. Only the head symbol of an existing instance's field :n:`c__i` is considered when searching for a canonical extension. We call this head symbol the *key* and we say ":token:`qualid` *keys* the field :n:`x__i` to :n:`k`" when :n:`c__i`'s head symbol is :n:`k`. Keys are the only piece of information that is used for canonical extension. The keys corresponding to the kinds of terms listed above are: * For constants and section variables, potentially applied to arguments: the constant or variable itself, disregarding any arguments. * For sorts: the sort itself. * For literal functions: skip the abstractions and use the key of the body. * For literal function types: a disembodied implication key denoted `forall _, _`, disregarding both its domain and codomain. * For variables bound in :token:`qualid`: a catch-all key denoted `_`. This means that, for example, `(some_constant x1)` and `(some_constant (other_constant y1 y2) x2)` are not distinct keys. Variables bound in :token:`qualid` match any term for the purpose of canonical extension. This has two major consequences for a field :n:`c__i` keyed to a variable of :token:`qualid`: 1. Unless another key—and, thus, instance—matches :n:`c__i`, the instance will always be considered by unification. 2.
:n:`c__i` will be considered overlapping not distinct from any other canonical instance that keys :n:`x__i` to one of its own variables. A record field :n:`x__i` can only be keyed once to each key. Coq prints a warning when :token:`qualid` keys :n:`x__i` to a term whose head symbol is already keyed by an existing canonical instance. In this case, Coq will not register that :token:`qualid` as a canonical extension. (The remaining fields of the instance can still be used for canonical extension.) Canonical structures are particularly useful when mixed with coercions and strict implicit arguments. .. example:: Here is an example. .. coqtop:: all reset Require Import Relations. Require Import EqNat. Set Implicit Arguments. Unset Strict Implicit. Structure Setoid : Type := {Carrier :> Set; Equal : relation Carrier; Prf_equiv : equivalence Carrier Equal}. Definition is_law (A B:Setoid) (f:A -> B) := forall x y:A, Equal x y -> Equal (f x) (f y). Axiom eq_nat_equiv : equivalence nat eq_nat. Definition nat_setoid : Setoid := Build_Setoid eq_nat_equiv. Canonical nat_setoid. Thanks to :g:`nat_setoid` declared as canonical, the implicit arguments :g:`A` and :g:`B` can be synthesized in the next statement. .. coqtop:: all abort Lemma is_law_S : is_law S. .. note:: If a same field occurs in several canonical structures, then only the structure declared first as canonical is considered. .. attr:: canonical{? = {| yes | no } } :name: canonical This :term:`boolean attribute` can decorate a :cmd:`Definition` or :cmd:`Let` command. It is equivalent to having a :cmd:`Canonical Structure` declaration just after the command. To prevent a field from being involved in the inference of canonical instances, its declaration can be annotated with ``canonical=no`` (cf. the syntax of :n:`@record_field`). .. example:: For instance, when declaring the :g:`Setoid` structure above, the :g:`Prf_equiv` field declaration could be written as follows. .. coqdoc:: #[canonical=no] Prf_equiv : equivalence Carrier Equal See :ref:`hierarchy_of_structures` for a more realistic example. .. cmd:: Print Canonical Projections {* @reference } This displays the list of global names that are components of some canonical structure. For each of them, the canonical structure of which it is a projection is indicated. If :term:`constants ` are given as its arguments, only the unification rules that involve or are synthesized from simultaneously all given constants will be shown. .. example:: For instance, the above example gives the following output: .. coqtop:: all Print Canonical Projections. .. coqtop:: all Print Canonical Projections nat. .. note:: The last line in the first example would not show up if the corresponding projection (namely :g:`Prf_equiv`) were annotated as not canonical, as described above. Notation overloading ------------------------- We build an infix notation == for a comparison predicate. Such notation will be overloaded, and its meaning will depend on the types of the terms that are compared. .. coqtop:: all reset Module EQ. Record class (T : Type) := Class { cmp : T -> T -> Prop }. Structure type := Pack { obj : Type; class_of : class obj }. Definition op (e : type) : obj e -> obj e -> Prop := let 'Pack _ (Class _ the_cmp) := e in the_cmp. Check op. Arguments op {e} x y : simpl never. Arguments Class {T} cmp. Module theory. Notation "x == y" := (op x y) (at level 70). End theory. End EQ. We use Coq modules as namespaces. This allows us to follow the same pattern and naming convention for the rest of the chapter. 
The base namespace contains the definitions of the algebraic structure. To keep the example small, the algebraic structure ``EQ.type`` we are defining is very simplistic, and characterizes terms on which a binary relation is defined, without requiring such a relation to satisfy any property. The inner theory module contains the overloaded notation ``==`` and will eventually contain lemmas holding all the instances of the algebraic structure (in this case there are no lemmas). Note that in practice the user may want to declare ``EQ.obj`` as a coercion, but we will not do that here. The following line tests that, when we assume a type ``e`` that is in the ``EQ`` class, we can relate two of its objects with ``==``. .. coqtop:: all Import EQ.theory. Check forall (e : EQ.type) (a b : EQ.obj e), a == b. Still, no concrete type is in the ``EQ`` class. .. coqtop:: all Fail Check 3 == 3. We amend that by equipping ``nat`` with a comparison relation. .. coqtop:: all Definition nat_eq (x y : nat) := Nat.compare x y = Eq. Definition nat_EQcl : EQ.class nat := EQ.Class nat_eq. Canonical Structure nat_EQty : EQ.type := EQ.Pack nat nat_EQcl. Check 3 == 3. Eval compute in 3 == 4. This last test shows that Coq is now not only able to type check ``3 == 3``, but also that the infix relation was bound to the ``nat_eq`` relation. This relation is selected whenever ``==`` is used on terms of type ``nat``. This can be read in the line declaring the canonical structure ``nat_EQty``, where the first argument to ``Pack`` is the key and its second argument a group of canonical values associated with the key. In this case we associate with ``nat`` only one canonical value (since its class, ``nat_EQcl``, has just one member). The use of the projection ``op`` requires its argument to be in the class ``EQ``, and uses such a member (function) to actually compare its arguments. Similarly, we could equip any other type with a comparison relation, and use the ``==`` notation on terms of this type. Derived Canonical Structures ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We know how to use ``==`` on base types, like ``nat``, ``bool``, ``Z``. Here we show how to deal with type constructors, i.e. how to make the following example work: .. coqtop:: all Fail Check forall (e : EQ.type) (a b : EQ.obj e), (a, b) == (a, b). The error message tells us that Coq has no idea how to compare pairs of objects. The following construction tells Coq exactly how to do that. .. coqtop:: all Definition pair_eq (e1 e2 : EQ.type) (x y : EQ.obj e1 * EQ.obj e2) := fst x == fst y /\ snd x == snd y. Definition pair_EQcl e1 e2 := EQ.Class (pair_eq e1 e2). Canonical Structure pair_EQty (e1 e2 : EQ.type) : EQ.type := EQ.Pack (EQ.obj e1 * EQ.obj e2) (pair_EQcl e1 e2). Check forall (e : EQ.type) (a b : EQ.obj e), (a, b) == (a, b). Check forall n m : nat, (3, 4) == (n, m). Thanks to the ``pair_EQty`` declaration, Coq is able to build a comparison relation for pairs whenever it is able to build a comparison relation for each component of the pair. The declaration associates to the key ``*`` (the type constructor of pairs) the canonical comparison relation ``pair_eq`` whenever the type constructor ``*`` is applied to two types that are themselves in the ``EQ`` class. .. _hierarchy_of_structures: Hierarchy of structures ---------------------------- To get to an interesting example we need another base class to be available. We choose the class of types that are equipped with an order relation, to which we associate the infix ``<=`` notation. .. coqtop:: all Module LE.
Record class T := Class { cmp : T -> T -> Prop }. Structure type := Pack { obj : Type; class_of : class obj }. Definition op (e : type) : obj e -> obj e -> Prop := let 'Pack _ (Class _ f) := e in f. Arguments op {_} x y : simpl never. Arguments Class {T} cmp. Module theory. Notation "x <= y" := (op x y) (at level 70). End theory. End LE. As before we register a canonical ``LE`` class for ``nat``. .. coqtop:: all Import LE.theory. Definition nat_le x y := Nat.compare x y <> Gt. Definition nat_LEcl : LE.class nat := LE.Class nat_le. Canonical Structure nat_LEty : LE.type := LE.Pack nat nat_LEcl. And we enable Coq to relate pairs of terms with ``<=``. .. coqtop:: all Definition pair_le e1 e2 (x y : LE.obj e1 * LE.obj e2) := fst x <= fst y /\ snd x <= snd y. Definition pair_LEcl e1 e2 := LE.Class (pair_le e1 e2). Canonical Structure pair_LEty (e1 e2 : LE.type) : LE.type := LE.Pack (LE.obj e1 * LE.obj e2) (pair_LEcl e1 e2). Check (3,4,5) <= (3,4,5). At the current stage we can use ``==`` and ``<=`` on concrete types, like tuples of natural numbers, but we can’t develop an algebraic theory over the types that are equipped with both relations. .. coqtop:: all Check 2 <= 3 /\ 2 == 2. Fail Check forall (e : EQ.type) (x y : EQ.obj e), x <= y -> y <= x -> x == y. Fail Check forall (e : LE.type) (x y : LE.obj e), x <= y -> y <= x -> x == y. We need to define a new class that inherits from both ``EQ`` and ``LE``. .. coqtop:: all Module LEQ. Record mixin (e : EQ.type) (le : EQ.obj e -> EQ.obj e -> Prop) := Mixin { compat : forall x y : EQ.obj e, le x y /\ le y x <-> x == y }. Record class T := Class { EQ_class : EQ.class T; LE_class : LE.class T; extra : mixin (EQ.Pack T EQ_class) (LE.cmp T LE_class) }. Structure type := _Pack { obj : Type; #[canonical=no] class_of : class obj }. Arguments Mixin {e le} _. Arguments Class {T} _ _ _. The mixin component of the ``LEQ`` class contains all the extra content we are adding to ``EQ`` and ``LE``. In particular it contains the requirement that the two relations we are combining are compatible. The `class_of` projection of the `type` structure is annotated as *not canonical*; it plays no role in the search for instances. Unfortunately there is still an obstacle to developing the algebraic theory of this new class. .. coqtop:: all Module theory. Fail Check forall (le : type) (n m : obj le), n <= m -> n <= m -> n == m. The problem is that the two classes ``LE`` and ``LEQ`` are not yet related by a subclass relation. In other words Coq does not see that an object of the ``LEQ`` class is also an object of the ``LE`` class. The following two constructions tell Coq how to canonically build the ``LE.type`` and ``EQ.type`` structure given an ``LEQ.type`` structure on the same type. .. coqtop:: all Definition to_EQ (e : type) : EQ.type := EQ.Pack (obj e) (EQ_class _ (class_of e)). Canonical Structure to_EQ. Definition to_LE (e : type) : LE.type := LE.Pack (obj e) (LE_class _ (class_of e)). Canonical Structure to_LE. We can now formulate our first theorem on the objects of the ``LEQ`` structure. .. coqtop:: all Lemma lele_eq (e : type) (x y : obj e) : x <= y -> y <= x -> x == y. now intros; apply (compat _ _ (extra _ (class_of e)) x y); split. Qed. Arguments lele_eq {e} x y _ _. End theory. End LEQ. Import LEQ.theory. Check lele_eq. Of course one would like to apply results proved in the algebraic setting to any concrete instance of the algebraic structure. .. coqtop:: all Example test_algebraic (n m : nat) : n <= m -> m <= n -> n == m. Fail apply (lele_eq n m). Abort.
Example test_algebraic2 (l1 l2 : LEQ.type) (n m : LEQ.obj l1 * LEQ.obj l2) : n <= m -> m <= n -> n == m. Fail apply (lele_eq n m). Abort. Again one has to tell Coq that the type ``nat`` is in the ``LEQ`` class, and how the type constructor ``*`` interacts with the ``LEQ`` class. In the following proofs are omitted for brevity. .. coqtop:: all Lemma nat_LEQ_compat (n m : nat) : n <= m /\ m <= n <-> n == m. Admitted. Definition nat_LEQmx := LEQ.Mixin nat_LEQ_compat. Lemma pair_LEQ_compat (l1 l2 : LEQ.type) (n m : LEQ.obj l1 * LEQ.obj l2) : n <= m /\ m <= n <-> n == m. Admitted. Definition pair_LEQmx l1 l2 := LEQ.Mixin (pair_LEQ_compat l1 l2). The following script registers an ``LEQ`` class for ``nat`` and for the type constructor ``*``. It also tests that they work as expected. Unfortunately, these declarations are very verbose. In the following subsection we show how to make them more compact. .. coqtop:: all Module Add_instance_attempt. Canonical Structure nat_LEQty : LEQ.type := LEQ._Pack nat (LEQ.Class nat_EQcl nat_LEcl nat_LEQmx). Canonical Structure pair_LEQty (l1 l2 : LEQ.type) : LEQ.type := LEQ._Pack (LEQ.obj l1 * LEQ.obj l2) (LEQ.Class (EQ.class_of (pair_EQty (to_EQ l1) (to_EQ l2))) (LE.class_of (pair_LEty (to_LE l1) (to_LE l2))) (pair_LEQmx l1 l2)). Example test_algebraic (n m : nat) : n <= m -> m <= n -> n == m. now apply (lele_eq n m). Qed. Example test_algebraic2 (n m : nat * nat) : n <= m -> m <= n -> n == m. now apply (lele_eq n m). Qed. End Add_instance_attempt. Note that no direct proof of ``n <= m -> m <= n -> n == m`` is provided by the user for ``n`` and m of type ``nat * nat``. What the user provides is a proof of this statement for ``n`` and ``m`` of type ``nat`` and a proof that the pair constructor preserves this property. The combination of these two facts is a simple form of proof search that Coq performs automatically while inferring canonical structures. Compact declaration of Canonical Structures ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We need some infrastructure for that. .. coqtop:: all Require Import Strings.String. Module infrastructure. Inductive phantom {T : Type} (t : T) : Type := Phantom. Definition unify {T1 T2} (t1 : T1) (t2 : T2) (s : option string) := phantom t1 -> phantom t2. Definition id {T} {t : T} (x : phantom t) := x. Notation "[find v | t1 ~ t2 ] p" := (fun v (_ : unify t1 t2 None) => p) (at level 50, v name, only parsing). Notation "[find v | t1 ~ t2 | s ] p" := (fun v (_ : unify t1 t2 (Some s)) => p) (at level 50, v name, only parsing). Notation "'Error : t : s" := (unify _ t (Some s)) (at level 50, format "''Error' : t : s"). Open Scope string_scope. End infrastructure. To explain the notation ``[find v | t1 ~ t2]`` let us pick one of its instances: ``[find e | EQ.obj e ~ T | "is not an EQ.type" ]``. It should be read as: “find a class e such that its objects have type T or fail with message "T is not an EQ.type"”. The other utilities are used to ask Coq to solve a specific unification problem, that will in turn require the inference of some canonical structures. They are explained in more details in :cite:`CSwcu`. We now have all we need to create a compact “packager” to declare instances of the ``LEQ`` class. .. coqtop:: all Import infrastructure. 
Definition packager T e0 le0 (m0 : LEQ.mixin e0 le0) := [find e | EQ.obj e ~ T | "is not an EQ.type" ] [find o | LE.obj o ~ T | "is not an LE.type" ] [find ce | EQ.class_of e ~ ce ] [find co | LE.class_of o ~ co ] [find m | m ~ m0 | "is not the right mixin" ] LEQ._Pack T (LEQ.Class ce co m). Notation Pack T m := (packager T _ _ m _ id _ id _ id _ id _ id). The object ``Pack`` takes a type ``T`` (the key) and a mixin ``m``. It infers all the other pieces of the class ``LEQ`` and declares them as canonical values associated with the ``T`` key. All in all, the only new piece of information we add in the ``LEQ`` class is the mixin; all the rest is already canonical for ``T`` and hence can be inferred by Coq. ``Pack`` is a notation, hence it is not type checked at the time of its declaration. It will be type checked when it is used, and in that case ``T`` is going to be a concrete type. The odd arguments ``_`` and ``id`` we pass to the packager represent respectively the classes to be inferred (like ``e``, ``o``, etc.) and a token (``id``) to force their inference. Again, for all the details the reader can refer to :cite:`CSwcu`. The declaration of canonical instances can now be way more compact: .. coqtop:: all Canonical Structure nat_LEQty := Eval hnf in Pack nat nat_LEQmx. Canonical Structure pair_LEQty (l1 l2 : LEQ.type) := Eval hnf in Pack (LEQ.obj l1 * LEQ.obj l2) (pair_LEQmx l1 l2). Error messages are also quite intelligible (if one skips to the end of the message). .. coqtop:: all Fail Canonical Structure err := Eval hnf in Pack bool nat_LEQmx. coq-8.15.0/doc/sphinx/language/extensions/evars.rst000066400000000000000000000103601417001151100222700ustar00rootroot00000000000000.. extracted from Gallina extensions chapter .. _existential-variables: Existential variables --------------------- :gdef:`Existential variables ` represent as yet unknown values. .. insertprodn term_evar term_evar .. prodn:: term_evar ::= _ | ?[ @ident ] | ?[ ?@ident ] | ?@ident {? @%{ {+; @ident := @term } %} } Coq terms can include existential variables that represent unknown subterms that are eventually replaced with actual subterms. Existential variables are generated in place of unsolved implicit arguments or “_” placeholders when using commands such as ``Check`` (see Section :ref:`requests-to-the-environment`) or when using tactics such as :tacn:`refine`, as well as in place of unsolved instances when using tactics such as :tacn:`eapply`. An existential variable is defined in a context, which is the context of variables of the placeholder which generated the existential variable, and a type, which is the expected type of the placeholder. As a consequence of typing constraints, existential variables can be duplicated in such a way that they possibly appear in different contexts than their defining context. Thus, any occurrence of a given existential variable comes with an instance of its original context. In the simple case, when an existential variable denotes the placeholder which generated it, or is used in the same context as the one in which it was generated, the context is not displayed and the existential variable is represented by “?” followed by an identifier. .. coqtop:: all Parameter identity : forall (X:Set), X -> X. Check identity _ _. Check identity _ (fun x => _).
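Tactics that leave instances unsolved introduce existential variables in the same way. The following sketch (the statement and the use of ``Show Existentials`` are added here purely for illustration; the names Coq prints for the variables may differ) shows an existential variable created by :tacn:`eexists`:

.. coqtop:: all

   Goal exists n : nat, n = 2.
   eexists.
   (* The witness is an existential variable, displayed with its context and type. *)
   Show Existentials.
   Abort.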
In the general case, when an existential variable :n:`?@ident` appears outside its context of definition, its instance, written in the form :n:`{ {*; @ident := @term} }`, is appended to its name, indicating how the variables of its defining context are instantiated. Only the variables that are defined in another context are displayed: this is why an existential variable used in the same context as its context of definition is written with no instance. This behavior may be changed: see :ref:`explicit-display-existentials`. .. coqtop:: all Check (fun x y => _) 0 1. Existential variables can be named by the user upon creation using the syntax :n:`?[@ident]`. This is useful when the existential variable needs to be explicitly handled later in the script (e.g. with a named-goal selector, see :ref:`goal-selectors`). .. extracted from Gallina chapter .. index:: _ Inferable subterms ~~~~~~~~~~~~~~~~~~ Expressions often contain redundant pieces of information. Subterms that can be automatically inferred by Coq can be replaced by the symbol ``_`` and Coq will guess the missing piece of information. .. extracted from Gallina extensions chapter .. _explicit-display-existentials: Explicit displaying of existential instances for pretty-printing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. flag:: Printing Existential Instances This :term:`flag` (off by default) activates the full display of how the context of an existential variable is instantiated at each of the occurrences of the existential variable. .. coqtop:: all Check (fun x y => _) 0 1. Set Printing Existential Instances. Check (fun x y => _) 0 1. .. _tactics-in-terms: Solving existential variables using tactics ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Instead of letting the unification engine try to solve an existential variable by itself, one can also provide an explicit hole together with a tactic to solve it. Using the syntax ``ltac:(``\ `tacexpr`\ ``)``, the user can put a tactic anywhere a term is expected. The order of resolution is not specified and is implementation-dependent. The inner tactic may use any variable defined in its scope, including repeated alternations between variables introduced by term binding as well as those introduced by tactic binding. The expression `tacexpr` can be any tactic expression as described in :ref:`ltac`. .. coqtop:: all Definition foo (x : nat) : nat := ltac:(exact x). This construction is useful when one wants to define complicated terms using highly automated tactics without resorting to writing the proof-term by means of the interactive proof engine. coq-8.15.0/doc/sphinx/language/extensions/implicit-arguments.rst000066400000000000000000000530731417001151100247750ustar00rootroot00000000000000.. _ImplicitArguments: Implicit arguments ------------------ An implicit argument of a function is an argument which can be inferred from contextual knowledge. There are different kinds of implicit arguments that can be considered implicit in different ways. There are also various commands to control the setting or the inference of implicit arguments. 
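As a first illustration (a minimal sketch; ``twice`` is an illustrative name, not a constant of the standard library), the type argument of a polymorphic function is a typical implicit argument: it can be omitted because it is inferable from the other arguments.

.. coqtop:: all

   (* A is implicit: it is inferred from the explicit arguments. *)
   Definition twice {A : Type} (f : A -> A) (x : A) : A := f (f x).
   Check twice S 0.
   Print Implicit twice.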
The different kinds of implicit arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Implicit arguments inferable from the knowledge of other arguments of a function ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ The first kind of implicit arguments covers the arguments that are inferable from the knowledge of the type of other arguments of the function, or of the type of the surrounding context of the application. Especially, such implicit arguments correspond to parameters dependent in the type of the function. Typical implicit arguments are the type arguments in polymorphic functions. There are several kinds of such implicit arguments. **Strict Implicit Arguments** An implicit argument can be either strict or non-strict. An implicit argument is said to be *strict* if, whatever the other arguments of the function are, it is still inferable from the type of some other argument. Technically, an implicit argument is strict if it corresponds to a parameter which is not applied to a variable which itself is another parameter of the function (since this parameter may erase its arguments), not in the body of a match, and not itself applied or matched against patterns (since the original form of the argument can be lost by reduction). For instance, the first argument of :: cons: forall A:Set, A -> list A -> list A in module ``List.v`` is strict because :g:`list` is an inductive type and :g:`A` will always be inferable from the type :g:`list A` of the third argument of :g:`cons`. Also, the first argument of :g:`cons` is strict with respect to the second one, since the first argument is exactly the type of the second argument. On the contrary, the second argument of a term of type :: forall P:nat->Prop, forall n:nat, P n -> ex nat P is implicit but not strict, since it can only be inferred from the type :g:`P n` of the third argument and if :g:`P` is, e.g., :g:`fun _ => True`, it reduces to an expression where ``n`` does not occur any longer. The first argument :g:`P` is implicit but not strict either because it can only be inferred from :g:`P n` and :g:`P` is not canonically inferable from an arbitrary :g:`n` and the normal form of :g:`P n`. Consider, e.g., that :g:`n` is :math:`0` and the third argument has type :g:`True`, then any :g:`P` of the form :: fun n => match n with 0 => True | _ => anything end would be a solution of the inference problem. **Contextual Implicit Arguments** An implicit argument can be *contextual* or not. An implicit argument is said to be *contextual* if it can be inferred only from the knowledge of the type of the context of the current expression. For instance, the only argument of:: nil : forall A:Set, list A is contextual. Similarly, both arguments of a term of type:: forall P:nat->Prop, forall n:nat, P n \/ n = 0 are contextual (moreover, :g:`n` is strict and :g:`P` is not). **Reversible-Pattern Implicit Arguments** There is another class of implicit arguments that can be reinferred unambiguously if all the types of the remaining arguments are known. This is the class of implicit arguments occurring in the type of another argument in position of reversible pattern, which means it is at the head of an application but applied only to uninstantiated distinct variables. Such an implicit argument is called *reversible- pattern implicit argument*. 
A typical example is the argument :g:`P` of nat_rec in :: nat_rec : forall P : nat -> Set, P 0 -> (forall n : nat, P n -> P (S n)) -> forall x : nat, P x (:g:`P` is reinferable by abstracting over :g:`n` in the type :g:`P n`). See :ref:`controlling-rev-pattern-implicit-args` for the automatic declaration of reversible-pattern implicit arguments. Implicit arguments inferable by resolution ++++++++++++++++++++++++++++++++++++++++++ This corresponds to a class of non-dependent implicit arguments that are solved based on the structure of their type only. Maximal and non-maximal insertion of implicit arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When a function is partially applied and the next argument to apply is an implicit argument, the application can be interpreted in two ways. If the next argument is declared as *maximally inserted*, the partial application will include that argument. Otherwise, the argument is *non-maximally inserted* and the partial application will not include that argument. Each implicit argument can be declared to be inserted maximally or non maximally. In Coq, maximally inserted implicit arguments are written between curly braces "{ }" and non-maximally inserted implicit arguments are written in square brackets "[ ]". .. seealso:: :flag:`Maximal Implicit Insertion` Trailing Implicit Arguments +++++++++++++++++++++++++++ An implicit argument is considered *trailing* when all following arguments are implicit. Trailing implicit arguments must be declared as maximally inserted; otherwise they would never be inserted. .. exn:: Argument @name is a trailing implicit, so it can't be declared non maximal. Please use %{ %} instead of [ ]. For instance: .. coqtop:: all fail Fail Definition double [n] := n + n. Casual use of implicit arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If an argument of a function application can be inferred from the type of the other arguments, the user can force inference of the argument by replacing it with `_`. .. exn:: Cannot infer a term for this placeholder. :name: Cannot infer a term for this placeholder. (Casual use of implicit arguments) Coq was not able to deduce an instantiation of a “_”. .. _declare-implicit-args: Declaration of implicit arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Implicit arguments can be declared when a function is declared or afterwards, using the :cmd:`Arguments` command. Implicit Argument Binders +++++++++++++++++++++++++ .. insertprodn implicit_binders implicit_binders .. prodn:: implicit_binders ::= %{ {+ @name } {? : @type } %} | [ {+ @name } {? : @type } ] In the context of a function definition, these forms specify that :token:`name` is an implicit argument. The first form, with curly braces, makes :token:`name` a maximally inserted implicit argument. The second form, with square brackets, makes :token:`name` a non-maximally inserted implicit argument. For example: .. coqtop:: all Definition id {A : Type} (x : A) : A := x. declares the argument `A` of `id` as a maximally inserted implicit argument. `A` may be omitted in applications of `id` but may be specified if needed: .. coqtop:: all Definition compose {A B C} (g : B -> C) (f : A -> B) := fun x => g (f x). Goal forall A, compose id id = id (A:=A). For non-maximally inserted implicit arguments, use square brackets: .. coqtop:: all Fixpoint map [A B : Type] (f : A -> B) (l : list A) : list B := match l with | nil => nil | cons a t => cons (f a) (map f t) end. Print Implicit map. 
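The practical difference between the two forms shows up when the constant is passed around without being applied. The following sketch (``f_max`` and ``f_min`` are illustrative names, not part of the manual's running example) is expected to behave as indicated in the comments: the maximally inserted version can be passed bare to ``map``, while the non-maximally inserted one cannot, because its implicit argument is not inserted.

.. coqtop:: all

   Definition f_max {A : Type} (x : A) : nat := 0.
   Definition f_min [A : Type] (x : A) : nat := 0.
   (* f_max alone elaborates to (@f_max _), which fits the function type expected by map. *)
   Check map f_max (cons true nil).
   (* f_min alone keeps its quantification over A, so this application fails. *)
   Fail Check map f_min (cons true nil).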
For (co)inductive datatype declarations, the semantics are the following: an inductive parameter declared as an implicit argument need not be repeated in the inductive definition and will become implicit for the inductive type and the constructors. For example: .. coqtop:: all Inductive list {A : Type} : Type := | nil : list | cons : A -> list -> list. Print list. One can always specify the parameter if it is not uniform using the usual implicit arguments disambiguation syntax. The syntax is also supported in internal binders. For instance, in the following kinds of expressions, the type of each declaration present in :n:`{* @binder }` can be bracketed to mark the declaration as implicit: * :n:`fun (@ident:forall {* @binder }, @type) => @term`, * :n:`forall (@ident:forall {* @binder }, @type), @type`, * :n:`let @ident {* @binder } := @term in @term`, * :n:`fix @ident {* @binder } := @term in @term` and * :n:`cofix @ident {* @binder } := @term in @term`. Here is an example: .. coqtop:: all Axiom Ax : forall (f:forall {A} (a:A), A * A), let g {A} (x y:A) := (x,y) in f 0 = g 0 0. .. warn:: Ignoring implicit binder declaration in unexpected position This is triggered when setting an argument implicit in an expression which does not correspond to the type of an assumption or to the :term:`body` of a definition. Here is an example: .. coqtop:: all warn Definition f := forall {y}, y = 0. .. warn:: Making shadowed name of implicit argument accessible by position This is triggered when two variables of same name are set implicit in the same block of binders, in which case the first occurrence is considered to be unnamed. Here is an example: .. coqtop:: all warn Check let g {x:nat} (H:x=x) {x} (H:x=x) := x in 0. Mode for automatic declaration of implicit arguments ++++++++++++++++++++++++++++++++++++++++++++++++++++ .. flag:: Implicit Arguments This :term:`flag` (off by default) allows to systematically declare implicit the arguments detectable as such. Auto-detection of implicit arguments is governed by flags controlling whether strict and contextual implicit arguments have to be considered or not. .. _controlling-strict-implicit-args: Controlling strict implicit arguments +++++++++++++++++++++++++++++++++++++ .. flag:: Strict Implicit When the mode for automatic declaration of implicit arguments is on, the default is to automatically set implicit only the strict implicit arguments plus, for historical reasons, a small subset of the non-strict implicit arguments. To relax this constraint and to set implicit all non-strict implicit arguments by default, you can turn this :term:`flag` off. .. flag:: Strongly Strict Implicit Use this :term:`flag` (off by default) to capture exactly the strict implicit arguments and no more than the strict implicit arguments. .. _controlling-contextual-implicit-args: Controlling contextual implicit arguments +++++++++++++++++++++++++++++++++++++++++ .. flag:: Contextual Implicit By default, Coq does not automatically set implicit the contextual implicit arguments. You can turn this :term:`flag` on to tell Coq to also infer contextual implicit argument. .. _controlling-rev-pattern-implicit-args: Controlling reversible-pattern implicit arguments +++++++++++++++++++++++++++++++++++++++++++++++++ .. flag:: Reversible Pattern Implicit By default, Coq does not automatically set implicit the reversible-pattern implicit arguments. You can turn this :term:`flag` on to tell Coq to also infer reversible-pattern implicit argument. .. 
_controlling-insertion-implicit-args: Controlling the insertion of implicit arguments not followed by explicit arguments ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. flag:: Maximal Implicit Insertion Assuming the implicit argument mode is on, this :term:`flag` (off by default) declares implicit arguments to be automatically inserted when a function is partially applied and the next argument of the function is an implicit one. Combining manual declaration and automatic declaration ++++++++++++++++++++++++++++++++++++++++++++++++++++++ When some arguments are manually specified implicit with binders in a definition and the automatic declaration mode in on, the manual implicit arguments are added to the automatically declared ones. In that case, and when the flag :flag:`Maximal Implicit Insertion` is set to off, some trailing implicit arguments can be inferred to be non-maximally inserted. In this case, they are converted to maximally inserted ones. .. example:: .. coqtop:: all Set Implicit Arguments. Axiom eq0_le0 : forall (n : nat) (x : n = 0), n <= 0. Print Implicit eq0_le0. Axiom eq0_le0' : forall (n : nat) {x : n = 0}, n <= 0. Print Implicit eq0_le0'. .. _explicit-applications: Explicit applications ~~~~~~~~~~~~~~~~~~~~~ In presence of non-strict or contextual arguments, or in presence of partial applications, the synthesis of implicit arguments may fail, so one may have to explicitly give certain implicit arguments of an application. To instantiate a dependent implicit argument, use the :n:`(@ident := @term)` form of :token:`arg`, where :token:`ident` is the name of the implicit argument and :token:`term` is its corresponding explicit term. To instantiate a non-dependent implicit argument, use the :n:`(@natural := @term)` form of :token:`arg`, where :token:`natural` is the index of the implicit argument among all non-dependent arguments of the function (implicit or not, and starting from 1) and :token:`term` is its corresponding explicit term. Alternatively, one can deactivate the hiding of implicit arguments for a single function application using the :n:`@@qualid_annotated {+ @term1 }` form of :token:`term_application`. .. example:: Syntax for explicitly giving implicit arguments (continued) .. coqtop:: all Parameter X : Type. Definition Relation := X -> X -> Prop. Definition Transitivity (R:Relation) := forall x y:X, R x y -> forall z:X, R y z -> R x z. Parameters (R : Relation) (p : Transitivity R). Arguments p : default implicits. Print Implicit p. Parameters (a b c : X) (r1 : R a b) (r2 : R b c). Check (p r1 (z:=c)). Check (p (x:=a) (y:=b) r1 (z:=c) r2). .. exn:: Wrong argument name :undocumented: .. exn:: Wrong argument position :undocumented: .. exn:: Argument at position @natural is mentioned more than once :undocumented: .. exn:: Arguments given by name or position not supported in explicit mode :undocumented: .. exn:: Not enough non implicit arguments to accept the argument bound to @ident :undocumented: .. exn:: Not enough non implicit arguments to accept the argument bound to @natural :undocumented: .. _displaying-implicit-args: Displaying implicit arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. cmd:: Print Implicit @reference Displays the implicit arguments associated with an object, identifying which arguments are applied maximally or not. Displaying implicit arguments when pretty-printing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. 
flag:: Printing Implicit By default, the basic pretty-printing rules hide the inferable implicit arguments of an application. Turn this :term:`flag` on to force printing all implicit arguments. .. flag:: Printing Implicit Defensive By default, the basic pretty-printing rules display implicit arguments that are not detected as strict implicit arguments. This “defensive” mode can quickly make the display cumbersome so this can be deactivated by turning this :term:`flag` off. .. seealso:: :flag:`Printing All`. Interaction with subtyping ~~~~~~~~~~~~~~~~~~~~~~~~~~ When an implicit argument can be inferred from the type of more than one of the other arguments, then only the type of the first of these arguments is taken into account, and not an upper type of all of them. As a consequence, the inference of the implicit argument of “=” fails in .. coqtop:: all Fail Check nat = Prop. but succeeds in .. coqtop:: all Check Prop = nat. Deactivation of implicit arguments for parsing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. insertprodn term_explicit term_explicit .. prodn:: term_explicit ::= @ @qualid_annotated This syntax can be used to disable implicit arguments for a single function. .. example:: The function `id` has one implicit argument and one explicit argument. .. coqtop:: all reset Check (id 0). Definition id' := @id. The function `id'` has no implicit argument. .. coqtop:: all Check (id' nat 0). .. flag:: Parsing Explicit Turning this :term:`flag` on (it is off by default) deactivates the use of implicit arguments. In this case, all arguments of :term:`constants `, inductive types, constructors, etc, including the arguments declared as implicit, have to be given as if no arguments were implicit. By symmetry, this also affects printing. .. example:: We can reproduce the example above using the :flag:`Parsing Explicit` flag: .. coqtop:: all reset Set Parsing Explicit. Definition id' := id. Unset Parsing Explicit. Check (id 1). Check (id' nat 1). Implicit types of variables ~~~~~~~~~~~~~~~~~~~~~~~~~~~ It is possible to bind variable names to a given type (e.g. in a development using arithmetic, it may be convenient to bind the names :g:`n` or :g:`m` to the type :g:`nat` of natural numbers). .. cmd:: Implicit {| Type | Types } @reserv_list :name: Implicit Type; Implicit Types .. insertprodn reserv_list simple_reserv .. prodn:: reserv_list ::= {+ ( @simple_reserv ) } | @simple_reserv simple_reserv ::= {+ @ident } : @type Sets the type of bound variables starting with :token:`ident` (either :token:`ident` itself or :token:`ident` followed by one or more single quotes, underscore or digits) to :token:`type` (unless the bound variable is already declared with an explicit type, in which case, that type will be used). .. example:: .. coqtop:: all Require Import List. Implicit Types m n : nat. Lemma cons_inj_nat : forall m n l, n :: l = m :: l -> n = m. Proof. intros m n. Abort. Lemma cons_inj_bool : forall (m n:bool) l, n :: l = m :: l -> n = m. Abort. .. flag:: Printing Use Implicit Types By default, the type of bound variables is not printed when the variable name is associated with an implicit type which matches the actual type of the variable. This feature can be deactivated by turning this :term:`flag` off. .. _implicit-generalization: Implicit generalization ~~~~~~~~~~~~~~~~~~~~~~~ .. index:: `{ } .. index:: `[ ] .. index:: `( ) .. index:: `{! } .. index:: `[! ] .. index:: `(! ) .. insertprodn generalizing_binder term_generalizing .. 
prodn:: generalizing_binder ::= `( {+, @typeclass_constraint } ) | `%{ {+, @typeclass_constraint } %} | `[ {+, @typeclass_constraint } ] typeclass_constraint ::= {? ! } @term | %{ @name %} : {? ! } @term | @name : {? ! } @term term_generalizing ::= `%{ @term %} | `( @term ) Implicit generalization is an automatic elaboration of a statement with free variables into a closed statement where these variables are quantified explicitly. Use the :cmd:`Generalizable` command to designate which variables should be generalized. It is activated within a binder by prefixing it with \`, and for terms by surrounding it with \`{ }, \`[ ], or \`( ). Terms surrounded by \`{ } introduce their free variables as maximally inserted implicit arguments, terms surrounded by \`[ ] introduce them as non-maximally inserted implicit arguments and terms surrounded by \`( ) introduce them as explicit arguments. Generalizing binders always introduce their free variables as maximally inserted implicit arguments. The binder itself introduces its argument as usual. In the following statement, ``A`` and ``y`` are automatically generalized, ``A`` is implicit and ``x``, ``y`` and the anonymous equality argument are explicit. .. coqtop:: all reset Generalizable All Variables. Definition sym `(x:A) : `(x = y -> y = x) := fun _ p => eq_sym p. Print sym. Dually to normal binders, the name is optional but the type is required: .. coqtop:: all Check (forall `{x = y :> A}, y = x). When generalizing a binder whose type is a typeclass, its own class arguments are omitted from the syntax and are generalized using automatic names, without instance search. Other arguments are also generalized unless provided. This produces a fully general statement. This behavior may be disabled by prefixing the type with a ``!`` or by forcing the typeclass name to be an explicit application using ``@`` (however the latter ignores implicit argument information). .. coqtop:: none Set Warnings "-deprecated-instance-without-locality". .. coqtop:: all Class Op (A:Type) := op : A -> A -> A. Class Commutative (A:Type) `(Op A) := commutative : forall x y, op x y = op y x. Instance nat_op : Op nat := plus. Set Printing Implicit. Check (forall `{Commutative }, True). Check (forall `{Commutative nat}, True). Fail Check (forall `{Commutative nat _}, True). Fail Check (forall `{!Commutative nat}, True). Arguments Commutative _ {_}. Check (forall `{!Commutative nat}, True). Check (forall `{@Commutative nat plus}, True). Multiple binders can be merged using ``,`` as a separator: .. coqtop:: all Check (forall `{Commutative A, Hnat : !Commutative nat}, True). .. cmd:: Generalizable {| {| Variable | Variables } {+ @ident } | All Variables | No Variables } Controls the set of generalizable identifiers. By default, no variables are generalizable. This command supports the :attr:`global` attribute. The :n:`{| Variable | Variables } {+ @ident }` form allows generalization of only the given :n:`@ident`\s. Using this command multiple times adds to the allowed identifiers. The other forms clear the list of :n:`@ident`\s. The :n:`All Variables` form generalizes all free variables in the context that appear under a generalization delimiter. This may result in confusing errors in case of typos. In such cases, the context will probably contain some unexpected generalized variables. The :n:`No Variables` form disables implicit generalization entirely. This is the default behavior (before any :cmd:`Generalizable` command has been entered).
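As a sketch of the restricted form (``swap_args`` is an illustrative name; only ``A`` and ``B`` are declared generalizable here), one can allow generalization of a fixed set of identifiers rather than of all variables:

.. coqtop:: all reset

   Generalizable Variables A B.
   (* A and B are generalized as maximally inserted implicit arguments. *)
   Definition swap_args `(f : A -> B -> nat) : B -> A -> nat :=
     fun b a => f a b.
   About swap_args.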
coq-8.15.0/doc/sphinx/language/extensions/index.rst000066400000000000000000000015761417001151100222700ustar00rootroot00000000000000.. _extensions: =================== Language extensions =================== Elaboration extends the language accepted by the Coq kernel to make it easier to use. For example, this lets the user omit most type annotations because they can be inferred, call functions with implicit arguments which will be inferred as well, extend the syntax with notations, factorize branches when pattern-matching, etc. In this chapter, we present these language extensions and we give some explanations on how this language is translated down to the core language presented in the :ref:`previous chapter `. .. toctree:: :maxdepth: 1 evars implicit-arguments match ../../user-extensions/syntax-extensions arguments-command ../../addendum/implicit-coercions ../../addendum/type-classes canonical ../../addendum/program ../../proof-engine/vernacular-commands coq-8.15.0/doc/sphinx/language/extensions/match.rst000066400000000000000000000711401417001151100222470ustar00rootroot00000000000000.. _extendedpatternmatching: Extended pattern matching ========================= :Authors: Cristina Cornes and Hugo Herbelin This section describes the full form of pattern matching in Coq terms. .. |rhs| replace:: right hand sides .. extracted from Gallina extensions chapter Variants and extensions of :g:`match` ------------------------------------- .. _mult-match: Multiple and nested pattern matching ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The basic version of :g:`match` allows pattern matching on simple patterns. As an extension, multiple nested patterns or disjunction of patterns are allowed, as in ML-like languages (cf. :ref:`multiple-patterns` and :ref:`nested-patterns`). The extension just acts as a macro that is expanded during parsing into a sequence of match on simple patterns. Especially, a construction defined using the extended match is generally printed under its expanded form (see :flag:`Printing Matching`). .. _if-then-else: Pattern-matching on boolean values: the if expression ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. insertprodn term_if term_if .. prodn:: term_if ::= if @term {? {? as @name } return @term100 } then @term else @term For inductive types with exactly two constructors and for pattern matching expressions that do not depend on the arguments of the constructors, it is possible to use a ``if … then … else`` notation. For instance, the definition .. coqtop:: all Definition not (b:bool) := match b with | true => false | false => true end. can be alternatively written .. coqtop:: reset all Definition not (b:bool) := if b then false else true. More generally, for an inductive type with constructors :n:`@ident__1` and :n:`@ident__2`, the following terms are equal: :n:`if @term__0 {? {? as @name } return @term } then @term__1 else @term__2` :n:`match @term__0 {? {? as @name } return @term } with | @ident__1 {* _ } => @term__1 | @ident__2 {* _ } => @term__2 end` .. example:: .. coqtop:: all Check (fun x (H:{x=0}+{x<>0}) => match H with | left _ => true | right _ => false end). Notice that the printing uses the :g:`if` syntax because :g:`sumbool` is declared as such (see :ref:`controlling-match-pp`). .. 
_irrefutable-patterns: Irrefutable patterns: the destructuring let variants ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Pattern-matching on terms inhabiting inductive type having only one constructor can be alternatively written using :g:`let … in …` constructions. There are two variants of them. .. insertprodn destructuring_let destructuring_let .. prodn:: destructuring_let ::= let ( {*, @name } ) {? {? as @name } return @term100 } := @term in @term | let ' @pattern := @term {? return @term100 } in @term | let ' @pattern in @pattern := @term return @term100 in @term First destructuring let syntax ++++++++++++++++++++++++++++++ .. todo explain that this applies to all of the "let" constructs (Gallina, Ltac1 and Ltac2) also add "irrefutable pattern" to the glossary note that in Ltac2 an upper case ident is a constructor, lower case is a variable The expression :n:`let ( {*, @ident__i } ) := @term__0 in @term__1` performs case analysis on :n:`@term__0` whose type must be an inductive type with exactly one constructor. The number of variables :n:`@ident__i` must correspond to the number of arguments of this constructor. Then, in :n:`@term__1`, these variables are bound to the arguments of the constructor in :n:`@term__0`. For instance, the definition .. coqtop:: reset all Definition fst (A B:Set) (H:A * B) := match H with | pair x y => x end. can be alternatively written .. coqtop:: reset all Definition fst (A B:Set) (p:A * B) := let (x, _) := p in x. Notice that reduction is different from regular :g:`let … in …` construction since it happens only if :n:`@term__0` is in constructor form. Otherwise, the reduction is blocked. The pretty-printing of a definition by matching on a irrefutable pattern can either be done using :g:`match` or the :g:`let` construction (see Section :ref:`controlling-match-pp`). If term inhabits an inductive type with one constructor `C`, we have an equivalence between :: let (ident₁, …, identₙ) [dep_ret_type] := term in term' and :: match term [dep_ret_type] with C ident₁ … identₙ => term' end Second destructuring let syntax +++++++++++++++++++++++++++++++ Another destructuring let syntax is available for inductive types with one constructor by giving an arbitrary pattern instead of just a tuple for all the arguments. For example, the preceding example can be written: .. coqtop:: reset all Definition fst (A B:Set) (p:A*B) := let 'pair x _ := p in x. This is useful to match deeper inside tuples and also to use notations for the pattern, as the syntax :g:`let ’p := t in b` allows arbitrary patterns to do the deconstruction. For example: .. coqtop:: all Definition deep_tuple (A:Set) (x:(A*A)*(A*A)) : A*A*A*A := let '((a,b), (c, d)) := x in (a,b,c,d). Notation " x 'With' p " := (exist _ x p) (at level 20). Definition proj1_sig' (A:Set) (P:A->Prop) (t:{ x:A | P x }) : A := let 'x With p := t in x. When printing definitions which are written using this construct it takes precedence over let printing directives for the datatype under consideration (see Section :ref:`controlling-match-pp`). .. _controlling-match-pp: Controlling pretty-printing of match expressions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The following commands give some control over the pretty-printing of :g:`match` expressions. Printing nested patterns +++++++++++++++++++++++++ .. flag:: Printing Matching The Calculus of Inductive Constructions knows pattern matching only over simple patterns. 
It is however convenient to re-factorize nested pattern matching into a single pattern matching over a nested pattern. When this :term:`flag` is on (default), Coq’s printer tries to do such limited re-factorization. Turning it off tells Coq to print only simple pattern matching problems in the same way as the Coq kernel handles them. Factorization of clauses with same right-hand side ++++++++++++++++++++++++++++++++++++++++++++++++++ .. flag:: Printing Factorizable Match Patterns When several patterns share the same right-hand side, it is additionally possible to share the clauses using disjunctive patterns. Assuming that the printing matching mode is on, this :term:`flag` (on by default) tells Coq's printer to try to do this kind of factorization. Use of a default clause +++++++++++++++++++++++ .. flag:: Printing Allow Match Default Clause When several patterns share the same right-hand side which do not depend on the arguments of the patterns, yet an extra factorization is possible: the disjunction of patterns can be replaced with a `_` default clause. Assuming that the printing matching mode and the factorization mode are on, this :term:`flag` (on by default) tells Coq's printer to use a default clause when relevant. Printing of wildcard patterns ++++++++++++++++++++++++++++++ .. flag:: Printing Wildcard Some variables in a pattern may not occur in the right-hand side of the pattern matching clause. When this :term:`flag` is on (default), the variables having no occurrences in the right-hand side of the pattern matching clause are just printed using the wildcard symbol “_”. Printing of the elimination predicate +++++++++++++++++++++++++++++++++++++ .. flag:: Printing Synth In most of the cases, the type of the result of a matched term is mechanically synthesizable. Especially, if the result type does not depend of the matched term. When this :term:`flag` is on (default), the result type is not printed when Coq knows that it can re- synthesize it. Printing matching on irrefutable patterns ++++++++++++++++++++++++++++++++++++++++++ If an inductive type has just one constructor, pattern matching can be written using the first destructuring let syntax. .. table:: Printing Let @qualid This :term:`table` specifies a set of qualids for which pattern matching is displayed using a let expression. Note that this only applies to pattern matching instances entered with :g:`match`. It doesn't affect pattern matching explicitly entered with a destructuring :g:`let`. Use the :cmd:`Add` and :cmd:`Remove` commands to update this set. Printing matching on booleans +++++++++++++++++++++++++++++ If an inductive type is isomorphic to the boolean type, pattern matching can be written using ``if`` … ``then`` … ``else`` …. This table controls which types are written this way: .. table:: Printing If @qualid This :term:`table` specifies a set of qualids for which pattern matching is displayed using ``if`` … ``then`` … ``else`` …. Use the :cmd:`Add` and :cmd:`Remove` commands to update this set. This example emphasizes what the printing settings offer. .. example:: .. coqtop:: all Definition snd (A B:Set) (H:A * B) := match H with | pair x y => y end. Test Printing Let for prod. Print snd. Remove Printing Let prod. Unset Printing Synth. Unset Printing Wildcard. Print snd. Conventions about unused pattern-matching variables ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Pattern-matching variables that are not used on the right-hand side of ``=>`` are considered the sign of a potential error. 
For instance, it could result from an undetected misspelled constant constructor. By default, a warning is issued in such situations. .. warn:: Unused variable @ident catches more than one case. This indicates that an unused pattern variable :token:`ident` occurs in a pattern-matching clause used to complete at least two cases of the pattern-matching problem. The warning can be deactivated by using a variable name starting with ``_`` or by setting ``Set Warnings "-unused-pattern-matching-variable"``. Here is an example where the warning is activated. .. example:: .. coqtop:: none Set Warnings "-unused-pattern-matching-variable". .. coqtop:: all Definition is_zero (o : option nat) := match o with | Some 0 => true | x => false end. .. coqtop:: none Set Warnings "+unused-pattern-matching-variable". Patterns -------- The full syntax of `match` is presented in :ref:`match_term`. Identifiers in patterns are either constructor names or variables. Any identifier that is not the constructor of an inductive or coinductive type is considered to be a variable. A variable name cannot occur more than once in a given pattern. It is recommended to start variable names by a lowercase letter. If a pattern has the form ``c x`` where ``c`` is a constructor symbol and x is a linear vector of (distinct) variables, it is called *simple*: it is the kind of pattern recognized by the basic version of match. On the opposite, if it is a variable ``x`` or has the form ``c p`` with ``p`` not only made of variables, the pattern is called *nested*. A variable pattern matches any value, and the identifier is bound to that value. The pattern “``_``” (called “don't care” or “wildcard” symbol) also matches any value, but does not bind anything. It may occur an arbitrary number of times in a pattern. Alias patterns written :n:`(@pattern as @ident)` are also accepted. This pattern matches the same values as :token:`pattern` does and :token:`ident` is bound to the matched value. A pattern of the form :n:`@pattern | @pattern` is called disjunctive. A list of patterns separated with commas is also considered as a pattern and is called *multiple pattern*. However multiple patterns can only occur at the root of pattern matching equations. Disjunctions of *multiple patterns* are allowed though. Since extended ``match`` expressions are compiled into the primitive ones, the expressiveness of the theory remains the same. Once parsing has finished only simple patterns remain. The original nesting of the ``match`` expressions is recovered at printing time. An easy way to see the result of the expansion is to toggle off the nesting performed at printing (use here :flag:`Printing Matching`), then by printing the term with :cmd:`Print` if the term is a :term:`constant`, or using the command :cmd:`Check`. The extended ``match`` still accepts an optional *elimination predicate* given after the keyword ``return``. Given a pattern matching expression, if all the right-hand-sides of ``=>`` have the same type, then this type can be sometimes synthesized, and so we can omit the return part. Otherwise the predicate after return has to be provided, like for the basicmatch. Let us illustrate through examples the different aspects of extended pattern matching. Consider for example the function that computes the maximum of two natural numbers. We can write it in primitive syntax by: .. coqtop:: in Fixpoint max (n m:nat) {struct m} : nat := match n with | O => m | S n' => match m with | O => S n' | S m' => S (max n' m') end end. .. 
_multiple-patterns: Multiple patterns ----------------- Using multiple patterns in the definition of ``max`` lets us write: .. coqtop:: in reset Fixpoint max (n m:nat) {struct m} : nat := match n, m with | O, _ => m | S n', O => S n' | S n', S m' => S (max n' m') end. which will be compiled into the previous form. The pattern matching compilation strategy examines patterns from left to right. A match expression is generated **only** when there is at least one constructor in the column of patterns. E.g., the following example does not build a match expression. .. coqtop:: all Check (fun x:nat => match x return nat with | y => y end). Aliasing subpatterns -------------------- We can also use :n:`as @ident` to associate a name with a sub-pattern: .. coqtop:: in reset Fixpoint max (n m:nat) {struct n} : nat := match n, m with | O, _ => m | S n' as p, O => p | S n', S m' => S (max n' m') end. .. _nested-patterns: Nested patterns --------------- Here is an example of nested patterns: .. coqtop:: in Fixpoint even (n:nat) : bool := match n with | O => true | S O => false | S (S n') => even n' end. This is compiled into: .. coqtop:: all Unset Printing Matching. Print even. .. coqtop:: none Set Printing Matching. In the previous examples, the patterns do not overlap with each other, but it is sometimes convenient to write patterns that admit a nontrivial superposition. Consider the boolean function :g:`lef` that, given two natural numbers, yields :g:`true` if the first one is less than or equal to the second one and :g:`false` otherwise. We can write it as follows: .. coqtop:: in Fixpoint lef (n m:nat) {struct m} : bool := match n, m with | O, x => true | x, O => false | S n, S m => lef n m end. Note that the first and the second multiple patterns overlap because the pair of values ``O O`` matches both. Thus, what is the result of the function on those values? To eliminate ambiguity we use the *textual priority rule:* we consider patterns to be ordered from top to bottom. A value is matched by the pattern at the i-th row if and only if it is not matched by some pattern from a previous row. Thus in the example, ``O O`` is matched by the first pattern, and so :g:`(lef O O)` yields :g:`true`. Another way to write this function is: .. coqtop:: in reset Fixpoint lef (n m:nat) {struct m} : bool := match n, m with | O, x => true | S n, S m => lef n m | _, _ => false end. Here the last pattern overlaps with the first two. Because of the priority rule, the last pattern will be used only for values that match neither the first nor the second one. Terms with useless patterns are not accepted by the system. Here is an example: .. coqtop:: all Fail Check (fun x:nat => match x with | O => true | S _ => false | x => true end). Disjunctive patterns -------------------- Multiple patterns that share the same right-hand-side can be factorized using the notation :n:`{+| {+, @pattern } }`. For instance, :g:`max` can be rewritten as follows: .. coqtop:: in reset Fixpoint max (n m:nat) {struct m} : nat := match n, m with | S n', S m' => S (max n' m') | 0, p | p, 0 => p end. Similarly, factorization of (not necessarily multiple) patterns that share the same variables is possible by using the notation :n:`{+| @pattern}`. Here is an example: .. coqtop:: in Definition filter_2_4 (n:nat) : nat := match n with | 2 as m | 4 as m => m | _ => 0 end. Nested disjunctive patterns are allowed, inside parentheses, with the notation :n:`({+| @pattern})`, as in: ..
coqtop:: in Definition filter_some_square_corners (p:nat*nat) : nat*nat := match p with | ((2 as m | 4 as m), (3 as n | 5 as n)) => (m,n) | _ => (0,0) end. About patterns of parametric types ---------------------------------- Parameters in patterns ~~~~~~~~~~~~~~~~~~~~~~ When matching objects of a parametric type, parameters do not bind in patterns. They must be substituted by “``_``”. Consider for example the type of polymorphic lists: .. coqtop:: in Inductive List (A:Set) : Set := | nil : List A | cons : A -> List A -> List A. We can check the function *tail*: .. coqtop:: all Check (fun l:List nat => match l with | nil _ => nil nat | cons _ _ l' => l' end). When we use parameters in patterns there is an error message: .. coqtop:: all Fail Check (fun l:List nat => match l with | nil A => nil nat | cons A _ l' => l' end). .. flag:: Asymmetric Patterns This :term:`flag` (off by default) removes parameters from constructors in patterns: .. coqtop:: all Set Asymmetric Patterns. Check (fun l:List nat => match l with | nil => nil _ | cons _ l' => l' end). Unset Asymmetric Patterns. Implicit arguments in patterns ------------------------------ By default, implicit arguments are omitted in patterns. So we write: .. coqtop:: all Arguments nil {A}. Arguments cons [A] _ _. Check (fun l:List nat => match l with | nil => nil | cons _ l' => l' end). But the possibility to use all the arguments is given by “``@``” implicit explicitations (as for terms, see :ref:`explicit-applications`). .. coqtop:: all Check (fun l:List nat => match l with | @nil _ => @nil nat | @cons _ _ l' => l' end). .. _matching-dependent: Matching objects of dependent types ----------------------------------- The previous examples illustrate pattern matching on objects of non- dependent types, but we can also use the expansion strategy to destructure objects of dependent types. Consider the type :g:`listn` of lists of a certain length: .. coqtop:: in reset Inductive listn : nat -> Set := | niln : listn 0 | consn : forall n:nat, nat -> listn n -> listn (S n). Understanding dependencies in patterns -------------------------------------- We can define the function length over :g:`listn` by: .. coqdoc:: Definition length (n:nat) (l:listn n) := n. Just for illustrating pattern matching, we can define it by case analysis: .. coqtop:: in Definition length (n:nat) (l:listn n) := match l with | niln => 0 | consn n _ _ => S n end. We can understand the meaning of this definition using the same notions of usual pattern matching. When the elimination predicate must be provided ----------------------------------------------- Dependent pattern matching ~~~~~~~~~~~~~~~~~~~~~~~~~~ The examples given so far do not need an explicit elimination predicate because all the |rhs| have the same type and Coq succeeds to synthesize it. Unfortunately when dealing with dependent patterns it often happens that we need to write cases where the types of the |rhs| are different instances of the elimination predicate. The function :g:`concat` for :g:`listn` is an example where the branches have different types and we need to provide the elimination predicate: .. coqtop:: in Fixpoint concat (n:nat) (l:listn n) (m:nat) (l':listn m) {struct l} : listn (n + m) := match l in listn n return listn (n + m) with | niln => l' | consn n' a y => consn (n' + m) a (concat n' y m l') end. .. coqtop:: none Reset concat. The elimination predicate is :g:`fun (n:nat) (l:listn n) => listn (n+m)`. 
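Here is a reformulation of the above definition (an illustrative variant, not an additional requirement; the name ``concat'`` and the index name ``p`` are arbitrary) in which the ``in`` clause binds a fresh name for the index, emphasizing that this name is a new variable and not the outer ``n``:

.. coqtop:: in

   Fixpoint concat' (n:nat) (l:listn n) (m:nat) (l':listn m) {struct l} :
     listn (n + m) :=
     match l in listn p return listn (p + m) with
     | niln => l'
     | consn n' a y => consn (n' + m) a (concat' n' y m l')
     end.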
In general, if :g:`m` has type :g:`(I q1 … qr t1 … ts)` where :g:`q1, …, qr` are parameters, the elimination predicate should be of the form :g:`fun y1 … ys x : (I q1 … qr y1 … ys ) => Q`. In the concrete syntax, it should be written: ``match m as x in (I _ … _ y1 … ys) return Q with … end``. The variables which appear in the ``in`` and ``as`` clauses are new and bound in the property :g:`Q` of the return clause. The parameters of the inductive definition should not be mentioned and are replaced by ``_``. Multiple dependent pattern matching ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Recall that a list of patterns is also a pattern. So, when we destructure several terms at the same time and the branches have different types, we need to provide the elimination predicate for this multiple pattern. This is done using the same scheme: each term may be associated with an ``as`` clause and an ``in`` clause in order to introduce a dependent product. For example, an equivalent definition for :g:`concat` (even though the matching on the second term is trivial) would have been: .. coqtop:: in Fixpoint concat (n:nat) (l:listn n) (m:nat) (l':listn m) {struct l} : listn (n + m) := match l in listn n, l' return listn (n + m) with | niln, x => x | consn n' a y, x => consn (n' + m) a (concat n' y m x) end. Even without real matching over the second term, this construction can be used to keep types linked. If :g:`a` and :g:`b` are two :g:`listn` of the same length, by writing .. coqtop:: in Check (fun n (a b: listn n) => match a, b with | niln, b0 => tt | consn n' a y, bS => tt end). we get a copy of :g:`b` at type :g:`listn 0`, resp. :g:`listn (S n')`. .. _match-in-patterns: Patterns in ``in`` ~~~~~~~~~~~~~~~~~~ If the type of the matched term is more precise than an inductive applied to variables, arguments of the inductive in the ``in`` branch can be more complicated patterns than a variable. Moreover, constructors whose types do not follow the same pattern will become impossible branches. In an impossible branch, you can return anything, but :g:`False_rect unit` has the advantage of being a subterm of anything. To be concrete: the ``tail`` function can be written: .. coqtop:: in Definition tail n (v: listn (S n)) := match v in listn (S m) return listn m with | niln => False_rect unit | consn n' a y => y end. and :g:`tail n v` will be a subterm of :g:`v`. Using pattern matching to write proofs -------------------------------------- In all the previous examples the elimination predicate does not depend on the object(s) matched. But it may depend, and the typical case is when we write a proof by induction or a function that yields an object of a dependent type. An example of a proof written using ``match`` is given in the description of the tactic :tacn:`refine`. For example, we can write the function :g:`buildlist` that, given a natural number :g:`n`, builds a list of length :g:`n` containing zeros as follows: .. coqtop:: in Fixpoint buildlist (n:nat) : listn n := match n return listn n with | O => niln | S n => consn n 0 (buildlist n) end. We can also use multiple patterns. Consider the following definition of the predicate less-equal :g:`LE`: .. coqtop:: in Inductive LE : nat -> nat -> Prop := | LEO : forall n:nat, LE 0 n | LES : forall n m:nat, LE n m -> LE (S n) (S m). We can use multiple patterns to write the proof of the lemma :g:`forall (n m:nat), (LE n m) \/ (LE m n)`: ..
coqtop:: in Fixpoint dec (n m:nat) {struct n} : LE n m \/ LE m n := match n, m return LE n m \/ LE m n with | O, x => or_introl (LE x 0) (LEO x) | x, O => or_intror (LE x 0) (LEO x) | S n as n', S m as m' => match dec n m with | or_introl h => or_introl (LE m' n') (LES n m h) | or_intror h => or_intror (LE n' m') (LES m n h) end end. In the example of :g:`dec`, the first match is dependent while the second is not. The user can also use match in combination with the tactic :tacn:`refine` to build incomplete proofs beginning with a :g:`match` construction. Pattern-matching on inductive objects involving local definitions ----------------------------------------------------------------- If local definitions occur in the type of a constructor, then there are two ways to match on this constructor. Either the local definitions are skipped and matching is done only on the true arguments of the constructors, or the bindings for local definitions can also be caught in the matching. .. example:: .. coqtop:: in reset Inductive list : nat -> Set := | nil : list 0 | cons : forall n:nat, let m := (2 * n) in list m -> list (S (S m)). In the next example, the local definition is not caught. .. coqtop:: in Fixpoint length n (l:list n) {struct l} : nat := match l with | nil => 0 | cons n l0 => S (length (2 * n) l0) end. But in this example, it is. .. coqtop:: in Fixpoint length' n (l:list n) {struct l} : nat := match l with | nil => 0 | @cons _ m l0 => S (length' m l0) end. .. note:: For a given matching clause, either none of the local definitions or all of them can be caught. .. note:: You can only catch let bindings in mode where you bind all variables and so you have to use ``@`` syntax. .. note:: this feature is incoherent with the fact that parameters cannot be caught and consequently is somehow hidden. For example, there is no mention of it in error messages. Pattern-matching and coercions ------------------------------ If a mismatch occurs between the expected type of a pattern and its actual type, a coercion made from constructors is sought. If such a coercion can be found, it is automatically inserted around the pattern. .. example:: .. coqtop:: in Inductive I : Set := | C1 : nat -> I | C2 : I -> I. Coercion C1 : nat >-> I. .. coqtop:: all Check (fun x => match x with | C2 O => 0 | _ => 0 end). When does the expansion strategy fail? -------------------------------------- The strategy works very like in ML languages when treating patterns of non-dependent types. But there are new cases of failure that are due to the presence of dependencies. The error messages of the current implementation may be sometimes confusing. When the tactic fails because patterns are somehow incorrect then error messages refer to the initial expression. But the strategy may succeed to build an expression whose sub-expressions are well typed when the whole expression is not. In this situation the message makes reference to the expanded expression. We encourage users, when they have patterns with the same outer constructor in different equations, to name the variable patterns in the same positions with the same name. E.g. to write ``(cons n O x) => e1`` and ``(cons n _ x) => e2`` instead of ``(cons n O x) => e1`` and ``(cons n' _ x') => e2``. This helps to maintain certain name correspondence between the generated expression and the original. Here is a summary of the error messages corresponding to each situation: .. exn:: The constructor @ident expects @natural arguments. 
The variable ident is bound several times in pattern term Found a constructor of inductive type term while a constructor of term is expected Patterns are incorrect (because constructors are not applied to the correct number of arguments, because they are not linear or they are wrongly typed). .. exn:: Non exhaustive pattern matching. The pattern matching is not exhaustive. .. exn:: The elimination predicate term should be of arity @natural (for non \ dependent case) or @natural (for dependent case). The elimination predicate provided to match has not the expected arity. .. exn:: Unable to infer a match predicate Either there is a type incompatibility or the problem involves dependencies. There is a type mismatch between the different branches. The user should provide an elimination predicate. coq-8.15.0/doc/sphinx/language/gallina-extensions.rst000066400000000000000000000001361417001151100225550ustar00rootroot00000000000000:orphan: .. raw:: html coq-8.15.0/doc/sphinx/language/gallina-specification-language.rst000066400000000000000000000001301417001151100247510ustar00rootroot00000000000000:orphan: .. raw:: html coq-8.15.0/doc/sphinx/language/module-system.rst000066400000000000000000000001321417001151100215540ustar00rootroot00000000000000:orphan: .. raw:: html coq-8.15.0/doc/sphinx/license.rst000066400000000000000000000004701417001151100166110ustar00rootroot00000000000000.. note:: **License** This material (the Coq Reference Manual) may be distributed only subject to the terms and conditions set forth in the Open Publication License, v1.0 or later (the latest version is presently available at http://www.opencontent.org/openpub). Options A and B are not elected. coq-8.15.0/doc/sphinx/practical-tools/000077500000000000000000000000001417001151100175345ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/practical-tools/coq-commands.rst000066400000000000000000000675621417001151100226670ustar00rootroot00000000000000.. _thecoqcommands: The Coq commands ==================== There are three Coq commands: + ``coqtop``: the Coq toplevel (interactive mode); + ``coqc``: the Coq compiler (batch compilation); + ``coqchk``: the Coq checker (validation of compiled libraries). The options are (basically) the same for the first two commands, and roughly described below. You can also look at the ``man`` pages of ``coqtop`` and ``coqc`` for more details. .. _interactive-use: Interactive use (coqtop) ------------------------ In the interactive mode, also known as the Coq toplevel, the user can develop his theories and proofs step by step. The Coq toplevel is run by the command ``coqtop``. There are two different binary images of Coq: the byte-code one and the native-code one (if OCaml provides a native-code compiler for your platform, which is supposed in the following). By default, ``coqtop`` executes the native-code version; run ``coqtop.byte`` to get the byte-code version. The byte-code toplevel is based on an OCaml toplevel (to allow dynamic linking of tactics). You can switch to the OCaml toplevel with the command ``Drop.``, and come back to the Coq toplevel with the command ``Coqloop.loop();;``. .. flag:: Coqtop Exit On Error This :term:`flag`, off by default, causes coqtop to exit with status code ``1`` if a command produces an error instead of recovering from it. Batch compilation (coqc) ------------------------ The ``coqc`` command compiles a Coq proof script file with a ".v" suffix to create a compiled file with a ".vo" suffix. (See :ref:`compiled-files`.) 
The last component of the filename must be a valid Coq identifier as described in :ref:`lexical-conventions`; it should contain only letters, digits or underscores (_) and must end with the ".v" suffix. For example ``/bar/foo/toto.v`` is valid, but ``/bar/foo/to-to.v`` is not. We recommend specifying a logical directory name (which is also the module name) with the `-R` or the `-Q` options. Generally we recommend using utilities such as `make` (using `coq_makefile` to generate the `Makefile`) or `dune` to build Coq projects. See :ref:`building_coq_project`. .. example:: Compiling and loading a single file If `foo.v` is in Coq's current directory, you can use `coqc foo.v` to compile it and then `Require foo.` in your script. But this doesn't scale well for larger projects. Generally it's better to define a new module: To compile `foo.v` as part of a module `Mod1` that is rooted at `.` (i.e. the directory containing `foo.v`), run `coqc -Q . Mod1 foo.v`. To make the module available in `CoqIDE`, include the following line in the `_CoqProject` file (see :ref:`coq_makefile`) in the directory from which you start `CoqIDE`. `PATH` is the pathname of the directory containing the module, which can be an absolute path or relative to Coq's current directory. For now, you must close and reload a named script file for `CoqIDE` to pick up the change, or restart `CoqIDE`. The project file name is configurable in `Edit / Preferences / Project`. .. coqdoc:: -R PATH Mod1 It's also possible to load a module within `coqtop` or `coqide` with commands like these. The drawback of this is that it adds environment-specific information (the PATH) to your script, making it non-portable, so we discourage using this approach. .. coqdoc:: Add LoadPath "PATH" as Mod1. Require Mod1.foo. in which `PATH` is the pathname of the directory containing `foo.v`, which can be absolute or relative to Coq's current directory. Customization at launch time --------------------------------- By resource file ~~~~~~~~~~~~~~~~~~~~~~~ When Coq is launched, with either ``coqtop`` or ``coqc``, the resource file ``$XDG_CONFIG_HOME/coq/coqrc.xxx``, if it exists, will be implicitly prepended to any document read by Coq, whether it is an interactive session or a file to compile. Here, ``$XDG_CONFIG_HOME`` is the configuration directory of the user (by default it's ``~/.config``) and ``xxx`` is the version number (e.g. 8.8). If this file is not found, then the file ``$XDG_CONFIG_HOME/coqrc`` is searched. If not found, the file ``~/.coqrc.xxx`` is searched, and, if still not found, the file ``~/.coqrc``. If the latter is also absent, no resource file is loaded. You can also specify an arbitrary name for the resource file (see option ``-init-file`` below). The resource file may contain, for instance, ``Add LoadPath`` commands to add directories to the load path of Coq. It is possible to skip the loading of the resource file with the option ``-q``. .. _customization-by-environment-variables: By environment variables ~~~~~~~~~~~~~~~~~~~~~~~~~ ``$COQPATH`` can be used to specify the load path. It is a list of directories separated by ``:`` (``;`` on Windows). Coq will also honor ``$XDG_DATA_HOME`` and ``$XDG_DATA_DIRS`` (see Section :ref:`libraries-and-filesystem`). Some Coq commands call other Coq commands.
In this case, they look for the commands in directory specified by ``$COQBIN``. If this variable is not set, they look for the commands in the executable path. .. _COQ_COLORS: ``$COQ_COLORS`` can be used to specify the set of colors used by ``coqtop`` to highlight its output. It uses the same syntax as the ``$LS_COLORS`` variable from GNU’s ls, that is, a colon-separated list of assignments of the form :n:`name={*; attr}` where ``name`` is the name of the corresponding highlight tag and each ``attr`` is an ANSI escape code. The list of highlight tags can be retrieved with the ``-list-tags`` command-line option of ``coqtop``. The string uses ANSI escape codes to represent attributes. For example: ``export COQ_COLORS=”diff.added=4;48;2;0;0;240:diff.removed=41”`` sets the highlights for added text in diffs to underlined (the 4) with a background RGB color (0, 0, 240) and for removed text in diffs to a red background. Note that if you specify ``COQ_COLORS``, the predefined attributes are ignored. .. _OCAMLRUNPARAM: ``$OCAMLRUNPARAM``, described `here `_, can be used to specify certain runtime and memory usage parameters. In most cases, experimenting with these settings will likely not cause a significant performance difference and should be harmless. If the variable is not set, Coq uses the `default values `_, except that ``space_overhead`` is set to 120 and ``minor_heap_size`` is set to 32Mwords (256MB with 64-bit executables or 128MB with 32-bit executables). .. todo: Using the same text "here" for both of the links in the last 2 paragraphs generates an incorrect warning: coq-commands.rst:4: WARNING: Duplicate explicit target name: "here". The warning doesn't even have the right line number. :-( .. _command-line-options: By command line options ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The following command-line options are recognized by the commands ``coqc`` and ``coqtop``, unless stated otherwise: :-I *directory*, -include *directory*: Add physical path *directory* to the OCaml loadpath. .. seealso:: :ref:`names-of-libraries` and the command Declare ML Module Section :ref:`compiled-files`. :-Q *directory dirpath*: Add physical path *directory* to the list of directories where Coq looks for a file and bind it to the logical directory *dirpath*. The subdirectory structure of *directory* is recursively available from Coq using absolute names (extending the :n:`@dirpath` prefix) (see Section :ref:`qualified-names`). Note that only those subdirectories and files which obey the lexical conventions of what is an :n:`@ident` are taken into account. Conversely, the underlying file systems or operating systems may be more restrictive than Coq. While Linux’s ext4 file system supports any Coq recursive layout (within the limit of 255 bytes per filename), the default on NTFS (Windows) or HFS+ (MacOS X) file systems is on the contrary to disallow two files differing only in the case in the same directory. .. seealso:: Section :ref:`names-of-libraries`. :-R *directory dirpath*: Do as ``-Q`` *directory dirpath* but make the subdirectory structure of *directory* recursively visible so that the recursive contents of physical *directory* is available from Coq using short or partially qualified names. .. seealso:: Section :ref:`names-of-libraries`. :-top *dirpath*: Set the logical module name to :n:`@dirpath` for the `coqtop` interactive session. If no module name is specified, `coqtop` will default to ``Top``. 
`coqc` does not accept this option because the logical module name is inferred from the name of the input file and the corresponding `-R` / `-Q` options. :-exclude-dir *directory*: Exclude any subdirectory named *directory* while processing options such as -R and -Q. By default, only the conventional version control management directories named CVS and_darcs are excluded. :-nois, -noinit: Start from an empty state instead of loading the `Init.Prelude` module. :-init-file *file*: Load *file* as the resource file instead of loading the default resource file from the standard configuration directories. :-q: Do not to load the default resource file. :-l *file*, -load-vernac-source *file*: Load and execute the Coq script from *file.v*. :-lv *file*, -load-vernac-source-verbose *file*: Load and execute the Coq script from *file.v*. Write its contents to the standard output as it is executed. :-load-vernac-object *qualid*: Load Coq compiled library :n:`@qualid`. This is equivalent to running :cmd:`Require` :n:`@qualid`. .. _interleave-command-line: .. note:: Note that the relative order of this command-line option and its variants (`-rfrom`, `-ri`, `-re`, etc.) and of the `-set` and `-unset` options matters since the various :cmd:`Require`, :cmd:`Require Import`, :cmd:`Require Export`, :cmd:`Set` and :cmd:`Unset` commands will be executed in the order specified on the command-line. :-rfrom *dirpath qualid*: Load Coq compiled library :n:`@qualid`. This is equivalent to running :cmd:`From ` :n:`@dirpath` :cmd:`Require ` :n:`@qualid`. See the :ref:`note above ` regarding the order of command-line options. :-ri *qualid*, -require-import *qualid*: Load Coq compiled library :n:`@qualid` and import it. This is equivalent to running :cmd:`Require Import` :n:`@qualid`. See the :ref:`note above ` regarding the order of command-line options. :-re *qualid*, -require-export *qualid*: Load Coq compiled library :n:`@qualid` and transitively import it. This is equivalent to running :cmd:`Require Export` :n:`@qualid`. See the :ref:`note above ` regarding the order of command-line options. :-rifrom *dirpath qualid*, -require-import-from *dirpath qualid*: Load Coq compiled library :n:`@qualid` and import it. This is equivalent to running :cmd:`From ` :n:`@dirpath` :cmd:`Require Import ` :n:`@qualid`. See the :ref:`note above ` regarding the order of command-line options. :-refrom *dirpath qualid*, -require-export-from *dirpath qualid*: Load Coq compiled library :n:`@qualid` and transitively import it. This is equivalent to running :cmd:`From ` :n:`@dirpath` :cmd:`Require Export ` :n:`@qualid`. See the :ref:`note above ` regarding the order of command-line options. :-batch: Exit just after argument parsing. Available for ``coqtop`` only. :-verbose: Output the content of the input file as it is compiled. This option is available for ``coqc`` only. :-native-compiler (yes|no|ondemand): Enable the :tacn:`native_compute` reduction machine and precompilation to ``.cmxs`` files for future use by :tacn:`native_compute`. Setting ``yes`` enables :tacn:`native_compute`; it also causes Coq to precompile the native code for future use; all dependencies need to have been precompiled beforehand. Setting ``no`` disables :tacn:`native_compute` which defaults back to :tacn:`vm_compute`; no files are precompiled. Setting ``ondemand`` enables :tacn:`native_compute` but disables precompilation; all missing dependencies will be recompiled every time :tacn:`native_compute` is called. .. _native-compiler-options: .. 
deprecated:: 8.14 This flag has been deprecated in favor of the :ref:`coqnative` binary. The toolchain has been adapted to transparently rely on the latter, so if you use :ref:`coq_makefile` there is nothing to do. Otherwise you should substitute calls to `coqc -native-compiler yes` to calls to `coqc` followed by `coqnative` on the resulting `vo` file. .. versionchanged:: 8.13 The default value is set at configure time, ``-config`` can be used to retrieve it. All this can be summarized in the following table: .. list-table:: :header-rows: 1 * - ``configure`` - ``coqc`` - ``native_compute`` - outcome - requirements * - yes - yes (default) - native_compute - ``.cmxs`` - ``.cmxs`` of deps * - yes - no - vm_compute - none - none * - yes - ondemand - native_compute - none - none * - no - yes, no, ondemand - vm_compute - none - none * - ondemand - yes - native_compute - ``.cmxs`` - ``.cmxs`` of deps * - ondemand - no - vm_compute - none - none * - ondemand - ondemand (default) - native_compute - none - none :-native-output-dir: Set the directory in which to put the aforementioned ``.cmxs`` for :tacn:`native_compute`. Defaults to ``.coq-native``. :-vos: Indicate Coq to skip the processing of opaque proofs (i.e., proofs ending with :cmd:`Qed` or :cmd:`Admitted`), output a ``.vos`` files instead of a ``.vo`` file, and to load ``.vos`` files instead of ``.vo`` files when interpreting :cmd:`Require` commands. :-vok: Indicate Coq to check a file completely, to load ``.vos`` files instead of ``.vo`` files when interpreting :cmd:`Require` commands, and to output an empty ``.vok`` files upon success instead of writing a ``.vo`` file. :-w (all|none|w₁,…,wₙ): Configure the display of warnings. This option expects all, none or a comma-separated list of warning names or categories (see Section :ref:`controlling-display`). :-color (on|off|auto): *Coqtop only*. Enable or disable color output. Default is auto, meaning color is shown only if the output channel supports ANSI escape sequences. :-diffs (on|off|removed): *Coqtop only*. Controls highlighting of differences between proof steps. ``on`` highlights added tokens, ``removed`` highlights both added and removed tokens. Requires that ``-color`` is enabled. (see Section :ref:`showing_diffs`). :-beautify: Pretty-print each command to *file.beautified* when compiling *file.v*, in order to get old-fashioned syntax/definitions/notations. :-emacs, -ide-slave: Start a special toplevel to communicate with a specific IDE. :-impredicative-set: Change the logical theory of Coq by declaring the sort :g:`Set` impredicative. .. warning:: This is known to be inconsistent with some standard axioms of classical mathematics such as the functional axiom of choice or the principle of description. :-type-in-type: Collapse the universe hierarchy of Coq. .. warning:: This makes the logic inconsistent. :-mangle-names *ident*: *Experimental.* Do not depend on this option. Replace Coq's auto-generated name scheme with names of the form *ident0*, *ident1*, etc. Within Coq, the :flag:`Mangle Names` flag turns this behavior on, and the :opt:`Mangle Names Prefix` option sets the prefix to use. This feature is intended to be used as a linter for developments that want to be robust to changes in the auto-generated name scheme. The options are provided to facilitate tracking down problems. :-set *string*: Enable flags and set options. *string* should be :n:`@setting_name=value`, the value is interpreted according to the type of the option. 
For flags :n:`@setting_name` is equivalent to :n:`@setting_name=true`. For instance ``-set "Universe Polymorphism"`` will enable :flag:`Universe Polymorphism`. Note that the quotes are shell syntax, Coq does not see them. See the :ref:`note above ` regarding the order of command-line options. :-unset *string*: As ``-set`` but used to disable options and flags. *string* must be :n:`"@setting_name"`. See the :ref:`note above ` regarding the order of command-line options. :-compat *version*: Load a file that sets a few options to maintain partial backward-compatibility with a previous version. This is equivalent to :cmd:`Require Import` `Coq.Compat.CoqXXX` with `XXX` one of the last three released versions (including the current version). Note that the :ref:`explanations above ` regarding the order of command-line options apply, and this could be relevant if you are resetting some of the compatibility options. :-dump-glob *file*: Dump references for global names in file *file* (to be used by coqdoc, see :ref:`coqdoc`). By default, if *file.v* is being compiled, *file.glob* is used. :-no-glob: Disable the dumping of references for global names. :-image *file*: Set the binary image to be used by ``coqc`` to be *file* instead of the standard one. Not of general use. :-bindir *directory*: Set the directory containing Coq binaries to be used by ``coqc``. It is equivalent to doing export COQBIN= *directory* before launching ``coqc``. :-where: Print the location of Coq’s standard library and exit. :-config: Print the locations of Coq’s binaries, dependencies, and libraries, then exit. :-filteropts: Print the list of command line arguments that `coqtop` has recognized as options and exit. :-v: Print Coq’s version and exit. :-list-tags: Print the highlight tags known by Coq as well as their currently associated color and exit. :-h, --help: Print a short usage and exit. .. _compiled-interfaces: Compiled interfaces (produced using ``-vos``) ---------------------------------------------- Compiled interfaces help saving time while developing Coq formalizations, by compiling the formal statements exported by a library independently of the proofs that it contains. .. warning:: Compiled interfaces should only be used for development purposes. At the end of the day, one still needs to proof check all files by producing standard ``.vo`` files. (Technically, when using ``-vos``, fewer universe constraints are collected.) Moreover, this feature is still experimental, it may be subject to change without prior notice. **Principle.** The compilation using ``coqc -vos foo.v`` produces a file called ``foo.vos``, which is similar to ``foo.vo`` except that all opaque proofs are skipped in the compilation process. The compilation using ``coqc -vok foo.v`` checks that the file ``foo.v`` correctly compiles, including all its opaque proofs. If the compilation succeeds, then the output is a file called ``foo.vok``, with empty contents. This file is only a placeholder indicating that ``foo.v`` has been successfully compiled. (This placeholder is useful for build systems such as ``make``.) When compiling a file ``bar.v`` that depends on ``foo.v`` (for example via a ``Require Foo.`` command), if the compilation command is ``coqc -vos bar.v`` or ``coqc -vok bar.v``, then the file ``foo.vos`` gets loaded (instead of ``foo.vo``). A special case is if file ``foo.vos`` exists and has empty contents, and ``foo.vo`` exists, then ``foo.vo`` is loaded. 
Appart from the aforementioned case where ``foo.vo`` can be loaded in place of ``foo.vos``, in general the ``.vos`` and ``.vok`` files live totally independently from the ``.vo`` files. **Dependencies generated by ``coq_makefile``.** The files ``foo.vos`` and ``foo.vok`` both depend on ``foo.v``. Furthermore, if a file ``foo.v`` requires ``bar.v``, then ``foo.vos`` and ``foo.vok`` also depend on ``bar.vos``. Note, however, that ``foo.vok`` does not depend on ``bar.vok``. Hence, as detailed further, parallel compilation of proofs is possible. In addition, ``coq_makefile`` generates for a file ``foo.v`` a target ``foo.required_vos`` which depends on the list of ``.vos`` files that ``foo.vos`` depends upon (excluding ``foo.vos`` itself). As explained next, the purpose of this target is to be able to request the minimal working state for editing interactively the file ``foo.v``. .. warning:: When writing a custom build system, be aware that ``coqdep`` only produces dependencies related to ``.vos`` and ``.vok`` if the ``-vos`` command line flag is passed. This is to maintain compatibility with dune (see `ocaml/dune#2642 on github `_). **Typical compilation of a set of file using a build system.** Assume a file ``foo.v`` that depends on two files ``f1.v`` and ``f2.v``. The command ``make foo.required_vos`` will compile ``f1.v`` and ``f2.v`` using the option ``-vos`` to skip the proofs, producing ``f1.vos`` and ``f2.vos``. At this point, one is ready to work interactively on the file ``foo.v``, even though it was never needed to compile the proofs involved in the files ``f1.v`` and ``f2.v``. Assume a set of files ``f1.v ... fn.v`` with linear dependencies. The command ``make vos`` enables compiling the statements (i.e. excluding the proofs) in all the files. Next, ``make -j vok`` enables compiling all the proofs in parallel. Thus, calling ``make -j vok`` directly enables taking advantage of a maximal amount of parallelism during the compilation of the set of files. Note that this comes at the cost of parsing and typechecking all definitions twice, once for the ``.vos`` file and once for the ``.vok`` file. However, if files contain nontrivial proofs, or if the files have many linear chains of dependencies, or if one has many cores available, compilation should be faster overall. **Need for ``Proof using``** When a theorem is part of a section, typechecking the statement of this theorem might be insufficient for deducing the type of this statement as of at the end of the section. Indeed, the proof of the theorem could make use of section variables or section hypotheses that are not mentioned in the statement of the theorem. For this reason, proofs inside section should begin with :cmd:`Proof using` instead of :cmd:`Proof`, where after the ``using`` clause one should provide the list of the names of the section variables that are required for the proof but are not involved in the typechecking of the statement. Note that it is safe to write ``Proof using.`` instead of ``Proof.`` also for proofs that are not within a section. .. warn:: You should use the “Proof using [...].” syntax instead of “Proof.” to enable skipping this proof which is located inside a section. Give as argument to “Proof using” the list of section variables that are not needed to typecheck the statement but that are required by the proof. If Coq is invoked using the ``-vos`` option, whenever it finds the command ``Proof.`` inside a section, it will compile the proof, that is, refuse to skip it, and it will raise a warning. 
To disable the warning, one may pass the flag ``-w -proof-without-using-in-section``. **Interaction with standard compilation** When compiling a file ``foo.v`` using ``coqc`` in the standard way (i.e., without ``-vos`` nor ``-vok``), an empty file ``foo.vos`` and an empty file ``foo.vok`` are created in addition to the regular output file ``foo.vo``. If ``coqc`` is subsequently invoked on some other file ``bar.v`` using option ``-vos`` or ``-vok``, and that ``bar.v`` requires ``foo.v``, if Coq finds an empty file ``foo.vos``, then it will load ``foo.vo`` instead of ``foo.vos``. The purpose of this feature is to allow users to benefit from the ``-vos`` option even if they depend on libraries that were compiled in the traditional manner (i.e., never compiled using the ``-vos`` option). Compiled libraries checker (coqchk) ---------------------------------------- The ``coqchk`` command takes a list of library paths as argument, described either by their logical name or by their physical filename, which must end in ``.vo``. The corresponding compiled libraries (``.vo`` files) are searched in the path, recursively processing the libraries they depend on. The content of all these libraries is then type checked. The effect of ``coqchk`` is only to return with normal exit code in case of success, and with positive exit code if an error has been found. Error messages are not deemed to help the user understand what is wrong. In the current version, it does not modify the compiled libraries to mark them as successfully checked. Note that non-logical information is not checked. By logical information, we mean the type and optional :term:`body` associated with names. It excludes for instance anything related to the concrete syntax of objects (customized syntax rules, association between short and long names), implicit arguments, etc. This tool can be used for several purposes. One is to check that a compiled library provided by a third-party has not been forged and that loading it cannot introduce inconsistencies [#]_. Another point is to get an even higher level of security. Since ``coqtop`` can be extended with custom tactics, possibly ill-typed code, it cannot be guaranteed that the produced compiled libraries are correct. ``coqchk`` is a standalone verifier, and thus it cannot be tainted by such malicious code. Command-line options ``-Q``, ``-R``, ``-where`` and ``-impredicative-set`` are supported by ``coqchk`` and have the same meaning as for ``coqtop``. As there is no notion of relative paths in object files ``-Q`` and ``-R`` have exactly the same meaning. :-norec *module*: Check *module* but do not check its dependencies. :-admit *module*: Do not check *module* and any of its dependencies, unless explicitly required. :-o: At exit, print a summary about the context. List the names of all assumptions and variables (constants without a :term:`body`). :-silent: Do not write progress information to the standard output. Environment variable ``$COQLIB`` can be set to override the location of the standard library. The algorithm for deciding which modules are checked or admitted is the following: assuming that ``coqchk`` is called with argument ``M``, option ``-norec N``, and ``-admit A``. Let us write :math:`\overline{S}` for the set of reflexive transitive dependencies of set :math:`S`. Then: + Modules :math:`C = \overline{M} \backslash \overline{A} \cup M \cup N` are loaded and type checked before being added to the context. 
+ And :math:`M \cup N \backslash C` is the set of modules that are loaded and added to the context without type checking. Basic integrity checks (checksums) are nonetheless performed. As a rule of thumb, -admit can be used to tell Coq that some libraries have already been checked. So ``coqchk A B`` can be split in ``coqchk A`` && ``coqchk B -admit A`` without type checking any definition twice. Of course, the latter is slightly slower since it makes more disk access. It is also less secure since an attacker might have replaced the compiled library ``A`` after it has been read by the first command, but before it has been read by the second command. .. [#] Ill-formed non-logical information might for instance bind Coq.Init.Logic.True to short name False, so apparently False is inhabited, but using fully qualified names, Coq.Init.Logic.False will always refer to the absurd proposition, what we guarantee is that there is no proof of this latter constant. coq-8.15.0/doc/sphinx/practical-tools/coqide.rst000066400000000000000000000553151417001151100215430ustar00rootroot00000000000000.. |GtkSourceView| replace:: :smallcaps:`GtkSourceView` .. _coqintegrateddevelopmentenvironment: Coq Integrated Development Environment ======================================== The Coq Integrated Development Environment is a graphical tool, to be used as a user-friendly replacement to `coqtop`. Its main purpose is to allow the user to navigate forward and backward into a Coq file, executing corresponding commands or undoing them respectively. CoqIDE is run by typing the command `coqide` on the command line. Without argument, the main screen is displayed with an “unnamed buffer”, and with a filename as argument, another buffer displaying the contents of that file. Additionally, `coqide` accepts the same options as `coqtop`, given in :ref:`thecoqcommands`, the ones having obviously no meaning for CoqIDE being ignored. .. _coqide_mainscreen: .. image:: ../_static/coqide.png :alt: CoqIDE main screen A sample CoqIDE main screen, while navigating into a file `Fermat.v`, is shown in the figure :ref:`CoqIDE main screen `. At the top is a menu bar, and a tool bar below it. The large window on the left is displaying the various *script buffers*. The upper right window is the *goal window*, where goals to be proven are displayed. The lower right window is the *message window*, where various messages resulting from commands are displayed. At the bottom is the status bar. Managing files and buffers, basic editing ---------------------------------------------- In the script window, you may open arbitrarily many buffers to edit. The *File* menu allows you to open files or create some, save them, print or export them into various formats. Among all these buffers, there is always one which is the current *running buffer*, whose name is displayed on a background in the *processed* color (green by default), which is the one where Coq commands are currently executed. Buffers may be edited as in any text editor, and classical basic editing commands (Copy/Paste, …) are available in the *Edit* menu. CoqIDE offers only basic editing commands, so if you need more complex editing commands, you may launch your favorite text editor on the current buffer, using the *Edit/External Editor* menu. Interactive navigation into Coq scripts -------------------------------------------- The running buffer is the one where navigation takes place. The toolbar offers five basic commands for this. 
The first one, represented by a down arrow icon, is for going forward executing one command. If that command is successful, the part of the script that has been executed is displayed on a background with the processed color. If that command fails, the error message is displayed in the message window, and the location of the error is emphasized by an underline in the error foreground color (red by default). In the figure :ref:`CoqIDE main screen `, the running buffer is `Fermat.v`, all commands until the ``Theorem`` have been already executed, and the user tried to go forward executing ``Induction n``. That command failed because no such tactic exists (names of standard tactics are written in lowercase), and the failing command is underlined. Notice that the processed part of the running buffer is not editable. If you ever want to modify something you have to go backward using the up arrow tool, or even better, put the cursor where you want to go back and use the goto button. Unlike with `coqtop`, you should never use ``Undo`` to go backward. There are two additional buttons for navigation within the running buffer. The "down" button with a line goes directly to the end; the "up" button with a line goes back to the beginning. The handling of errors when using the go-to-the-end button depends on whether Coq is running in asynchronous mode or not (see Chapter :ref:`asynchronousandparallelproofprocessing`). If it is not running in that mode, execution stops as soon as an error is found. Otherwise, execution continues, and the error is marked with an underline in the error foreground color, with a background in the error background color (pink by default). The same characterization of error-handling applies when running several commands using the "goto" button. If you ever try to execute a command that runs for a long time and would like to abort it before it terminates, you may use the interrupt button (the white cross on a red circle). There are other buttons on the CoqIDE toolbar: a button to save the running buffer; a button to close the current buffer (an "X"); buttons to switch among buffers (left and right arrows); an "information" button; and a "gears" button. The "gears" button submits proof terms to the Coq kernel for type checking. When Coq uses asynchronous processing (see Chapter :ref:`asynchronousandparallelproofprocessing`), proofs may have been completed without kernel-checking of generated proof terms. The presence of unchecked proof terms is indicated by ``Qed`` statements that have a subdued *being-processed* color (light blue by default), rather than the processed color, though their preceding proofs have the processed color. Notice that for all these buttons, except for the "gears" button, their operations are also available in the menu, where their keyboard shortcuts are given. Commands and templates ---------------------- The Templates menu allows using shortcuts to insert commands. This is a nice way to proceed if you are not sure of the syntax of the command you want. Moreover, from this menu you can automatically insert templates of complex commands like ``Fixpoint`` that you can conveniently fill afterwards. Queries ------------ .. image:: ../_static/coqide-queries.png :alt: CoqIDE queries We call *query* any command that does not change the current state, such as ``Check``, ``Search``, etc. To run such commands interactively, without writing them in scripts, CoqIDE offers a *query pane*. 
The query pane can be displayed on demand by using the ``View`` menu, or using the shortcut ``F1``. Queries can also be performed by selecting a particular phrase, then choosing an item from the ``Queries`` menu. The response then appears in the message window. The image above shows the result after selecting the phrase ``Nat.mul`` in the script window, and choosing ``Print`` from the ``Queries`` menu. Compilation ---------------- The `Compile` menu offers direct commands to: + compile the current buffer + run a compilation using `make` + go to the last compilation error + create a `Makefile` using `coq_makefile`. Customizations ------------------- You may customize your environment using the menu Edit/Preferences. A new window will be displayed, with several customization sections presented as a notebook. The first section is for selecting the text font used for scripts, goal and message windows. The second and third sections are for controlling colors and style of the three main buffers. A predefined Coq highlighting style as well as standard |GtkSourceView| styles are available. Other styles can be added e.g. in ``$HOME/.local/share/gtksourceview-3.0/styles/`` (see the general documentation about |GtkSourceView| for the various possibilities). Note that the style of the rest of the graphical part of CoqIDE is not under the control of |GtkSourceView| but of GTK+ and governed by files such as ``settings.ini`` and ``gtk.css`` in ``$XDG_CONFIG_HOME/gtk-3.0`` or files in ``$HOME/.themes/NameOfTheme/gtk-3.0``, as well as the environment variable ``GTK_THEME`` (search on the internet for the various possibilities). The fourth section is for customizing the editor. It includes in particular the ability to activate an Emacs mode named micro-Proof-General (use the Help menu to know more about the available bindings). The next section is devoted to file management: you may configure automatic saving of files, by periodically saving the contents into files named `#f#` for each opened file `f`. You may also activate the *revert* feature: in case an opened file is modified on the disk by a third party, CoqIDE may read it again for you. Note that if you have also edited that same file in CoqIDE, you will be prompted to choose whether or not to discard your changes. The File charset encoding choice is described below in :ref:`character-encoding-saved-files`. The `Externals` section allows customizing the external commands for compilation, printing, and web browsing. In the browser command, you may use `%s` to denote the URL to open, for example: `firefox -remote "OpenURL(%s)"`. Notice that these settings are saved in the file ``coqiderc`` in the ``coq`` subdirectory of the user configuration directory, which is the value of ``$XDG_CONFIG_HOME`` if this environment variable is set, and ``$HOME/.config/`` otherwise. A GTK+ accelerator keymap is saved under the name ``coqide.keys`` in the same ``coq`` subdirectory of the user configuration directory. It is not recommended to edit this file manually: to modify a given menu shortcut, go to the corresponding menu item without releasing the mouse button, press the key you want for the new shortcut, and release the mouse button afterwards. If your system does not allow it, you may still edit this configuration file by hand, but this is more involved. Using Unicode symbols -------------------------- CoqIDE is based on GTK+ and inherits from it support for Unicode in its text windows. Consequently a large set of symbols is available for notations.
Furthermore, CoqIDE conveniently provides a simple way to input Unicode characters. Displaying Unicode symbols ~~~~~~~~~~~~~~~~~~~~~~~~~~ You just need to define suitable notations as described in the chapter :ref:`syntax-extensions-and-notation-scopes`. For example, to use the mathematical symbols ∀ and ∃, you may define: .. coqtop:: in Notation "∀ x .. y , P" := (forall x, .. (forall y, P) ..) (at level 200, x binder, y binder, right associativity) : type_scope. Notation "∃ x .. y , P" := (exists x, .. (exists y, P) ..) (at level 200, x binder, y binder, right associativity) : type_scope. There exists a small set of such notations already defined, in the file `utf8.v` of Coq library, so you may enable them just by ``Require Import Unicode.Utf8`` inside CoqIDE, or equivalently, by starting CoqIDE with ``coqide -l utf8``. However, there are some issues when using such Unicode symbols: you of course need to use a character font which supports them. In the Fonts section of the preferences, the Preview line displays some Unicode symbols, so you could figure out if the selected font is OK. Related to this, one thing you may need to do is choosing whether GTK+ should use antialiased fonts or not, by setting the environment variable `GDK_USE_XFT` to 1 or 0 respectively. .. _coqide-unicode: Bindings for input of Unicode symbols ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ CoqIDE supports a builtin mechanism to input non-ASCII symbols. For example, to input ``π``, it suffices to type ``\pi`` then press the combination of key ``Shift+Space`` (default key binding). Often, it suffices to type a prefix of the latex token, e.g. typing ``\p`` then ``Shift+Space`` suffices to insert a ``π``. For several symbols, ASCII art is also recognized, e.g. ``\->`` for a right arrow, or ``\>=`` for a greater than or equal sign. A larger number of latex tokens are supported by default. The full list is available here: https://github.com/coq/coq/blob/master/ide/coqide/default_bindings_src.ml Custom bindings may be added, as explained further on. The mechanism is active by default, but can be turned off in the Editor section of the preferences. .. note:: It remains possible to input non-ASCII symbols using system-wide approaches independent of CoqIDE. Adding custom bindings ~~~~~~~~~~~~~~~~~~~~~~ To extend the default set of bindings, create a file named ``coqide.bindings`` and place it in the same folder as ``coqide.keys``. This would be the folder ``$XDG_CONFIG_HOME/coq``, defaulting to ``~/.config/coq`` if ``XDG_CONFIG_HOME`` is unset. The file `coqide.bindings` should contain one binding per line, in the form ``\key value``, followed by an optional priority integer. (The key and value should not contain any space character.) .. example:: Here is an example configuration file: :: \par || \pi π 1 \le ≤ 1 \lambda λ 2 \lambdas λs Above, the priority number 1 on ``\pi`` indicates that the prefix ``\p`` should resolve to ``\pi``, and not to something else (e.g. ``\par``). Similarly, the above settings ensure than ``\l`` resolves to ``\le``, and that ``\la`` resolves to ``\lambda``. It can be useful to work with per-project binding files. For this purpose CoqIDE accepts a command line argument of the form ``-unicode-bindings file1,file2,...,fileN``. Each of the file tokens provided may consists of one of: - a path to a custom bindings file, - the token ``default``, which resolves to the default bindings file, - the token ``local``, which resolves to the `coqide.bindings` file stored in the user configuration directory. .. 
warning:: If a filename other than the first one includes a "~" to refer to the home directory, it won't be expanded properly. To work around that issue, one should not use comas but instead repeat the flag, in the form: ``-unicode-bindings file1 .. -unicode-bindings fileN``. .. note:: If two bindings for a same token both have the same priority value (or both have no priority value set), then the binding considered is the one from the file that comes first on the command line. .. _character-encoding-saved-files: Character encoding for saved files ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the Files section of the preferences, the encoding option is related to the way files are saved. If you have no need to exchange files with non-UTF-8 aware applications, it is better to choose the UTF-8 encoding, since it guarantees that your files will be read again without problems. (This is because when CoqIDE reads a file, it tries to automatically detect its character encoding.) If you choose something else than UTF-8, then missing characters will be written encoded by `\x{....}` or `\x{........}` where each dot is an hexadecimal digit: the number between braces is the hexadecimal Unicode index for the missing character. .. _coqide-debugger: Debugger -------- Version 8.15 introduces a visual debugger for |Ltac| tactics within CoqIDE. It supports setting breakpoints visually and automatically displaying the stopping point in the source code with "continue", "step over" "step in" and "step out" operations. The call stack and variable values for each stack frame are shown in a new panel. The debugger is based on the non-visual |Ltac| :ref:`debugger `. We'd like to eventually support other scripting facilities such as Ltac2. Since the visual debugger is new in 8.15, you may encounter bugs or usability issues. The behavior and user interface will evolve as the debugger is refined. There are notes on bugs and potential enhancements at the end of `this page `_. Feel free to suggest changes and improvements by opening an issue on `GitHub `_, or contact `@jfehrle` directly through email, Zulip or Discourse. Breakpoints ~~~~~~~~~~~ This screenshot shows the debugger stopped at a breakpoint in the |Ltac| tactic `my_tac`. Breakpoints are shown with a red background and the stopping point is shown with a dark blue background. `Set Ltac Debug.` enables stopping in the debugger. .. image:: ../_static/debugger.png :alt: CoqIDE Debugger .. created with: Set Ltac Debug. (* enable the debugger *) Ltac my_tac c := let con := constr:(forall a b : nat, (a + b) * c = a * c + b * c) in idtac "A"; idtac "B"; idtac "C". Goal True. my_tac 2. You can control the debugger with function and control keys. Some messages are shown in the Messages panel. You can type :ref:`debugger commands ` in that panel when it shows the debug prompt. The script is not editable while Coq is processing tactics or stopped in the debugger. When Coq is stopped in the debugger (e.g., at a breakpoint), the blue segment in the "in progress" slider at the bottom edge of the window will be stopped at the left hand edge of its range. The function keys are listed, for the moment, with one exception, in the `Debug` menu: Toggle breakpoint (F8) Position the cursor on the first character of the tactic name in an Ltac construct, then press F8. Press again to remove the breakpoint. F8 is accepted only when all of the coqtop sessions are idle (i.e. at the debug prompt or not processing a tactic or command). 
Note that :term:`sentences ` containing a single built-in tactic are not Ltac constructs. A breakpoint on :n:`intros.`, for example, is ignored, while breakpoints on either tactic in :n:`intros; idtac.` work. A breakpoint on, say, :n:`my_ltac_tactic.` also works. Breakpoints on Ltac :n:`@value_tactic`\s, which compute values without changing the proof context, such as :tacn:`eval`, are ignored. You must set at least one breakpoint in order to enter the debugger. Continue (F9) Continue processing the proof. If you're not stopped in the debugger, this is equivalent to "Run to end" (Control End). Step over (Control ↓) When stopped in the debugger, execute the next tactic without stopping inside it. If the debugger reaches a breakpoint in the tactic, it will stop. This is the same key combination used for "Forward one command"—if you're stopped in the debugger then it does a "Step over" and otherwise it does a "Forward". Combining the two functions makes it easy to step through a script in a natural way when some breakpoints are set. Step in (F10) When stopped in the debugger, if next tactic is an |Ltac| tactic, stop at the first possible point in the tactic. Otherwise acts as a "step over". Step out (Shift F10) When stopped in the debugger, continue and then stop at the first possible point after exiting the current |Ltac| tactic. If the debugger reaches a breakpoint in the tactic, it will stop. Break (F11) Stops the debugger at the next possible stopping point, from which you can step or continue. (Not supported in Windows at this time.) If you step through `idtac "A"; idtac "B"; idtac "C".`, you'll notice that the steps for `my_tac` are: | `idtac "A"; idtac "B"; idtac "C"` | `idtac "A"; idtac "B"` | `idtac "A"` | `idtac "B"` | `idtac "C"` which reflects the two-phase execution process for the :n:`@tactic ; @tactic` construct. Also keep in mind that |Ltac| backtracking may cause the call stack to revert to a previous state. This may cause confusion. Currently there's no special indication that this has happened. .. unfortunately not working: Note: This `Wiki page `_ describes a way to change CoqIDE key bindings. Call Stack and Variables ~~~~~~~~~~~~~~~~~~~~~~~~ The bottom panel shows the call stack and the variables defined for the selected stack frame. Stack frames normally show the name of tactic being executed, the line number and the last component of the filename without the :n:`.v` suffix. The directory part of the module name is shown when the frame is not in the toplevel script file. For example, :n:`make_rewriter:387, AllTactics (Rewriter.Rewriter)` refers to the file with the module name :n:`Rewriter.Rewriter.AllTactics`. Note: A few stack frames aren't yet displayed in this described format (e.g. those starting with :n:`???`) and may be extraneous. In some cases, the tactic name is not shown. Click on a stack frame or press the Up (↑) or Down (↓) keys to select a stack frame. Coq will jump to the associated code and display the variables for that stack frame. You can select text with the mouse and then copy it to the clipboard with Control-C. Control-A selects the entire stack. The variables panel uses a tree control to show variables defined in the selected stack frame. To see values that don't fit on a single line, click on the triangle. You can select one or more entries from the tree in the usual way by clicking, shift-clicking and control-clicking on an entry. Control-A selects all entries. Control-C copies the selected entries to the clipboard. 
Note: Some variable are not displayed in a useful form. For example, the value shown for :n:`tac` in a script containing :n:`let tac = ltac:(auto)` appears only as :n:``. We hope to address this soon. The :n:`DETACH` button moves the debugger panel into a separate window, which will make it easier to examine its contents. Supported use cases ~~~~~~~~~~~~~~~~~~~ There are two main use cases for the debugger. They're not very compatible. Instead of showing warning messages or forcing the user to explicitly pick one mode or another, for now it's up to the user to know the limitations and work within them. The *single file* case is running the debugger on a single *primary* script without ever stopping in other *secondary* scripts. In this case, you can edit the primary script while Coq is not running it nor stopped in the debugger. The position of breakpoints will be updated automatically as you edit the file. It's fine to run the debugger in multiple buffers--you will not be confused. The single-file case is preferable when you can use it. The *multi-file* case is when a primary script stops in a secondary script. In this case, breakpoints in the secondary script that move due to script editing may no longer match the locations in the compiled secondary script. The debugger won't stop at these breakpoints as you expect. Also, the code highlighted for stack frames in that script may be incorrect. You will need to re-compile the secondary script and then restart the primary script (Restart, Ctrl-HOME) to get back to a consistent state. For multi-file debugging, we suggest detaching the Messages, Proof Context and Debugger panels so they are in separate windows. To do so, click on the arrow icon next to "Messages", select "Windows / Detach Proof" from the menu and click on "DETACH" in the Debugger panel. Note that the Debugger panel is initially attached to the Script panel of the toplevel script. Also note that, for now, the "in progress" slider is accurate only when the associated toplevel script panel is visible. If a debugger instance is stopped in a secondary script, the debugger function keys are directed to the debugger instance associated with the primary script. The debugger doesn't attempt to support multiple instances stopped in the same secondary script. If you have a need to do this, run each debugger instance in a separate CoqIDE process/window. Note that if you set a breakpoint in a script that may be called by multiple debugger instances, you may inadvertently find you've gotten into unsupported territory. coq-8.15.0/doc/sphinx/practical-tools/utilities.rst000066400000000000000000001011641417001151100223040ustar00rootroot00000000000000.. _utilities: --------------------- Utilities --------------------- The distribution provides utilities to simplify some tedious works beside proof development, tactics writing or documentation. Using Coq as a library ------------------------ In previous versions, ``coqmktop`` was used to build custom toplevels - for example for better debugging or custom static linking. Nowadays, the preferred method is to use ``ocamlfind``. The most basic custom toplevel is built using: :: % ocamlfind ocamlopt -thread -rectypes -linkall -linkpkg \ -package coq.toplevel \ topbin/coqtop_bin.ml -o my_toplevel.native For example, to statically link |Ltac|, you can just do: :: % ocamlfind ocamlopt -thread -rectypes -linkall -linkpkg \ -package coq.toplevel,coq.plugins.ltac \ topbin/coqtop_bin.ml -o my_toplevel.native and similarly for other plugins. .. 
_building_coq_project: Building a Coq project ---------------------- As of today it is possible to build Coq projects using two tools: - ``coq_makefile``, which is distributed by Coq and is based on generating a makefile, - Dune, the standard OCaml build tool, which, since version 1.9, supports building Coq libraries. .. _coq_makefile: Building a Coq project with coq_makefile ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The majority of Coq projects are very similar: a collection of ``.v`` files and eventually some ``.ml`` ones (a Coq plugin). The main piece of metadata needed in order to build the project are the command line options to ``coqc`` (e.g. ``-R``, ``-Q``, ``-I``, see :ref:`command line options `). Collecting the list of files and options is the job of the ``_CoqProject`` file. A ``_CoqProject`` file may contain the following kinds of entries in any order, separated by whitespace: * Selected options of coqc, which are forwarded directly to it. Currently these are ``-Q``, ``-I``, ``-R`` and ``-native-compiler``. * ``-arg`` options for other options of coqc that don’t fall in the above set. * Options specific to ``coq_makefile``. Currently this is only ``-docroot``. * Paths to files belonging to the project. * Comments, started with an unquoted ``#`` and continuing to the end of the line. A simple example of a ``_CoqProject`` file follows: :: -R theories/ MyCode -arg "-w all" theories/foo.v theories/bar.v -I src/ src/baz.mlg src/bazaux.ml src/qux_plugin.mlpack Lines in the form ``-arg foo`` pass the argument ``foo`` to ``coqc``: in the example, this allows to pass the two-word option ``-w all`` (see :ref:`command line options `). Note that it is mandatory to specify a ``-R/-Q`` flag for your project, so its modules are properly qualified. Omitting it will generate object files that are not usable except for expert cases. The ``-native-compiler`` option given in the ``_CoqProject`` file will override the global one passed at configure time. CoqIDE, Proof-General and VSCoq all understand ``_CoqProject`` files and can be used to invoke Coq with the desired options. The ``coq_makefile`` utility can be used to set up a build infrastructure for the Coq project based on makefiles. The recommended way of invoking ``coq_makefile`` is the following one: :: coq_makefile -f _CoqProject -o CoqMakefile Such command generates the following files: CoqMakefile is a makefile for ``GNU Make`` with targets to build the project (e.g. generate .vo or .html files from .v or compile .ml* files) and install it in the ``user-contrib`` directory where the Coq library is installed. CoqMakefile.conf contains make variables assignments that reflect the contents of the ``_CoqProject`` file as well as the path relevant to Coq. Run ``coq_makefile --help`` for a description of command line options. The recommended approach is to invoke ``CoqMakefile`` from a standard ``Makefile`` of the following form: .. 
example:: :: # KNOWNTARGETS will not be passed along to CoqMakefile KNOWNTARGETS := CoqMakefile extra-stuff extra-stuff2 # KNOWNFILES will not get implicit targets from the final rule, and so # depending on them won't invoke the submake # Warning: These files get declared as PHONY, so any targets depending # on them always get rebuilt KNOWNFILES := Makefile _CoqProject .DEFAULT_GOAL := invoke-coqmakefile CoqMakefile: Makefile _CoqProject $(COQBIN)coq_makefile -f _CoqProject -o CoqMakefile invoke-coqmakefile: CoqMakefile $(MAKE) --no-print-directory -f CoqMakefile $(filter-out $(KNOWNTARGETS),$(MAKECMDGOALS)) .PHONY: invoke-coqmakefile $(KNOWNFILES) #################################################################### ## Your targets here ## #################################################################### # This should be the last rule, to handle any targets not declared above %: invoke-coqmakefile @true The advantage of a wrapper, compared to directly calling the generated ``Makefile``, is that it provides a target independent of the version of Coq to regenerate a ``Makefile`` specific to the current version of Coq. Additionally, the master ``Makefile`` can be extended with targets not specific to Coq. Including the generated makefile with an include directive is discouraged, since the contents of this file, including variable names and status of rules, may change in the future. Use the optional file ``CoqMakefile.local`` to extend ``CoqMakefile``. In particular, you can declare custom actions to run before or after the build process. Similarly you can customize the install target or even provide new targets. See :ref:`coqmakefilelocal` for extension-point documentation. Although you can use all variables defined in ``CoqMakefile`` in the *recipes* of rules that you write and in the definitions of any variables that you assign with ``=``, many variables are not available for use if you assign variable values with ``:=`` nor to define the *targets* of rules nor in top-level conditionals such as ``ifeq``. Additionally, you must use `secondary expansion `_ to make use of such variables in the prerequisites of rules. To access variables defined in ``CoqMakefile`` in rule target computation, top-level conditionals, and ``:=`` variable assignment, for example to add new dependencies to compiled outputs, use the optional file ``CoqMakefile.local-late``. See :ref:`coqmakefilelocallate` for a non-exhaustive list of variables. The extensions of files listed in ``_CoqProject`` determine how they are built. In particular: + Coq files must use the ``.v`` extension + OCaml files must use the ``.ml`` or ``.mli`` extension + OCaml files that require pre processing for syntax extensions (like ``VERNAC EXTEND``) must use the ``.mlg`` extension + In order to generate a plugin one has to list all OCaml modules (i.e. ``Baz`` for ``baz.ml``) in a ``.mlpack`` file (or ``.mllib`` file). The use of ``.mlpack`` files has to be preferred over ``.mllib`` files, since it results in a “packed” plugin: All auxiliary modules (as ``Baz`` and ``Bazaux``) are hidden inside the plugin’s "namespace" (``Qux_plugin``). This reduces the chances of begin unable to load two distinct plugins because of a clash in their auxiliary module names. Comments ++++++++ ``#`` outside of double quotes starts a comment that continues to the end of the line. Comments are ignored. Quoting arguments to coqc +++++++++++++++++++++++++ Any string in a ``_CoqProject`` file may be enclosed in double quotes to include whitespace characters or ``#``. 
For example, use ``-arg "-w all"`` to pass the argument ``-w all`` to coqc. If the argument to coqc needs some quotes as well, use single-quotes inside the double-quotes. For example ``-arg "-set 'Default Goal Selector=!'"`` gets passed to coqc as ``-set 'Default Goal Selector=!'``. But note, that single-quotes in a ``_CoqProject`` file are only special characters if they appear in the string following ``-arg``. And on their own they don't quote spaces. For example ``-arg 'foo bar'`` in ``_CoqProject`` is equivalent to ``-arg foo "bar'"`` (in ``_CoqProject`` notation). ``-arg "'foo bar'"`` behaves differently and passes ``'foo bar'`` to coqc. Forbidden filenames +++++++++++++++++++ The paths of files given in a ``_CoqProject`` file may not contain any of the following characters: ``\n``, ``\t``, space, ``\``, ``'``, ``"``, ``#``, ``$``, ``%``. These characters have special meaning in Makefiles and ``coq_makefile`` doesn't support encoding them correctly. Warning: No common logical root +++++++++++++++++++++++++++++++ When a ``_CoqProject`` file contains something like ``-R theories Foo theories/Bar.v``, the ``install-doc`` target installs the documentation generated by ``coqdoc`` into ``user-contrib/Foo/``, in the folder where Coq was installed. But if the ``_CoqProject`` file contains something like: :: -R theories/Foo Foo -R theories/Bar Bar theories/Foo/Foo.v theories/Bar/Bar.v the Coq files of the project don’t have a logical path in common and ``coq_makefile`` doesn’t know where to install the documentation. It will give a warning: "No common logical root" and generate a Makefile that installs the documentation in some folder beginning with "orphan", in the above example, it'd be ``user-contrib/orphan_Foo_Bar``. In this case, specify the ``-docroot`` option in _CoqProject to override the automatically selected logical root. .. _coqmakefilelocal: CoqMakefile.local +++++++++++++++++ The optional file ``CoqMakefile.local`` is included by the generated file ``CoqMakefile``. It can contain two kinds of directives. **Variable assignment** The variable must belong to the variables listed in the ``Parameters`` section of the generated makefile. These include: :CAMLPKGS: can be used to specify third party findlib packages, and is passed to the OCaml compiler on building or linking of modules. Eg: ``-package yojson``. :CAMLFLAGS: can be used to specify additional flags to the OCaml compiler, like ``-bin-annot`` or ``-w``.... :OCAMLWARN: it contains a default of ``-warn-error +a-3``, useful to modify this setting; beware this is not recommended for projects in Coq's CI. :COQC, COQDEP, COQDOC: can be set in order to use alternative binaries (e.g. wrappers) :COQ_SRC_SUBDIRS: can be extended by including other paths in which ``*.cm*`` files are searched. For example ``COQ_SRC_SUBDIRS+=user-contrib/Unicoq`` lets you build a plugin containing OCaml code that depends on the OCaml code of ``Unicoq`` :COQFLAGS: override the flags passed to ``coqc``. By default ``-q``. :COQEXTRAFLAGS: extend the flags passed to ``coqc`` :COQCHKFLAGS: override the flags passed to ``coqchk``. By default ``-silent -o``. :COQCHKEXTRAFLAGS: extend the flags passed to ``coqchk`` :COQDOCFLAGS: override the flags passed to ``coqdoc``. By default ``-interpolate -utf8``. :COQDOCEXTRAFLAGS: extend the flags passed to ``coqdoc`` :COQLIBINSTALL, COQDOCINSTALL: specify where the Coq libraries and documentation will be installed. 
By default a combination of ``$(DESTDIR)`` (if defined) with ``$(COQLIB)/user-contrib`` and ``$(DOCDIR)/user-contrib``. Use :ref:`coqmakefilelocallate` instead to access more variables. **Rule extension** The following makefile rules can be extended. .. example:: :: pre-all:: echo "This line is print before making the all target" install-extra:: cp ThisExtraFile /there/it/goes ``pre-all::`` run before the ``all`` target. One can use this to configure the project, or initialize sub modules or check dependencies are met. ``post-all::`` run after the ``all`` target. One can use this to run a test suite, or compile extracted code. ``install-extra::`` run after ``install``. One can use this to install extra files. ``install-doc::`` One can use this to install extra doc. ``uninstall::`` \ ``uninstall-doc::`` \ ``clean::`` \ ``cleanall::`` \ ``archclean::`` \ ``merlin-hook::`` One can append lines to the generated ``.merlin`` file extending this target. .. _coqmakefilelocallate: CoqMakefile.local-late ++++++++++++++++++++++ The optional file ``CoqMakefile.local-late`` is included at the end of the generated file ``CoqMakefile``. The following is a partial list of accessible variables: :COQ_VERSION: the version of ``coqc`` being used, which can be used to provide different behavior depending on the Coq version :COQMAKEFILE_VERSION: the version of Coq used to generate the Makefile, which can be used to detect version mismatches :ALLDFILES: the list of generated dependency files, which can be used, for example, to cause ``make`` to recompute dependencies when files change by writing ``$(ALLDFILES): myfiles`` or to indicate that files must be generated before dependencies can be computed by writing ``$(ALLDFILES): | mygeneratedfiles`` :VOFILES, GLOBFILES, CMOFILES, CMXFILES, OFILES, CMAFILES, CMXAFILES, CMIFILES, CMXSFILES: lists of files that are generated by various invocations of the compilers In addition, the following variables may be useful for deciding what targets to present via ``$(shell ...)``; these variables are already accessible in recipes for rules added in ``CoqMakefile.local``, but are only accessible from top-level ``$(shell ...)`` invocations in ``CoqMakefile.local-late``: :COQC, COQDEP, COQDOC, CAMLC, CAMLOPTC: compiler binaries :COQFLAGS, CAMLFLAGS, COQLIBS, COQDEBUG, OCAMLLIBS: flags passed to the Coq or OCaml compilers Timing targets and performance testing ++++++++++++++++++++++++++++++++++++++ The generated ``Makefile`` supports the generation of two kinds of timing data: per-file build-times, and per-line times for an individual file. The following targets and Makefile variables allow collection of per- file timing data: + ``TIMED=1`` passing this variable will cause ``make`` to emit a line describing the user-space build-time and peak memory usage for each file built. .. note:: On ``Mac OS``, this works best if you’ve installed ``gnu-time``. .. example:: For example, the output of ``make TIMED=1`` may look like this: :: COQDEP Fast.v COQDEP Slow.v COQC Slow.v Slow.vo (user: 0.34 mem: 395448 ko) COQC Fast.v Fast.vo (user: 0.01 mem: 45184 ko) + ``pretty-timed`` this target stores the output of ``make TIMED=1`` into ``time-of-build.log``, and displays a table of the times and peak memory usages, sorted from slowest to fastest, which is also stored in ``time-of-build-pretty.log``. If you want to construct the ``log`` for targets other than the default one, you can pass them via the variable ``TGTS``, e.g., ``make pretty-timed TGTS="a.vo b.vo"``. .. 
note:: This target requires ``python`` to build the table. .. note:: This target will *append* to the timing log; if you want a fresh start, you must remove the file ``time-of-build.log`` or ``run make cleanall``. .. note:: By default the table displays user times. If the build log contains real times (which it does by default), passing ``TIMING_REAL=1`` to ``make pretty-timed`` will use real times rather than user times in the table. .. note:: Passing ``TIMING_INCLUDE_MEM=0`` to ``make`` will result in the tables not including peak memory usage information. Passing ``TIMING_SORT_BY_MEM=1`` to ``make`` will result in the tables be sorted by peak memory usage rather than by the time taken. .. example:: For example, the output of ``make pretty-timed`` may look like this: :: COQDEP VFILES COQC Slow.v Slow.vo (real: 0.52, user: 0.39, sys: 0.12, mem: 394648 ko) COQC Fast.v Fast.vo (real: 0.06, user: 0.02, sys: 0.03, mem: 56980 ko) Time | Peak Mem | File Name -------------------------------------------- 0m00.41s | 394648 ko | Total Time / Peak Mem -------------------------------------------- 0m00.39s | 394648 ko | Slow.vo 0m00.02s | 56980 ko | Fast.vo + ``print-pretty-timed-diff`` this target builds a table of timing changes between two compilations; run ``make make-pretty-timed-before`` to build the log of the “before” times, and run ``make make-pretty-timed-after`` to build the log of the “after” times. The table is printed on the command line, and stored in ``time-of-build-both.log``. This target is most useful for profiling the difference between two commits in a repository. .. note:: This target requires ``python`` to build the table. .. note:: The ``make-pretty-timed-before`` and ``make-pretty-timed-after`` targets will *append* to the timing log; if you want a fresh start, you must remove the files ``time-of-build-before.log`` and ``time-of-build-after.log`` or run ``make cleanall`` *before* building either the “before” or “after” targets. .. note:: The table will be sorted first by absolute time differences rounded towards zero to a whole-number of seconds, then by times in the “after” column, and finally lexicographically by file name. This will put the biggest changes in either direction first, and will prefer sorting by build-time over subsecond changes in build time (which are frequently noise); lexicographic sorting forces an order on files which take effectively no time to compile. If you prefer a different sorting order, you can pass ``TIMING_SORT_BY=absolute`` to sort by the total time taken, or ``TIMING_SORT_BY=diff`` to sort by the signed difference in time. .. note:: Just like ``pretty-timed``, this table defaults to using user times. Pass ``TIMING_REAL=1`` to ``make`` on the command line to show real times instead. .. note:: Just like ``pretty-timed``, passing ``TIMING_INCLUDE_MEM=0`` to ``make`` will result in the tables not including peak memory usage information. Passing ``TIMING_SORT_BY_MEM=1`` to ``make`` will result in the tables be sorted by peak memory usage rather than by the time taken. .. 
example:: For example, the output table from ``make print-pretty-timed-diff`` may look like this: :: After | Peak Mem | File Name | Before | Peak Mem || Change || Change (mem) | % Change | % Change (mem) ----------------------------------------------------------------------------------------------------------------------------- 0m00.43s | 394700 ko | Total Time / Peak Mem | 0m00.41s | 394648 ko || +0m00.01s || 52 ko | +4.87% | +0.01% ----------------------------------------------------------------------------------------------------------------------------- 0m00.39s | 394700 ko | Fast.vo | 0m00.02s | 56980 ko || +0m00.37s || 337720 ko | +1850.00% | +592.69% 0m00.04s | 56772 ko | Slow.vo | 0m00.39s | 394648 ko || -0m00.35s || -337876 ko | -89.74% | -85.61% The following targets and ``Makefile`` variables allow collection of per- line timing data: + ``TIMING=1`` passing this variable will cause ``make`` to use ``coqc -time`` to write to a ``.v.timing`` file for each ``.v`` file compiled, which contains line-by-line timing information. .. example:: For example, running ``make all TIMING=1`` may result in a file like this: :: Chars 0 - 26 [Require~Coq.ZArith.BinInt.] 0.157 secs (0.128u,0.028s) Chars 27 - 68 [Declare~Reduction~comp~:=~vm_c...] 0. secs (0.u,0.s) Chars 69 - 162 [Definition~foo0~:=~Eval~comp~i...] 0.153 secs (0.136u,0.019s) Chars 163 - 208 [Definition~foo1~:=~Eval~comp~i...] 0.239 secs (0.236u,0.s) + ``print-pretty-single-time-diff`` :: print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing this target will make a sorted table of the per-line timing differences between the timing logs in the ``BEFORE`` and ``AFTER`` files, display it, and save it to the file specified by the ``TIME_OF_PRETTY_BUILD_FILE`` variable, which defaults to ``time-of-build-pretty.log``. To generate the ``.v.before-timing`` or ``.v.after-timing`` files, you should pass ``TIMING=before`` or ``TIMING=after`` rather than ``TIMING=1``. .. note:: The sorting used here is the same as in the ``print-pretty-timed-diff`` target. .. note:: This target requires python to build the table. .. note:: This target follows the same sorting order as the ``print-pretty-timed-diff`` target, and supports the same options for the ``TIMING_SORT_BY`` variable. .. note:: By default, two lines are only considered the same if the character offsets and initial code strings are identical. Passing ``TIMING_FUZZ=N`` relaxes this constraint by allowing the character locations to differ by up to ``N``, as long as the total number of characters and initial code strings continue to match. This is useful when there are small changes to a file, and you want to match later lines that have not changed even though the character offsets have changed. .. note:: By default the table picks up real times, under the assumption that when comparing line-by-line, the real time is a more accurate representation as it includes disk time and time spent in the native compiler. Passing ``TIMING_REAL=0`` to ``make`` will use user times rather than real times in the table. .. 
example:: For example, running ``print-pretty-single-time-diff`` might give a table like this: :: After | Code | Before || Change | % Change --------------------------------------------------------------------------------------------------- 0m00.50s | Total | 0m04.17s || -0m03.66s | -87.96% --------------------------------------------------------------------------------------------------- 0m00.145s | Chars 069 - 162 [Definition~foo0~:=~Eval~comp~i...] | 0m00.192s || -0m00.04s | -24.47% 0m00.126s | Chars 000 - 026 [Require~Coq.ZArith.BinInt.] | 0m00.143s || -0m00.01s | -11.88% N/A | Chars 027 - 068 [Declare~Reduction~comp~:=~nati...] | 0m00.s || +0m00.00s | N/A 0m00.s | Chars 027 - 068 [Declare~Reduction~comp~:=~vm_c...] | N/A || +0m00.00s | N/A 0m00.231s | Chars 163 - 208 [Definition~foo1~:=~Eval~comp~i...] | 0m03.836s || -0m03.60s | -93.97% + ``all.timing.diff``, ``path/to/file.v.timing.diff`` The ``path/to/file.v.timing.diff`` target will make a ``.v.timing.diff`` file for the corresponding ``.v`` file, with a table as would be generated by the ``print-pretty-single-time-diff`` target; it depends on having already made the corresponding ``.v.before-timing`` and ``.v.after-timing`` files, which can be made by passing ``TIMING=before`` and ``TIMING=after``. The ``all.timing.diff`` target will make such timing difference files for all of the ``.v`` files that the ``Makefile`` knows about. It will fail if some ``.v.before-timing`` or ``.v.after-timing`` files don’t exist. .. note:: This target requires python to build the table. Building a subset of the targets with ``-j`` ++++++++++++++++++++++++++++++++++++++++++++ To build, say, two targets foo.vo and bar.vo in parallel one can use ``make only TGTS="foo.vo bar.vo" -j``. .. note:: ``make foo.vo bar.vo -j`` has a different meaning for the ``make`` utility, in particular it may build a shared prerequisite twice. .. note:: Due to limitations with the compilation chain, makefiles generated by ``coq_makefile`` won't correctly compile OCaml plugins with OCaml < 4.07.0 when using more than one job (``-j N`` for ``N > 1``). Precompiling for ``native_compute`` +++++++++++++++++++++++++++++++++++ To compile files for ``native_compute``, one can use the ``-native-compiler yes`` option of Coq, by putting it in the ``_CoqProject`` file. The generated installation target of ``CoqMakefile`` will then take care of installing the extra ``.coq-native`` directories. .. note:: As an alternative to modifying ``_CoqProject``, one can set an environment variable when calling ``make``: :: COQEXTRAFLAGS="-native-compiler yes" make This can be useful when files cannot be modified, for instance when installing via OPAM a package built with ``coq_makefile``: :: COQEXTRAFLAGS="-native-compiler yes" opam install coq-package .. note:: This requires all dependencies to be themselves compiled with ``-native-compiler yes``. The grammar of _CoqProject ++++++++++++++++++++++++++ A ``_CoqProject`` file encodes a list of strings using the following syntax: .. prodn:: CoqProject ::= {* {| @blank | @comment | @quoted_string | @unquoted_string } } blank ::= {| space | horizontal_tab | newline } comment ::= # {* comment_char } newline quoted_string ::= " {* quoted_char } " unquoted_string ::= string_start_char {* unquoted_char } where the following definitions apply: * :n:`space`, :n:`horizontal_tab` and :n:`newline` stand for the corresponding ASCII characters. * :n:`comment_char` is the set of all characters except :n:`newline`. 
* :n:`quoted_char` is the set of all characters except ``"``. * :n:`string_start_char` is the set of all characters except those that match :n:`@blank`, or are ``"`` or ``#``. * :n:`unquoted_char` is the set of all characters except those that match :n:`@blank` or are ``#``. The parser produces a list of strings in the same order as they were encountered in ``_CoqProject``. Blanks and comments are removed and the double quotes of :n:`@quoted_string` tokens are removed as well. The list is then treated as a list of command-line arguments of ``coq_makefile``. The semantics of ``-arg`` are as follows: the string given as argument is split on whitespace, but single quotes prevent splitting. The resulting list of strings is then passed to coqc. The current approach has a few limitations: Double quotes in a ``_CoqProject`` file are only special characters at the start of a string. For lack of an escaping mechanism, it is currently impossible to pass the following kinds of strings to ``coq_makefile`` using a ``_CoqProject`` file: * strings starting with ``"`` * strings starting with ``#`` and containing ``"`` * strings containing both whitespace and ``"`` In addition, it is impossible to pass strings containing ``'`` to coqc via ``-arg``. Building a Coq project with Dune ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. note:: Dune's Coq support is still experimental; we strongly recommend using Dune 2.3 or later. .. note:: The canonical documentation for the Coq Dune extension is maintained upstream; please refer to the `Dune manual `_ for up-to-date information. This documentation is up to date for Dune 2.3. Building a Coq project with Dune requires setting up a Dune project for your files. This involves adding a ``dune-project`` and ``pkg.opam`` file to the root (``pkg.opam`` can be empty or generated by Dune itself), and then providing ``dune`` files in the directories where your ``.v`` files are placed. For the experimental version "0.1" of the Coq Dune language, Coq library stanzas look like: .. code:: scheme (coq.theory (name ) (package ) (synopsis ) (modules ) (libraries ) (flags )) This stanza will build all `.v` files in the given directory, wrapping the library under ````. If you declare an ````, an ``.install`` file for the library will be generated; the optional ``(modules )`` field allows you to filter the list of modules, and ``(libraries )`` allows the Coq theory to depend on ML plugins. For the moment, Dune relies on Coq's standard mechanisms (such as ``COQPATH``) to locate installed Coq libraries. By default Dune will skip ``.v`` files present in subdirectories. In order to enable the usual recursive organization of Coq projects, add .. code:: scheme (include_subdirs qualified) to your ``dune`` file. Once your project is set up, `dune build` will generate the `pkg.install` files and all the files necessary for the installation of your project. .. example:: A typical stanza for a Coq plugin is split into two parts. An OCaml build directive, which is standard Dune: .. code:: scheme (library (name equations_plugin) (public_name equations.plugin) (flags :standard -warn-error -3-9-27-32-33-50) (libraries coq.plugins.cc coq.plugins.extraction)) (coq.pp (modules g_equations)) And a Coq-specific part that depends on it via the ``libraries`` field: .. code:: scheme (coq.theory (name Equations) ; -R flag (package equations) (synopsis "Equations Plugin") (libraries coq.plugins.extraction equations.plugin) (modules :standard \ IdDec NoCycle)) ; exclude some modules that don't build (include_subdirs qualified)
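.. example:: A minimal library-only project

   To summarize the common case, here is a sketch of a minimal project layout, assuming a theory named ``MyLib`` and a package named ``mylib`` (both names are placeholders). The ``dune-project`` file at the root selects the Dune language version and enables the experimental Coq extension, ``mylib.opam`` can be left empty, and a ``dune`` file sits next to the ``.v`` files:

   .. code:: scheme

      ; dune-project, at the root of the repository
      (lang dune 2.3)
      (using coq 0.1)

   .. code:: scheme

      ; dune, in the directory containing the .v files
      (coq.theory
       (name MyLib)     ; modules are wrapped and qualified as MyLib.*
       (package mylib)  ; a mylib.install file will be generated
       (synopsis "A minimal Coq library"))

      (include_subdirs qualified)

   Running ``dune build`` at the root of the project then builds all the ``.v`` files and produces the ``mylib.install`` file mentioned above.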
.. _coqdep: Computing Module dependencies ----------------------------- In order to compute module dependencies (to be used by ``make`` or ``dune``), Coq provides the ``coqdep`` tool. ``coqdep`` computes inter-module dependencies for Coq programs, and prints the dependencies on the standard output in a format readable by make. When a directory is given as argument, it is recursively looked at. Dependencies of Coq modules are computed by looking at :cmd:`Require` and :cmd:`Declare ML Module` commands. See the man page of ``coqdep`` for more details and options. Both Dune and ``coq_makefile`` use ``coqdep`` to compute the dependencies among the files that are part of a Coq project. .. _coqnative: Split compilation of native computation files --------------------------------------------- Coq features a :tacn:`native_compute` tactic to provide fast computation in the kernel. This process performs compilation of Coq terms to OCaml programs using the OCaml compiler, which may cause a significant overhead. Hence native compilation is an opt-in configure flag. When native compilation is activated, Coq generates the compiled files upfront, i.e. during the ``coqc`` invocation on the corresponding ``.v`` file. This is impractical because it means one must choose in advance whether they will use a native-capable Coq installation. In particular, activating native compilation forces the recompilation of the whole Coq installation. See :ref:`command line options ` for more details. Starting from Coq 8.14, a new binary ``coqnative`` is available. It allows performing split native compilation by generating the native compute files out of the compiled ``.vo`` file rather than out of the source ``.v`` file. The ``coqnative`` command takes a name *file.vo* as argument and tries to perform native compilation on it. It assumes that the Coq libraries on which *file.vo* depends have been first compiled to their native files, and will fail otherwise. It accepts the ``-R``, ``-Q``, ``-I`` and ``-nI`` arguments with the same semantics as if the native compilation process had been performed through ``coqc``. In particular, it means that: + ``-R`` and ``-Q`` are equivalent + ``-I`` is a no-op that is accepted only for scripting convenience Embedded Coq phrases inside |Latex| documents ----------------------------------------------- When writing documentation about a proof development, one may want to insert Coq phrases inside a |Latex| document, possibly together with the corresponding answers of the system. We provide a mechanical way to process such Coq phrases embedded in |Latex| files: the ``coq-tex`` filter. This filter extracts Coq phrases embedded in |Latex| files, evaluates them, and inserts the outcome of the evaluation after each phrase. Starting with a file ``file.tex`` containing Coq phrases, the ``coq-tex`` filter produces a file named ``file.v.tex`` with the Coq outcome. There are options to produce the Coq parts in a smaller font, in italics, between horizontal rules, etc. See the man page of ``coq-tex`` for more details. Man pages --------- There are man pages for the commands ``coqdep`` and ``coq-tex``. Man pages are installed at installation time (see installation instructions in file ``INSTALL``, step 6). .. _ltac: Ltac ==== This chapter documents the tactic language |Ltac|.
We start by giving the syntax followed by the informal semantics. To learn more about the language and especially about its foundations, please refer to :cite:`Del00`. (Note the examples in the paper won't work as-is; Coq has evolved since the paper was written.) .. example:: Basic tactic macros Here are some examples of simple tactic macros you can create with |Ltac|: .. coqdoc:: Ltac reduce_and_try_to_solve := simpl; intros; auto. Ltac destruct_bool_and_rewrite b H1 H2 := destruct b; [ rewrite H1; eauto | rewrite H2; eauto ]. See Section :ref:`ltac-examples` for more advanced examples. .. _ltac-syntax: Syntax ------ The syntax of the tactic language is given below. The main entry of the grammar is :n:`@ltac_expr`, which is used in proof mode as well as to define new tactics with the :cmd:`Ltac` command. The grammar uses multiple :n:`ltac_expr*` nonterminals to express how subexpressions are grouped when they're not fully parenthesized. For example, in many programming languages, `a*b+c` is interpreted as `(a*b)+c` because `*` has higher precedence than `+`. Usually `a/b/c` is given the :gdef:`left associative` interpretation `(a/b)/c` rather than the :gdef:`right associative` interpretation `a/(b/c)`. In Coq, the expression :n:`try repeat @tactic__1 || @tactic__2; @tactic__3; @tactic__4` is interpreted as :n:`(try (repeat (@tactic__1 || @tactic__2)); @tactic__3); @tactic__4` because `||` is part of :token:`ltac_expr2`, which has higher precedence than :tacn:`try` and :tacn:`repeat` (at the level of :token:`ltac_expr3`), which in turn have higher precedence than `;`, which is part of :token:`ltac_expr4`. (A *lower* number in the nonterminal name means *higher* precedence in this grammar.) The constructs in :token:`ltac_expr` are :term:`left associative`. .. insertprodn ltac_expr tactic_atom .. prodn:: ltac_expr ::= {| @ltac_expr4 | @binder_tactic } ltac_expr4 ::= @ltac_expr3 ; {| @ltac_expr3 | @binder_tactic } | @ltac_expr3 ; [ @for_each_goal ] | @ltac_expr3 ltac_expr3 ::= @l3_tactic | @ltac_expr2 ltac_expr2 ::= @ltac_expr1 + {| @ltac_expr2 | @binder_tactic } | @ltac_expr1 %|| {| @ltac_expr2 | @binder_tactic } | @l2_tactic | @ltac_expr1 ltac_expr1 ::= @tactic_value | @qualid {+ @tactic_arg } | @l1_tactic | @ltac_expr0 tactic_value ::= {| @value_tactic | @syn_value } tactic_arg ::= @tactic_value | @term | () ltac_expr0 ::= ( @ltac_expr ) | [> @for_each_goal ] | @tactic_atom tactic_atom ::= @integer | @qualid | () .. todo For the moment, I've left the language constructs like +, || and ; unchanged in the grammar. Not sure what to do with them. If we just make these indirections I think the grammar no longer gives you an overall idea of the concrete grammar without following the hyperlinks for many terms--not so easy (e.g. I have a construct and I want to figure out which productions generate it so I can read about them). We should think about eventually having a cheat sheet for the constructs, perhaps as part of the chapter introduction (use case: I know there's a construct but I can't remember its syntax). They do show up in the index but they're not so easy to find. I had thought a little about putting an ltac expression cheat sheet at the top of the tactics index. Unconventional, but people would see it and remember how to find it. OTOH, as you rightly note, they are not really tactics. Looking for better ideas that we are OK with. .. note:: Tactics described in other chapters of the documentation are :production:`simple_tactic`\s, which only modify the proof state. 
|Ltac| provides additional constructs that can generally be used wherever a :token:`simple_tactic` can appear, even though they don't modify the proof state and that syntactically they're at varying levels in :token:`ltac_expr`. For simplicity of presentation, the |Ltac| constructs are documented as tactics. Tactics are grouped as follows: - :production:`binder_tactic`\s are: :tacn:`fun` and :tacn:`let` - :production:`l3_tactic`\s include |Ltac| tactics: :tacn:`try`, :tacn:`do`, :tacn:`repeat`, :tacn:`timeout`, :tacn:`time`, :tacn:`progress`, :tacn:`once`, :tacn:`exactly_once`, :tacn:`only` and :tacn:`abstract` - :production:`l2_tactic`\s are: :tacn:`tryif` - :production:`l1_tactic`\s are the :token:`simple_tactic`\s, :tacn:`first`, :tacn:`solve`, :tacn:`idtac`, :tacn:`fail` and :tacn:`gfail` as well as :tacn:`match`, :tacn:`match goal` and their :n:`lazymatch` and :n:`multimatch` variants. - :production:`value_tactic`\s, which return values rather than change the proof state. They are: :tacn:`eval`, :tacn:`context`, :tacn:`numgoals`, :tacn:`fresh`, :tacn:`type of` and :tacn:`type_term`. The documentation for these |Ltac| constructs mentions which group they belong to. The difference is only relevant in some compound tactics where extra parentheses may be needed. For example, parentheses are required in :n:`idtac + (once idtac)` because :tacn:`once` is an :token:`l3_tactic`, which the production :n:`@ltac_expr2 ::= @ltac_expr1 + {| @ltac_expr2 | @binder_tactic }` doesn't accept after the `+`. .. note:: - The grammar reserves the token ``||``. .. _ltac-semantics: Semantics --------- .. todo For the compound tactics, review all the descriptions of evaluation vs application, backtracking, etc. to get the language consistent and simple (refactoring so the common elements are described in one place) Types of values ~~~~~~~~~~~~~~~ An |Ltac| value can be a tactic, integer, string, unit (written as "`()`" ) or syntactic value. Syntactic values correspond to certain nonterminal symbols in the grammar, each of which is a distinct type of value. Most commonly, the value of an |Ltac| expression is a tactic that can be executed. While there are a number of constructs that let you combine multiple tactics into compound tactics, there are no operations for combining most other types of values. For example, there's no function to add two integers. Syntactic values are entered with the :token:`syn_value` construct. Values of all types can be assigned to toplevel symbols with the :cmd:`Ltac` command or to local symbols with the :tacn:`let` tactic. |Ltac| :tacn:`functions` can return values of any type. .. todo: there are 36 subsections under "Semantics", which seems like far too many Syntactic values ~~~~~~~~~~~~~~~~ .. insertprodn syn_value syn_value .. prodn:: syn_value ::= @ident : ( @nonterminal ) Provides a way to use the syntax and semantics of a grammar nonterminal as a value in an :token:`ltac_expr`. The table below describes the most useful of these. You can see the others by running ":cmd:`Print Grammar` `tactic`" and examining the part at the end under "Entry tactic:tactic_value". :token:`ident` name of a grammar nonterminal listed in the table :production:`nonterminal` represents syntax described by :token:`nonterminal`. .. 
list-table:: :header-rows: 1 * - Specified :token:`ident` - Parsed as - Interpreted as - as in tactic * - ``ident`` - :token:`ident` - a user-specified name - :tacn:`intro` * - ``string`` - :token:`string` - a string - * - ``integer`` - :token:`integer` - an integer - * - ``reference`` - :token:`qualid` - a qualified identifier - * - ``uconstr`` - :token:`term` - an untyped term - :tacn:`refine` * - ``constr`` - :token:`term` - a term - :tacn:`exact` * - ``ltac`` - :token:`ltac_expr` - a tactic - :n:`ltac:(@ltac_expr)` can be used to indicate that the parenthesized item should be interpreted as a tactic and not as a term. The constructs can also be used to pass parameters to tactics written in OCaml. (While all of the :token:`syn_value`\s can appear at the beginning of an :token:`ltac_expr`, the others are not useful because they will not evaluate to tactics.) :n:`uconstr:(@term)` can be used to build untyped terms. Terms built in |Ltac| are well-typed by default. Building large terms in recursive |Ltac| functions may give very slow behavior because terms must be fully type checked at each step. In this case, using an untyped term may avoid most of the repetitive type checking for the term, improving performance. .. todo above: maybe elaborate on "well-typed by default" see https://github.com/coq/coq/pull/12103#discussion_r436317558 Untyped terms built using :n:`uconstr:(…)` can be used as arguments to the :tacn:`refine` tactic, for example. In that case the untyped term is type checked against the conclusion of the goal, and the holes which are not solved by the typing procedure are turned into new subgoals. Tactics in terms ~~~~~~~~~~~~~~~~ .. insertprodn term_ltac term_ltac .. prodn:: term_ltac ::= ltac : ( @ltac_expr ) Allows including an :token:`ltac_expr` within a term. Semantically, it's the same as the :token:`syn_value` for `ltac`, but these are distinct in the grammar. Substitution ~~~~~~~~~~~~ .. todo next paragraph: we need a better discussion of substitution. Looks like that also applies to binder_tactics in some form. See https://github.com/coq/coq/pull/12103#discussion_r422105218 :token:`name`\s within |Ltac| expressions are used to represent both terms and |Ltac| variables. If the :token:`name` corresponds to an |Ltac| variable or tactic name, |Ltac| substitutes the value before applying the expression. Generally it's best to choose distinctive names for |Ltac| variables that won't clash with term names. You can use :n:`ltac:(@name)` or :n:`(@name)` to control whether a :token:`name` is interpreted as, respectively, an |Ltac| variable or a term. Note that values from toplevel symbols, unlike locally-defined symbols, are substituted only when they appear at the beginning of an :token:`ltac_expr` or as a :token:`tactic_arg`. Local symbols are also substituted into tactics: .. example:: Substitution of global and local symbols .. coqtop:: reset none Goal True. .. coqtop:: all Ltac n := 1. let n2 := n in idtac n2. Fail idtac n. Sequence: ; ~~~~~~~~~~~ A sequence is an expression of the following form: .. tacn:: @ltac_expr3__1 ; {| @ltac_expr3__2 | @binder_tactic } :name: ltac-seq .. todo: can't use "… ; …" as the name because of the semicolon The expression :n:`@ltac_expr3__1` is evaluated to :n:`v__1`, which must be a tactic value. The tactic :n:`v__1` is applied to the current goals, possibly producing more goals. Then the right-hand side is evaluated to produce :n:`v__2`, which must be a tactic value. 
The tactic :n:`v__2` is applied to all the goals produced by the prior application. Sequence is associative. This construct uses backtracking: if :n:`@ltac_expr3__2` fails, Coq will try each alternative success (if any) for :n:`@ltac_expr3__1`, retrying :n:`@ltac_expr3__2` for each until both tactics succeed or all alternatives have failed. See :ref:`branching_and_backtracking`. .. todo I don't see the distinction between evaluating an ltac expression and applying it--how are they not the same thing? If different, the "Semantics" section above should explain it. See https://github.com/coq/coq/pull/12103#discussion_r422210482 .. note:: - If you want :n:`@tactic__2; @tactic__3` to be fully applied to the first subgoal generated by :n:`@tactic__1` before applying it to the other subgoals, then you should write: - :n:`@tactic__1; [> @tactic__2; @tactic__3 .. ]` rather than - :n:`@tactic__1; (@tactic__2; @tactic__3)`. Local application of tactics: [> ... ] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: [> @for_each_goal ] :name: [> … | … | … ] (dispatch) .. insertprodn for_each_goal goal_tactics .. prodn:: for_each_goal ::= @goal_tactics | {? @goal_tactics %| } {? @ltac_expr } .. {? %| @goal_tactics } goal_tactics ::= {*| {? @ltac_expr } } Applies a different :n:`{? @ltac_expr }` to each of the focused goals. In the first form of :token:`for_each_goal` (without `..`), the construct fails if the number of specified :n:`{? @ltac_expr }` is not the same as the number of focused goals. Omitting an :n:`@ltac_expr` leaves the corresponding goal unchanged. In the second form (with :n:`{? @ltac_expr } ..`), the left and right :token:`goal_tactics` are applied respectively to a prefix or suffix of the list of focused goals. The :n:`{? @ltac_expr }` before the `..` is applied to any focused goals in the middle (possibly none) that are not covered by the :token:`goal_tactics`. The number of :n:`{? @ltac_expr }` in the :token:`goal_tactics` must be no more than the number of focused goals. In particular: :n:`@goal_tactics | .. | @goal_tactics` The goals not covered by the two :token:`goal_tactics` are left unchanged. :n:`[> @ltac_expr .. ]` :n:`@ltac_expr` is applied independently to each of the goals, rather than globally. In particular, if there are no goals, the tactic is not run at all. A tactic which expects multiple goals, such as :tacn:`swap`, would act as if a single goal is focused. Note that :n:`@ltac_expr3 ; [ {*| @ltac_expr} ]` is a convenient idiom to process the goals generated by applying :n:`@ltac_expr3`. .. tacn:: @ltac_expr3 ; [ @for_each_goal ] :name: [ … | … | … ] (dispatch) :n:`@ltac_expr3 ; [ ... ]` is equivalent to :n:`[> @ltac_expr3 ; [> ... ] .. ]`. .. todo see discussion of [ ... ] in https://github.com/coq/coq/issues/12283 .. _goal-selectors: Goal selectors ~~~~~~~~~~~~~~ .. todo: mention this applies to Print commands and the Info command By default, tactic expressions are applied only to the first goal. Goal selectors provide a way to apply a tactic expression to another goal or multiple goals. (The :opt:`Default Goal Selector` option can be used to change the default behavior.) .. tacn:: @toplevel_selector : @ltac_expr :name: … : … (goal selector) .. insertprodn toplevel_selector toplevel_selector .. prodn:: toplevel_selector ::= @selector | all | ! | par Reorders the goals and applies :token:`ltac_expr` to the selected goals. It can only be used at the top level of a tactic expression; it cannot be used within a tactic expression. 
The selected goals are reordered so they appear after the lowest-numbered selected goal, ordered by goal number. :ref:`Example <reordering_goals_ex>`. If the selector applies to a single goal or to all goals, the reordering will not be apparent. The order of the goals in the :token:`selector` is irrelevant. (This may not be what you expect; see `#8481 <https://github.com/coq/coq/issues/8481>`_.) .. todo why shouldn't "all" and "!" be accepted anywhere a @selector is accepted? It would be simpler to explain. `all` Selects all focused goals. `!` If exactly one goal is in focus, apply :token:`ltac_expr` to it. Otherwise the tactic fails. `par` Applies :n:`@ltac_expr` to all focused goals in parallel. The number of workers can be controlled via the command line option :n:`-async-proofs-tac-j @natural`. Limitations: ``par:`` only works on goals that don't contain existential variables. :n:`@ltac_expr` must either solve the goal completely or do nothing (i.e. it cannot make some progress). Selectors can also be used nested within a tactic expression with the :tacn:`only` tactic: .. tacn:: only @selector : @ltac_expr3 .. insertprodn selector range_selector .. prodn:: selector ::= {+, @range_selector } | [ @ident ] range_selector ::= @natural - @natural | @natural Applies :token:`ltac_expr3` to the selected goals. :tacn:`only` is an :token:`l3_tactic`. :n:`{+, @range_selector }` The selected goals are the union of the specified :token:`range_selector`\s. :n:`[ @ident ]` Limits the application of :token:`ltac_expr3` to the goal previously named :token:`ident` by the user (see :ref:`existential-variables`). :n:`@natural__1 - @natural__2` Selects the goals :n:`@natural__1` through :n:`@natural__2`, inclusive. :n:`@natural` Selects a single goal. .. exn:: No such goal. :name: No such goal. (Goal selector) :undocumented: .. _reordering_goals_ex: .. example:: Selector reordering goals .. coqtop:: reset in Goal 1=0 /\ 2=0 /\ 3=0. .. coqtop:: all repeat split. 1,3: idtac. .. TODO change error message index entry Processing multiple goals ~~~~~~~~~~~~~~~~~~~~~~~~~ When presented with multiple focused goals, most |Ltac| constructs process each goal separately. They succeed only if there is a success for each goal. For example: .. example:: Multiple focused goals This tactic fails because there is no match for the second goal (`False`). .. coqtop:: reset none fail Goal True /\ False. .. coqtop:: out split. .. coqtop:: all Fail all: let n := numgoals in idtac "numgoals =" n; match goal with | |- True => idtac end. Do loop ~~~~~~~ .. tacn:: do @nat_or_var @ltac_expr3 The do loop repeats a tactic :token:`nat_or_var` times: :n:`@ltac_expr` is evaluated to ``v``, which must be a tactic value. This tactic value ``v`` is applied :token:`nat_or_var` times. If :token:`nat_or_var` > 1, after the first application of ``v``, ``v`` is applied, at least once, to the generated subgoals and so on. It fails if the application of ``v`` fails before :token:`nat_or_var` applications have been completed. :tacn:`do` is an :token:`l3_tactic`. Repeat loop ~~~~~~~~~~~ .. tacn:: repeat @ltac_expr3 The repeat loop repeats a tactic until it fails or doesn't change the proof context. :n:`@ltac_expr` is evaluated to ``v``. If ``v`` denotes a tactic, this tactic is applied to each focused goal independently. If the application succeeds, the tactic is applied recursively to all the generated subgoals until it eventually fails. The recursion stops in a subgoal when the tactic has failed *to make progress*.
The tactic :tacn:`repeat` :n:`@ltac_expr` itself never fails. :tacn:`repeat` is an :token:`l3_tactic`. Catching errors: try ~~~~~~~~~~~~~~~~~~~~ We can catch the tactic errors with: .. tacn:: try @ltac_expr3 :n:`@ltac_expr` is evaluated to ``v`` which must be a tactic value. The tactic value ``v`` is applied to each focused goal independently. If the application of ``v`` fails in a goal, it catches the error and leaves the goal unchanged. If the level of the exception is positive, then the exception is re-raised with its level decremented. :tacn:`try` is an :token:`l3_tactic`. Detecting progress ~~~~~~~~~~~~~~~~~~ We can check if a tactic made progress with: .. tacn:: progress @ltac_expr3 :n:`@ltac_expr` is evaluated to ``v`` which must be a tactic value. The tactic value ``v`` is applied to each focused subgoal independently. If the application of ``v`` to one of the focused subgoal produced subgoals equal to the initial goals (up to syntactical equality), then an error of level 0 is raised. :tacn:`progress` is an :token:`l3_tactic`. .. exn:: Failed to progress. :undocumented: .. _branching_and_backtracking: Branching and backtracking ~~~~~~~~~~~~~~~~~~~~~~~~~~ |Ltac| provides several :gdef:`branching` tactics that permit trying multiple alternative tactics for a proof step. For example, :tacn:`first`, which tries several alternatives and selects the first that succeeds, or :tacn:`tryif`, which tests whether a given tactic would succeed or fail if it was applied and then, depending on the result, applies one of two alternative tactics. There are also looping constructs :tacn:`do` and :tacn:`repeat`. The order in which the subparts of these tactics are evaluated is generally similar to structured programming constructs in many languages. The :tacn:`+<+ (backtracking branching)>`, :tacn:`multimatch` and :tacn:`multimatch goal` tactics provide more complex capability. Rather than applying a single successful tactic, these tactics generate a series of successful tactic alternatives that are tried sequentially when subsequent tactics outside these constructs fail. For example: .. example:: Backtracking .. coqtop:: all Fail multimatch True with | True => idtac "branch 1" | _ => idtac "branch 2" end ; idtac "branch A"; fail. These constructs are evaluated using :gdef:`backtracking`. Each creates a :gdef:`backtracking point`. When a subsequent tactic fails, evaluation continues from the nearest prior backtracking point with the next successful alternative and repeats the tactics after the backtracking point. When a backtracking point has no more successful alternatives, evaluation continues from the next prior backtracking point. If there are no more prior backtracking points, the overall tactic fails. Thus, backtracking tactics can have multiple successes. Non-backtracking constructs that appear after a backtracking point are reprocessed after backtracking, as in the example above, in which the :tacn:`;` construct is reprocessed after backtracking. When a backtracking construct is within a non-backtracking construct, the latter uses the :gdef:`first success`. Backtracking to a point within a non-backtracking construct won't change the branch that was selected by the non-backtracking construct. The :tacn:`once` tactic stops further backtracking to backtracking points within that tactic. Branching with backtracking: + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We can branch with backtracking with the following structure: .. 
tacn:: @ltac_expr1 + {| @ltac_expr2 | @binder_tactic } :name: + (backtracking branching) Evaluates and applies :n:`@ltac_expr1` to each focused goal independently. If it fails (i.e. there is no initial success), then evaluates and applies the right-hand side. If the right-hand side fails, the construct fails. If :n:`ltac_expr1` has an initial success and a subsequent tactic (outside the `+` construct) fails, |Ltac| backtracks and selects the next success for :n:`ltac_expr1`. If there are no more successes, then `+` similarly evaluates and applies (and backtracks in) the right-hand side. To prevent evaluation of further alternatives after an initial success for a tactic, use :tacn:`first` instead. `+` is left-associative. In all cases, :n:`(@ltac_expr__1 + @ltac_expr__2); @ltac_expr__3` is equivalent to :n:`(@ltac_expr__1; @ltac_expr__3) + (@ltac_expr__2; @ltac_expr__3)`. Additionally, in most cases, :n:`(@ltac_expr__1 + @ltac_expr__2) + @ltac_expr__3` is equivalent to :n:`@ltac_expr__1 + (@ltac_expr__2 + @ltac_expr__3)`. Here's an example where the behavior differs slightly: .. coqtop:: reset none Goal True. .. coqtop:: all Fail (fail 2 + idtac) + idtac. Fail fail 2 + (idtac + idtac). .. example:: Backtracking branching with + In the first tactic, `idtac "2"` is not executed. In the second, the subsequent `fail` causes backtracking and the execution of `idtac "B"`. .. coqtop:: reset none Goal True. .. coqtop:: all idtac "1" + idtac "2". assert_fails ((idtac "A" + idtac "B"); fail). First tactic to succeed ~~~~~~~~~~~~~~~~~~~~~~~ In some cases backtracking may be too expensive. .. tacn:: first [ {*| @ltac_expr } ] For each focused goal, independently apply the first :token:`ltac_expr` that succeeds. The :n:`@ltac_expr`\s must evaluate to tactic values. Failures in tactics after the :tacn:`first` won't cause backtracking. (To allow backtracking, use the :tacn:`+<+ (backtracking branching)>` construct above instead.) If the :tacn:`first` contains a tactic that can backtrack, "success" means the first success of that tactic. Consider the following: .. example:: Backtracking inside a non-backtracking construct .. coqtop:: reset none Goal True. The :tacn:`fail` doesn't trigger the second :tacn:`idtac`: .. coqtop:: all assert_fails (first [ idtac "1" | idtac "2" ]; fail). This backtracks within `(idtac "1A" + idtac "1B" + fail)` but :tacn:`first` won't consider the `idtac "2"` alternative: .. coqtop:: all assert_fails (first [ (idtac "1A" + idtac "1B" + fail) | idtac "2" ]; fail). :tacn:`first` is an :token:`l1_tactic`. .. exn:: No applicable tactic. :undocumented: .. todo the following is not accepted as a regular tactic but it does seem to do something see https://github.com/coq/coq/pull/12103#discussion_r422249862. Probably the same thing as for the :tacv:`solve` below. The code is in Coretactics.initial_tacticals .. tacv:: first @ltac_expr This is an |Ltac| alias that gives a primitive access to the first tactical as an |Ltac| definition without going through a parsing rule. It expects to be given a list of tactics through a :cmd:`Tactic Notation` command, permitting notations with the following form to be written: .. example:: .. coqtop:: in Tactic Notation "foo" tactic_list(tacs) := first tacs. Solving ~~~~~~~ Selects and applies the first tactic that solves each goal (i.e. leaves no subgoal) in a series of alternative tactics: .. tacn:: solve [ {*| @ltac_expr__i } ] For each current subgoal: evaluates and applies each :n:`@ltac_expr` in order until one is found that solves the subgoal. 
If any of the subgoals are not solved, then the overall :tacn:`solve` fails. .. note:: In :tacn:`solve` and :tacn:`first`, :n:`@ltac_expr`\s that don't evaluate to tactic values are ignored. So :tacn:`solve` `[ () | 1 |` :tacn:`constructor` `]` is equivalent to :tacn:`solve` `[` :tacn:`constructor` `]`. This may make it harder to debug scripts that inadvertently include non-tactic values. .. todo check the behavior of other constructs see https://github.com/coq/coq/pull/12103#discussion_r436320430 :tacn:`solve` is an :token:`l1_tactic`. .. tacv:: solve @ltac_expr This is an |Ltac| alias that gives a primitive access to the :tacn:`solve` tactic. See the :tacn:`first` tactic for more information. First tactic to make progress: || ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Yet another way of branching without backtracking is the following structure: .. tacn:: @ltac_expr1 %|| {| @ltac_expr2 | @binder_tactic } :name: || (first tactic making progress) :n:`@ltac_expr1 || @ltac_expr2` is equivalent to :n:`first [ progress @ltac_expr1 | @ltac_expr2 ]`, except that if it fails, it fails like :n:`@ltac_expr2`. `||` is left-associative. :n:`@ltac_expr`\s that don't evaluate to tactic values are ignored. See the note at :tacn:`solve`. Conditional branching: tryif ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: tryif @ltac_expr__test then @ltac_expr__then else @ltac_expr2__else For each focused goal, independently: Evaluate and apply :n:`@ltac_expr__test`. If :n:`@ltac_expr__test` succeeds at least once, evaluate and apply :n:`@ltac_expr__then` to all the subgoals generated by :n:`@ltac_expr__test`. Otherwise, evaluate and apply :n:`@ltac_expr2__else` to all the subgoals generated by :n:`@ltac_expr__test`. :tacn:`tryif` is an :token:`l2_tactic`. .. multigoal example - not sure it adds much Goal True /\ False. split; tryif match goal with | |- True => idtac "True" | |- False => idtac "False" end then idtac "then" else idtac "else". Soft cut: once ~~~~~~~~~~~~~~ .. todo Would like a different subsection title above. I have trouble distinguishing once and exactly_once. We need to explain backtracking somewhere. See https://github.com/coq/coq/pull/12103#discussion_r422360181 Another way of restricting backtracking is to restrict a tactic to a single success: .. tacn:: once @ltac_expr3 :n:`@ltac_expr3` is evaluated to ``v`` which must be a tactic value. The tactic value ``v`` is applied but only its first success is used. If ``v`` fails, :tacn:`once` :n:`@ltac_expr3` fails like ``v``. If ``v`` has at least one success, :tacn:`once` :n:`@ltac_expr3` succeeds once, but cannot produce more successes. :tacn:`once` is an :token:`l3_tactic`. Checking for a single success: exactly_once ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Coq provides an experimental way to check that a tactic has *exactly one* success: .. tacn:: exactly_once @ltac_expr3 :n:`@ltac_expr3` is evaluated to ``v`` which must be a tactic value. The tactic value ``v`` is applied if it has at most one success. If ``v`` fails, :tacn:`exactly_once` :n:`@ltac_expr3` fails like ``v``. If ``v`` has exactly one success, :tacn:`exactly_once` :n:`@ltac_expr3` succeeds like ``v``. If ``v`` has two or more successes, :tacn:`exactly_once` :n:`@ltac_expr3` fails. :tacn:`exactly_once` is an :token:`l3_tactic`. .. warning:: The experimental status of this tactic pertains to the fact that, if ``v`` has side effects, they may occur in an unpredictable way.
Indeed, normally ``v`` would only be executed up to the first success until backtracking is needed, however :tacn:`exactly_once` needs to look ahead to see whether a second success exists, and may run further effects immediately. .. exn:: This tactic has more than one success. :undocumented: Checking for failure: assert_fails ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Coq defines an |Ltac| tactic in `Init.Tactics` to check that a tactic *fails*: .. tacn:: assert_fails @ltac_expr3 If :n:`@ltac_expr3` fails, the proof state is unchanged and no message is printed. If :n:`@ltac_expr3` unexpectedly has at least one success, the tactic performs a :tacn:`gfail` :n:`0`, printing the following message: .. exn:: Tactic failure: succeeds. :undocumented: .. note:: :tacn:`assert_fails` and :tacn:`assert_succeeds` work as described when :token:`ltac_expr3` is a :token:`simple_tactic`. In some more complex expressions, they may report an error from within :token:`ltac_expr3` when they shouldn't. This is due to the order in which parts of the :token:`ltac_expr3` are evaluated and executed. For example: .. coqtop:: reset none Goal True. .. coqtop:: all fail assert_fails match True with _ => fail end. should not show any message. The issue is that :tacn:`assert_fails` is an |Ltac|-defined tactic. That makes it a function that's processed in the evaluation phase, causing the :tacn:`match` to find its first success earlier. One workaround is to prefix :token:`ltac_expr3` with "`idtac;`". .. coqtop:: all assert_fails (idtac; match True with _ => fail end). Alternatively, substituting the :tacn:`match` into the definition of :tacn:`assert_fails` works as expected: .. coqtop:: all tryif (once match True with _ => fail end) then gfail 0 (* tac *) "succeeds" else idtac. Checking for success: assert_succeeds ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Coq defines an |Ltac| tactic in `Init.Tactics` to check that a tactic has *at least one* success: .. tacn:: assert_succeeds @ltac_expr3 If :n:`@ltac_expr3` has at least one success, the proof state is unchanged and no message is printed. If :n:`@ltac_expr3` fails, the tactic performs a :tacn:`gfail` :n:`0`, printing the following message: .. exn:: Tactic failure: fails. :undocumented: Print/identity tactic: idtac ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: idtac {* {| @ident | @string | @natural } } Leaves the proof unchanged and prints the given tokens. :token:`String`\s and :token:`natural`\s are printed literally. If :token:`ident` is an |Ltac| variable, its contents are printed; if not, it is an error. :tacn:`idtac` is an :token:`l1_tactic`. Failing ~~~~~~~ .. tacn:: {| fail | gfail } {? @nat_or_var } {* {| @ident | @string | @natural } } :name: fail; gfail :tacn:`fail` is the always-failing tactic: it does not solve any goal. It is useful for defining other tactics since it can be caught by :tacn:`try`, :tacn:`repeat`, :tacn:`match goal`, or the branching tacticals. :tacn:`gfail` fails even when used after :n:`;` and there are no goals left. Similarly, :tacn:`gfail` fails even when used after ``all:`` and there are no goals left. :tacn:`fail` and :tacn:`gfail` are :token:`l1_tactic`\s. See the example for a comparison of the two constructs. Note that if Coq terms have to be printed as part of the failure, term construction always forces the tactic into the goals, meaning that if there are no goals when it is evaluated, a tactic call like :tacn:`let` :n:`x := H in` :tacn:`fail` `0 x` will succeed. :n:`@nat_or_var` The failure level. If no level is specified, it defaults to 0. 
The level is used by :tacn:`try`, :tacn:`repeat`, :tacn:`match goal` and the branching tacticals. If 0, it makes :tacn:`match goal` consider the next clause (backtracking). If nonzero, the current :tacn:`match goal` block, :tacn:`try`, :tacn:`repeat`, or branching command is aborted and the level is decremented. In the case of :n:`+`, a nonzero level skips the first backtrack point, even if the call to :tacn:`fail` :n:`@natural` is not enclosed in a :n:`+` construct, respecting the algebraic identity. :n:`{* {| @ident | @string | @natural } }` The given tokens are used for printing the failure message. If :token:`ident` is an |Ltac| variable, its contents are printed; if not, it is an error. .. exn:: Tactic failure. :undocumented: .. exn:: Tactic failure (level @natural). :undocumented: .. exn:: No such goal. :name: No such goal. (fail) :undocumented: .. example:: .. todo the example is too long; could show the Goal True. Proof. once and hide the Aborts to shorten it. And add a line of text before each subexample. Perhaps add some very short explanations/generalizations (e.g. gfail always fails; "tac; fail" succeeds but "fail." alone fails. .. coqtop:: reset all fail Goal True. Proof. fail. Abort. Goal True. Proof. trivial; fail. Qed. Goal True. Proof. trivial. fail. Abort. Goal True. Proof. trivial. all: fail. Qed. Goal True. Proof. gfail. Abort. Goal True. Proof. trivial; gfail. Abort. Goal True. Proof. trivial. gfail. Abort. Goal True. Proof. trivial. all: gfail. Abort. Timeout ~~~~~~~ We can force a tactic to stop if it has not finished after a certain amount of time: .. tacn:: timeout @nat_or_var @ltac_expr3 :n:`@ltac_expr3` is evaluated to ``v`` which must be a tactic value. The tactic value ``v`` is applied normally, except that it is interrupted after :n:`@nat_or_var` seconds if it is still running. In this case the outcome is a failure. :tacn:`timeout` is an :token:`l3_tactic`. .. warning:: For the moment, timeout is based on elapsed time in seconds, which is very machine-dependent: a script that works on a quick machine may fail on a slow one. The converse is even possible if you combine a timeout with some other tacticals. This tactical is hence proposed only for convenience during debugging or other development phases, we strongly advise you to not leave any timeout in final scripts. Note also that this tactical isn’t available on the native Windows port of Coq. Timing a tactic ~~~~~~~~~~~~~~~ A tactic execution can be timed: .. tacn:: time {? @string } @ltac_expr3 evaluates :n:`@ltac_expr3` and displays the running time of the tactic expression, whether it fails or succeeds. In case of several successes, the time for each successive run is displayed. Time is in seconds and is machine-dependent. The :n:`@string` argument is optional. When provided, it is used to identify this particular occurrence of :tacn:`time`. :tacn:`time` is an :token:`l3_tactic`. Timing a tactic that evaluates to a term: time_constr ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tactic expressions that produce terms can be timed with the experimental tactic .. tacn:: time_constr @ltac_expr which evaluates :n:`@ltac_expr ()` and displays the time the tactic expression evaluated, assuming successful evaluation. Time is in seconds and is machine-dependent. This tactic currently does not support nesting, and will report times based on the innermost execution. This is due to the fact that it is implemented using the following internal tactics: .. tacn:: restart_timer {? @string } Reset a timer .. 
tacn:: finish_timing {? ( @string ) } {? @string } Display an optionally named timer. The parenthesized string argument is also optional, and determines the label associated with the timer for printing. By copying the definition of :tacn:`time_constr` from the standard library, users can achieve support for a fixed pattern of nesting by passing different :token:`string` parameters to :tacn:`restart_timer` and :tacn:`finish_timing` at each level of nesting. .. example:: .. coqtop:: all abort Ltac time_constr1 tac := let eval_early := match goal with _ => restart_timer "(depth 1)" end in let ret := tac () in let eval_early := match goal with _ => finish_timing ( "Tactic evaluation" ) "(depth 1)" end in ret. Goal True. let v := time_constr ltac:(fun _ => let x := time_constr1 ltac:(fun _ => constr:(10 * 10)) in let y := time_constr1 ltac:(fun _ => eval compute in x) in y) in pose v. Local definitions: let ~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: let {? rec } @let_clause {* with @let_clause } in @ltac_expr .. insertprodn let_clause let_clause .. prodn:: let_clause ::= @name := @ltac_expr | @ident {+ @name } := @ltac_expr Binds symbols within :token:`ltac_expr`. :tacn:`let` evaluates each :n:`@let_clause`, substitutes the bound variables into :n:`@ltac_expr` and then evaluates :n:`@ltac_expr`. There are no dependencies between the :n:`@let_clause`\s. Use :tacn:`let` `rec` to create recursive or mutually recursive bindings, which causes the definitions to be evaluated lazily. :tacn:`let` is a :token:`binder_tactic`. Function construction and application ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A parameterized tactic can be built anonymously (without resorting to local definitions) with: .. tacn:: fun {+ @name } => @ltac_expr Indeed, local definitions of functions are syntactic sugar for binding a :n:`fun` tactic to an identifier. :tacn:`fun` is a :token:`binder_tactic`. Functions can return values of any type. A function application is an expression of the form: .. tacn:: @qualid {+ @tactic_arg } :n:`@qualid` must be bound to a |Ltac| function with at least as many arguments as the provided :n:`@tactic_arg`\s. The :n:`@tactic_arg`\s are evaluated before the function is applied or partially applied. Functions may be defined with the :tacn:`fun` and :tacn:`let` tactics and with the :cmd:`Ltac` command. .. todo above: note "gobble" corner case https://github.com/coq/coq/pull/12103#discussion_r436414417 Pattern matching on terms: match ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: @match_key @ltac_expr__term with {? %| } {+| @match_pattern => @ltac_expr } end :name: lazymatch; match; multimatch .. insertprodn match_key cpattern .. prodn:: match_key ::= lazymatch | match | multimatch match_pattern ::= @cpattern | context {? @ident } [ @cpattern ] cpattern ::= @term :tacn:`lazymatch`, :tacn:`match` and :tacn:`multimatch` are :token:`ltac_expr1`\s. Evaluates :n:`@ltac_expr__term`, which must yield a term, and matches it sequentially with the :token:`match_pattern`\s, which may have metavariables. When a match is found, metavariable values are substituted into :n:`@ltac_expr`, which is then applied. Matching may continue depending on whether `lazymatch`, `match` or `multimatch` is specified. In the :token:`match_pattern`\s, metavariables have the form :n:`?@ident`, whereas in the :n:`@ltac_expr`\s, the question mark is omitted. Choose your metavariable names with care to avoid name conflicts. 
For example, if you use the metavariable `S`, then the :token:`ltac_expr` can't use `S` to refer to the constructor of `nat` without qualifying the constructor as `Datatypes.S`. .. todo how does this differ from the 1-2 other unification routines elsewhere in Coq? Does it use constr_eq or eq_constr_nounivs? Matching is non-linear: if a metavariable occurs more than once, each occurrence must match the same expression. Expressions match if they are syntactically equal or are :term:`α-convertible`. Matching is first-order except on variables of the form :n:`@?@ident` that occur in the head position of an application. For these variables, matching is second-order and returns a functional term. .. todo 30 May 20: the `@?ident` form is in dangling_pattern_extension_rule, not included in the doc yet maybe belongs with "Applications" `lazymatch` Causes the match to commit to the first matching branch rather than trying a new match if :n:`@ltac_expr` fails. :ref:`Example <match_vs_lazymatch_ex>`. `match` If :n:`@ltac_expr` fails, continue matching with the next branch. Failures in subsequent tactics (after the `match`) will not cause selection of a new branch. Examples :ref:`here <match_vs_lazymatch_ex>` and :ref:`here <match_vs_multimatch_ex>`. `multimatch` If :n:`@ltac_expr` fails, continue matching with the next branch. When an :n:`@ltac_expr` succeeds for a branch, subsequent failures (after the `multimatch`) causing consumption of all the successes of :n:`@ltac_expr` trigger selection of a new matching branch. :ref:`Example <match_vs_multimatch_ex>`. :tacn:`match` :n:`…` is, in fact, shorthand for :tacn:`once` :tacn:`multimatch` `…`. :n:`@cpattern` The syntax of :token:`cpattern` is the same as that of :token:`term`\s, but it can contain pattern matching metavariables in the form :n:`?@ident`. :g:`_` can be used to match irrelevant terms. :ref:`Example <match_with_holes_ex>`. .. todo Didn't understand the following 2 paragraphs well enough to revise see https://github.com/coq/coq/pull/12103#discussion_r436297754 for a possible example When a metavariable in the form :n:`?id` occurs under binders, say :n:`x__1, …, x__n` and the expression matches, the metavariable is instantiated by a term which can then be used in any context which also binds the variables :n:`x__1, …, x__n` with the same types. This provides a primitive form of matching under context which does not require manipulating a functional term. There is also a special notation for second-order pattern matching: in an applicative pattern of the form :n:`@?@ident @ident__1 … @ident__n`, the variable :token:`ident` matches any complex expression with (possible) dependencies in the variables :n:`@ident__i` and returns a functional term of the form :n:`fun @ident__1 … @ident__n => @term`. .. _match_term_context: :n:`context {? @ident } [ @cpattern ]` Matches any term with a subterm matching :token:`cpattern`. If there is a match and :n:`@ident` is present, it is assigned the "matched context", i.e. the initial term where the matched subterm is replaced by a hole. Note that `context` (with very similar syntax) appearing after the `=>` is the :tacn:`context` tactic. For :tacn:`match` and :tacn:`multimatch`, if the evaluation of the :token:`ltac_expr` fails, the next matching subterm is tried. If no further subterm matches, the next branch is tried. Matching subterms are considered from top to bottom and from left to right (with respect to the raw printing obtained by setting the :flag:`Printing All` flag). :ref:`Example <match_term_context_ex>`. ..
todo There's a more realistic example from @JasonGross here: https://github.com/coq/coq/pull/12103#discussion_r432996954 :n:`@ltac_expr` The tactic to apply if the construct matches. Metavariable values from the pattern match are substituted into :n:`@ltac_expr` before it's applied. Note that metavariables are not prefixed with the question mark as they are in :token:`cpattern`. If :token:`ltac_expr` evaluates to a tactic, then it is applied. If the tactic succeeds, the result of the match expression is :tacn:`idtac`. If :token:`ltac_expr` does not evaluate to a tactic, that value is the result of the match expression. If :n:`@ltac_expr` is a tactic with backtracking points, then subsequent failures after a :tacn:`lazymatch` or :tacn:`multimatch` (but not :tacn:`match`) can cause backtracking into :n:`@ltac_expr` to select its next success. (:tacn:`match` :n:`…` is equivalent to :tacn:`once` :tacn:`multimatch` `…`. The :tacn:`once` prevents backtracking into the :tacn:`match` after it has succeeded.) .. note:: Each |Ltac| construct is processed in two phases: an evaluation phase and an execution phase. In most cases, tactics that may change the proof state are applied in the second phase. (Tactics that generate integer, string or syntactic values, such as :tacn:`fresh`, are processed during the evaluation phase.) Unlike other tactics, `*match*` tactics get their first success (applying tactics to do so) as part of the evaluation phase. Among other things, this can affect how early failures are processed in :tacn:`assert_fails`. Please see the note in :tacn:`assert_fails`. .. exn:: Expression does not evaluate to a tactic. :n:`@ltac_expr` must evaluate to a tactic. .. exn:: No matching clauses for match. For at least one of the focused goals, there is no branch that matches its pattern *and* gets at least one success for :n:`@ltac_expr`. .. exn:: Argument of match does not evaluate to a term. This happens when :n:`@ltac_expr__term` does not denote a term. .. _match_vs_lazymatch_ex: .. example:: Comparison of lazymatch and match In :tacn:`lazymatch`, if :token:`ltac_expr` fails, the :tacn:`lazymatch` fails; it doesn't look for further matches. In :tacn:`match`, if :token:`ltac_expr` fails in a matching branch, it will try to match on subsequent branches. .. coqtop:: reset none Goal True. .. coqtop:: all Fail lazymatch True with | True => idtac "branch 1"; fail | _ => idtac "branch 2" end. .. coqtop:: all match True with | True => idtac "branch 1"; fail | _ => idtac "branch 2" end. .. _match_vs_multimatch_ex: .. example:: Comparison of match and multimatch :tacn:`match` tactics are only evaluated once, whereas :tacn:`multimatch` tactics may be evaluated more than once if the following constructs trigger backtracking: .. coqtop:: all Fail match True with | True => idtac "branch 1" | _ => idtac "branch 2" end ; idtac "branch A"; fail. .. coqtop:: all Fail multimatch True with | True => idtac "branch 1" | _ => idtac "branch 2" end ; idtac "branch A"; fail. .. _match_with_holes_ex: .. example:: Matching a pattern with holes Notice the :tacn:`idtac` prints ``(z + 1)`` while the :tacn:`pose` substitutes ``(x + 1)``. .. coqtop:: in reset Goal True. .. coqtop:: all match constr:(fun x => (x + 1) * 3) with | fun z => ?y * 3 => idtac "y =" y; pose (fun z: nat => y * 5) end. .. _match_term_context_ex: .. example:: Multiple matches for a "context" pattern. Internally "x <> y" is represented as "(~ (x = y))", which produces the first match. .. 
coqtop:: in reset Ltac f t := match t with | context [ (~ ?t) ] => idtac "?t = " t; fail | _ => idtac end. Goal True. .. coqtop:: all f ((~ True) <> (~ False)). .. _ltac-match-goal: Pattern matching on goals and hypotheses: match goal ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: @match_key {? reverse } goal with {? %| } {+| @goal_pattern => @ltac_expr } end :name: lazymatch goal; match goal; multimatch goal .. insertprodn goal_pattern match_hyp .. prodn:: goal_pattern ::= {*, @match_hyp } %|- @match_pattern | [ {*, @match_hyp } %|- @match_pattern ] | _ match_hyp ::= @name : @match_pattern | @name := @match_pattern | @name := [ @match_pattern ] : @match_pattern :tacn:`lazymatch goal`, :tacn:`match goal` and :tacn:`multimatch goal` are :token:`l1_tactic`\s. Use this form to match hypotheses and/or goals in the local context. These patterns have zero or more subpatterns to match hypotheses followed by a subpattern to match the conclusion. Except for the differences noted below, this works the same as the corresponding :n:`@match_key @ltac_expr` construct (see :tacn:`match`). Each current goal is processed independently. Matching is non-linear: if a metavariable occurs more than once, each occurrence must match the same expression. Within a single term, expressions match if they are syntactically equal or :term:`α-convertible`. When a metavariable is used across multiple hypotheses or across a hypothesis and the current goal, the expressions match if they are :term:`convertible`. :n:`{*, @match_hyp }` Patterns to match with hypotheses. Each pattern must match a distinct hypothesis in order for the branch to match. Hypotheses have the form :n:`@name {? := @term__binder } : @type`. Patterns bind each of these nonterminals separately: .. list-table:: :widths: 2 1 :header-rows: 1 * - Pattern syntax - Example pattern * - :n:`@name : @match_pattern__type` - `n : ?t` * - :n:`@name := @match_pattern__binder` - `n := ?b` * - :n:`@name := @term__binder : @type` - `n := ?b : ?t` * - :n:`@name := [ @match_pattern__binder ] : @match_pattern__type` - `n := [ ?b ] : ?t` .. :token:`name` can't have a `?`. Note that the last two forms are equivalent except that: - if the `:` in the third form has been bound to something else in a notation, you must use the fourth form. Note that :cmd:`Require Import` `ssreflect` loads a notation that does this. - a :n:`@term__binder` such as `[ ?l ]` (e.g., denoting a singleton list after :cmd:`Import` `ListNotations`) must be parenthesized or, for the fourth form, use double brackets: `[ [ ?l ] ]`. :n:`@term__binder`\s in the form `[?x ; ?y]` for a list are not parsed correctly. The workaround is to add parentheses or to use the underlying term instead of the notation, i.e. `(cons ?x ?y)`. If there are multiple :token:`match_hyp`\s in a branch, there may be multiple ways to match them to hypotheses. For :tacn:`match goal` and :tacn:`multimatch goal`, if the evaluation of the :token:`ltac_expr` fails, matching will continue with the next hypothesis combination. When those are exhausted, the next alternative from any `context` constructs in the :token:`match_pattern`\s is tried and then, when the context alternatives are exhausted, the next branch is tried. :ref:`Example <match_goal_multiple_hyps_ex>`. `reverse` Hypothesis matching for :token:`match_hyp`\s normally begins by matching them, from left to right, to the hypotheses, last to first. Specifying `reverse` begins matching in the reverse order, from first to last. :ref:`Normal <match_goal_hyps_ex>` and :ref:`reverse <match_goal_hyps_rev_ex>` examples.
:n:`|- @match_pattern` A pattern to match with the current goal. :n:`@goal_pattern with [ ... ]` The square brackets don't affect the semantics. They are permitted for aesthetics. .. exn:: No matching clauses for match goal. No clause succeeds, i.e. all matching patterns, if any, fail at the application of the :token:`ltac_expr`. Examples: .. _match_goal_hyps_ex: .. example:: Matching hypotheses Hypotheses are matched from the last hypothesis (which is by default the newest hypothesis) to the first until the :tacn:`apply` succeeds. .. coqtop:: reset all Goal forall A B : Prop, A -> B -> (A->B). intros. match goal with | H : _ |- _ => idtac "apply " H; apply H end. .. _match_goal_hyps_rev_ex: .. example:: Matching hypotheses with reverse Hypotheses are matched from the first hypothesis to the last until the :tacn:`apply` succeeds. .. coqtop:: reset all Goal forall A B : Prop, A -> B -> (A->B). intros. match reverse goal with | H : _ |- _ => idtac "apply " H; apply H end. .. _match_goal_multiple_hyps_ex: .. example:: Multiple ways to match hypotheses Every possible match for the hypotheses is evaluated until the right-hand side succeeds. Note that `H1` and `H2` are never matched to the same hypothesis. Observe that the number of permutations can grow as the factorial of the number of hypotheses and hypothesis patterns. .. coqtop:: reset all Goal forall A B : Prop, A -> B -> (A->B). intros A B H. match goal with | H1 : _, H2 : _ |- _ => idtac "match " H1 H2; fail | _ => idtac end. .. todo need examples for: match_context_rule ::= [ {*, @match_hyp } |- @match_pattern ] => @ltac_expr match_hyp ::= | @name := {? [ @match_pattern ] : } @match_pattern .. todo The following items (up to numgoals) are part of "value_tactic". I'd like to make this a subsection and explain that they all return values. How do I get a 5th-level section title? Filling a term context ~~~~~~~~~~~~~~~~~~~~~~ The following expression is not a tactic in the sense that it does not produce subgoals but generates a term to be used in tactic expressions: .. tacn:: context @ident [ @term ] Returns the term matched with the `context` pattern (described :ref:`here <match_term_context>`), substituting :token:`term` for the hole created by the pattern. :tacn:`context` is a :token:`value_tactic`. .. exn:: Not a context variable. :undocumented: .. exn:: Unbound context identifier @ident. :undocumented: .. example:: Substituting a matched context .. coqtop:: reset all Goal True /\ True. match goal with | |- context G [True] => let x := context G [False] in idtac x end. Generating fresh hypothesis names ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tactics sometimes need to generate new names for hypotheses. Letting Coq choose a name with the intro tactic is not ideal, since it is awkward to retrieve that name afterwards. The following expression returns an identifier: .. tacn:: fresh {* {| @string | @qualid } } .. todo you can't have a :tacn: with the same name as a :gdef: for now, eg `fresh` can't be both Returns a fresh identifier name (i.e. one that is not already used in the local context and not previously returned by :tacn:`fresh` in the current :token:`ltac_expr`). The fresh identifier is formed by concatenating the final :token:`ident` of each :token:`qualid` (dropping any qualified components) and each specified :token:`string`. If the resulting name is already used, a number is appended to make it fresh. If no arguments are given, the name is a fresh derivative of the name ``H``. ..
note:: We recommend generating the fresh identifier immediately before adding it to the local context. Using :tacn:`fresh` in a local function may not work as you expect: Successive calls to :tacn:`fresh` give distinct names even if the names haven't yet been added to the local context: .. coqtop:: reset none Goal True -> True. .. coqtop:: out intro x. .. coqtop:: all let a := fresh "x" in let b := fresh "x" in idtac a b. When applying :tacn:`fresh` in a function, the name is chosen based on the tactic context at the point where the function was defined: .. coqtop:: all let a := fresh "x" in let f := fun _ => fresh "x" in let c := f () in let d := f () in idtac a c d. :tacn:`fresh` is a :token:`value_tactic`. Computing in a term: eval ~~~~~~~~~~~~~~~~~~~~~~~~~ Evaluation of a term can be performed with: :n:`eval @red_expr in @term` See :tacn:`eval`. :tacn:`eval` is a :token:`value_tactic`. Getting the type of a term ~~~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: type of @term This tactic returns the type of :token:`term`. :tacn:`type of` is a :token:`value_tactic`. Manipulating untyped terms: type_term ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The :n:`uconstr : ( @term )` construct can be used to build an untyped term. See :token:`syn_value`. .. tacn:: type_term @one_term In |Ltac|, an untyped term can contain references to hypotheses or to |Ltac| variables containing typed or untyped terms. An untyped term can be type checked with :tacn:`type_term` whose argument is parsed as an untyped term and returns a well-typed term which can be used in tactics. :tacn:`type_term` is a :token:`value_tactic`. Counting goals: numgoals ~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: numgoals The number of goals under focus can be recovered using the :n:`numgoals` function. Combined with the :tacn:`guard` tactic below, it can be used to branch over the number of goals produced by previous tactics. :tacn:`numgoals` is a :token:`value_tactic`. .. example:: .. coqtop:: reset in Ltac pr_numgoals := let n := numgoals in idtac "There are" n "goals". Goal True /\ True /\ True. split;[|split]. .. coqtop:: all abort all:pr_numgoals. Testing boolean expressions: guard ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: guard @int_or_var @comparison @int_or_var .. insertprodn int_or_var comparison .. prodn:: int_or_var ::= {| @integer | @ident } comparison ::= = | < | <= | > | >= Tests a boolean expression. If the expression evaluates to true, it succeeds without affecting the proof. The tactic fails if the expression is false. The accepted tests are simple integer comparisons. .. todo why doesn't it support = and <> as well? .. example:: guard .. coqtop:: in Goal True /\ True /\ True. split;[|split]. .. coqtop:: all all:let n:= numgoals in guard n<4. Fail all:let n:= numgoals in guard n=2. .. exn:: Condition not satisfied. :undocumented: Checking properties of terms ---------------------------- Each of the following tactics acts as the identity if the check succeeds, and results in an error otherwise. .. tacn:: constr_eq_strict @one_term @one_term Succeeds if the arguments are equal modulo alpha conversion and ignoring casts. Universes are considered equal when they are equal in the universe graph. .. exn:: Not equal. :undocumented: .. exn:: Not equal (due to universes). :undocumented: .. tacn:: constr_eq @one_term @one_term Like :tacn:`constr_eq_strict`, but may add constraints to make universes equal. .. exn:: Not equal. :undocumented: .. exn:: Not equal (due to universes). :undocumented: .. 
tacn:: constr_eq_nounivs @one_term @one_term Like :tacn:`constr_eq_strict`, but all universes are considered equal. .. tacn:: unify @one_term @one_term {? with @ident } Succeeds if the arguments are unifiable, potentially instantiating existential variables, and fails otherwise. :n:`@ident`, if specified, is the name of the :ref:`hint database ` that specifies which definitions are transparent. Otherwise, all definitions are considered transparent. Unification only expands transparent definitions while matching the two :n:`@one_term`\s. .. tacn:: is_evar @one_term Succeeds if :n:`@one_term` is an existential variable and otherwise fails. Existential variables are uninstantiated variables generated by :tacn:`eapply` and some other tactics. .. exn:: Not an evar. :undocumented: .. tacn:: has_evar @one_term Succeeds if :n:`@one_term` has an existential variable as a subterm and fails otherwise. Unlike context patterns combined with ``is_evar``, this tactic scans all subterms, including those under binders. .. exn:: No evars. :undocumented: .. tacn:: is_ground @one_term The negation of :n:`has_evar @one_term`. Succeeds if :n:`@one_term` does not have an existential variable as a subterm and fails otherwise. .. exn:: Not ground. :undocumented: .. tacn:: is_var @one_term Succeeds if :n:`@one_term` is a variable or hypothesis in the current local context and fails otherwise. .. exn:: Not a variable or hypothesis. :undocumented: .. tacn:: is_const @one_term Succeeds if :n:`@one_term` is a global constant that is neither a (co)inductive type nor a constructor and fails otherwise. .. exn:: not a constant. :undocumented: .. tacn:: is_fix @one_term Succeeds if :n:`@one_term` is a `fix` construct (see :n:`@term_fix`) and fails otherwise. Fails for `let fix` forms. .. exn:: not a fix definition. :undocumented: .. example:: is_fix .. coqtop:: reset in Goal True. is_fix (fix f (n : nat) := match n with S n => f n | O => O end). .. tacn:: is_cofix @one_term :undocumented: Succeeds if :n:`@one_term` is a `cofix` construct (see :n:`@term_cofix`) and fails otherwise. Fails for `let cofix` forms. .. exn:: not a cofix definition. :undocumented: .. example:: is_cofix .. coqtop:: reset in Require Import Coq.Lists.Streams. Goal True. let c := constr:(cofix f : Stream unit := Cons tt f) in is_cofix c. .. tacn:: is_constructor @one_term Succeeds if :n:`@one_term` is the constructor of a (co)inductive type and fails otherwise. .. exn:: not a constructor. :undocumented: .. tacn:: is_ind @one_term Succeeds if :n:`@one_term` is a (co)inductive type (family) and fails otherwise. Note that `is_ind (list nat)` fails even though `is_ind list` succeeds, because `list nat` is an application. .. exn:: not an (co)inductive datatype. :undocumented: .. tacn:: is_proj @one_term Succeeds if :n:`@one_term` is a primitive projection applied to a record argument and fails otherwise. .. exn:: not a primitive projection. :undocumented: .. example:: is_proj .. coqtop:: reset in Set Primitive Projections. Record Box {T : Type} := box { unbox : T }. Arguments box {_} _. Goal True. is_proj (unbox (box 0)). Proving a subgoal as a separate lemma: abstract ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: abstract @ltac_expr2 {? using @ident__name } Does a :tacn:`solve` :n:`[ @ltac_expr2 ]` and saves the subproof as an auxiliary lemma. if :n:`@ident__name` is specified, the lemma is saved with that name; otherwise the lemma is saved with the name :n:`@ident`\ `_subproof`\ :n:`{? 
@natural }` where :token:`ident` is the name of the current goal (e.g. the theorem name) and :token:`natural` is chosen to get a fresh name. If the proof is closed with :cmd:`Qed`, the auxiliary lemma is inlined in the final proof term. This is useful with tactics such as :tacn:`discriminate` that generate huge proof terms with many intermediate goals. It can significantly reduce peak memory use. In most cases it doesn't have a significant impact on run time. One case in which it can reduce run time is when a tactic `foo` is known to always pass type checking when it succeeds, such as in reflective proofs. In this case, the idiom ":tacn:`abstract` :tacn:`exact_no_check` `foo`" will save half the type checking time compared to ":tacn:`exact` `foo`". :tacn:`abstract` is an :token:`l3_tactic`. .. warning:: The abstract tactic, while very useful, still has some known limitations. See `#9146 <https://github.com/coq/coq/issues/9146>`_ for more details. We recommend caution when using it in some "non-standard" contexts. In particular, ``abstract`` doesn't work properly when used inside quotations ``ltac:(...)``. If used as part of typeclass resolution, it may produce incorrect terms when in polymorphic universe mode. .. warning:: Provide :n:`@ident__name` at your own risk; explicitly named and reused subterms don’t play well with asynchronous proofs. .. tacn:: transparent_abstract @ltac_expr3 {? using @ident } Like :tacn:`abstract`, but saves the subproof in a transparent lemma with a name in the form :n:`@ident`\ :n:`_subterm`\ :n:`{? @natural }`. .. warning:: Use this feature at your own risk; building computationally relevant terms with tactics is fragile, and explicitly named and reused subterms don’t play well with asynchronous proofs. .. exn:: Proof is not complete. :name: Proof is not complete. (abstract) :undocumented: Tactic toplevel definitions --------------------------- Defining |Ltac| symbols ~~~~~~~~~~~~~~~~~~~~~~~ |Ltac| toplevel definitions are made as follows: .. index:: ::= .. cmd:: Ltac @tacdef_body {* with @tacdef_body } .. insertprodn tacdef_body tacdef_body .. prodn:: tacdef_body ::= @qualid {* @name } {| := | ::= } @ltac_expr Defines or redefines an |Ltac| symbol. If the :attr:`local` attribute is specified, the definition will not be exported outside the current module. :token:`qualid` Name of the symbol being defined or redefined. For definitions, :token:`qualid` must be a simple :token:`ident`. :n:`{* @name }` If specified, the symbol defines a function with the given parameter names. If no names are specified, :token:`qualid` is assigned the value of :token:`ltac_expr`. `:=` Defines a user-defined symbol, but gives an error if the symbol has already been defined. .. todo apparent inconsistency: "Ltac intros := idtac" seems like it redefines/hides an existing tactic, but in fact it creates a tactic which can only be called by its qualified name. This is true in general of tactic notations. The only way to override most primitive tactics, and any user-defined tactic notation, is with another tactic notation. .. exn:: There is already an Ltac named @qualid :undocumented: `::=` Redefines an existing user-defined symbol, but gives an error if the symbol doesn't exist. Note that :cmd:`Tactic Notation`\s do not count as user-defined tactics for `::=`. If :attr:`local` is not specified, the redefinition applies across module boundaries. .. exn:: There is no Ltac named @qualid :undocumented: :n:`{* with @tacdef_body }` Permits definition of mutually recursive tactics. ..
note:: The following definitions are equivalent: - :n:`Ltac @qualid {+ @name } := @ltac_expr` - :n:`Ltac @qualid := fun {+ @name } => @ltac_expr` Printing |Ltac| tactics ~~~~~~~~~~~~~~~~~~~~~~~ .. cmd:: Print Ltac @qualid Defined |Ltac| functions can be displayed using this command. .. cmd:: Print Ltac Signatures This command displays a list of all user-defined tactics, with their arguments. .. _ltac-examples: Examples of using |Ltac| ------------------------- Proof that the natural numbers have at least two elements ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. example:: Proof that the natural numbers have at least two elements The first example shows how to use pattern matching over the proof context to prove that natural numbers have at least two elements. This can be done as follows: .. coqtop:: reset all Lemma card_nat : ~ exists x y : nat, forall z:nat, x = z \/ y = z. Proof. intros (x & y & Hz). destruct (Hz 0), (Hz 1), (Hz 2). At this point, the :tacn:`congruence` tactic would finish the job: .. coqtop:: all abort all: congruence. But for the purpose of the example, let's craft our own custom tactic to solve this: .. coqtop:: none Lemma card_nat : ~ exists x y : nat, forall z:nat, x = z \/ y = z. Proof. intros (x & y & Hz). destruct (Hz 0), (Hz 1), (Hz 2). .. coqtop:: all abort all: match goal with | _ : ?a = ?b, _ : ?a = ?c |- _ => assert (b = c) by now transitivity a end. all: discriminate. Notice that all the (very similar) cases coming from the three eliminations (with three distinct natural numbers) are successfully solved by a ``match goal`` structure and, in particular, with only one pattern (use of non-linear matching). Proving that a list is a permutation of a second list ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. example:: Proving that a list is a permutation of a second list Let's first define the permutation predicate: .. coqtop:: in reset Section Sort. Variable A : Set. Inductive perm : list A -> list A -> Prop := | perm_refl : forall l, perm l l | perm_cons : forall a l0 l1, perm l0 l1 -> perm (a :: l0) (a :: l1) | perm_append : forall a l, perm (a :: l) (l ++ a :: nil) | perm_trans : forall l0 l1 l2, perm l0 l1 -> perm l1 l2 -> perm l0 l2. End Sort. .. coqtop:: none Require Import List. Next we define an auxiliary tactic :g:`perm_aux` which takes an argument used to control the recursion depth. This tactic works as follows: If the lists are identical (i.e. convertible), it completes the proof. Otherwise, if the lists have identical heads, it looks at their tails. Finally, if the lists have different heads, it rotates the first list by putting its head at the end. Every time we perform a rotation, we decrement :g:`n`. When :g:`n` drops down to :g:`1`, we stop performing rotations and we fail. The idea is to give the length of the list as the initial value of :g:`n`. This way of counting the number of rotations will avoid going back to a head that had been considered before. From Section :ref:`ltac-syntax` we know that Ltac has a primitive notion of integers, but they are only used as arguments for primitive tactics and we cannot make computations with them. Thus, instead, we use Coq's natural number type :g:`nat`. .. 
coqtop:: in Ltac perm_aux n := match goal with | |- (perm _ ?l ?l) => apply perm_refl | |- (perm _ (?a :: ?l1) (?a :: ?l2)) => let newn := eval compute in (length l1) in (apply perm_cons; perm_aux newn) | |- (perm ?A (?a :: ?l1) ?l2) => match eval compute in n with | 1 => fail | _ => let l1' := constr:(l1 ++ a :: nil) in (apply (perm_trans A (a :: l1) l1' l2); [ apply perm_append | compute; perm_aux (pred n) ]) end end. The main tactic is :g:`solve_perm`. It computes the lengths of the two lists and uses them as arguments to call :g:`perm_aux` if the lengths are equal. (If they aren't, the lists cannot be permutations of each other.) .. coqtop:: in Ltac solve_perm := match goal with | |- (perm _ ?l1 ?l2) => match eval compute in (length l1 = length l2) with | (?n = ?n) => perm_aux n end end. And now, here is how we can use the tactic :g:`solve_perm`: .. coqtop:: out Goal perm nat (1 :: 2 :: 3 :: nil) (3 :: 2 :: 1 :: nil). .. coqtop:: all abort solve_perm. .. coqtop:: out Goal perm nat (0 :: 1 :: 2 :: 3 :: 4 :: 5 :: 6 :: 7 :: 8 :: 9 :: nil) (0 :: 2 :: 4 :: 6 :: 8 :: 9 :: 7 :: 5 :: 3 :: 1 :: nil). .. coqtop:: all abort solve_perm. Deciding intuitionistic propositional logic ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Pattern matching on goals allows powerful backtracking when returning tactic values. An interesting application is the problem of deciding intuitionistic propositional logic. Considering the contraction-free sequent calculi LJT* of Roy Dyckhoff :cite:`Dyc92`, it is quite natural to code such a tactic using the tactic language as shown below. .. coqtop:: in reset Ltac basic := match goal with | |- True => trivial | _ : False |- _ => contradiction | _ : ?A |- ?A => assumption end. .. coqtop:: in Ltac simplify := repeat (intros; match goal with | H : ~ _ |- _ => red in H | H : _ /\ _ |- _ => elim H; do 2 intro; clear H | H : _ \/ _ |- _ => elim H; intro; clear H | H : ?A /\ ?B -> ?C |- _ => cut (A -> B -> C); [ intro | intros; apply H; split; assumption ] | H: ?A \/ ?B -> ?C |- _ => cut (B -> C); [ cut (A -> C); [ intros; clear H | intro; apply H; left; assumption ] | intro; apply H; right; assumption ] | H0 : ?A -> ?B, H1 : ?A |- _ => cut B; [ intro; clear H0 | apply H0; assumption ] | |- _ /\ _ => split | |- ~ _ => red end). .. coqtop:: in Ltac my_tauto := simplify; basic || match goal with | H : (?A -> ?B) -> ?C |- _ => cut (B -> C); [ intro; cut (A -> B); [ intro; cut C; [ intro; clear H | apply H; assumption ] | clear H ] | intro; apply H; intro; assumption ]; my_tauto | H : ~ ?A -> ?B |- _ => cut (False -> B); [ intro; cut (A -> False); [ intro; cut B; [ intro; clear H | apply H; assumption ] | clear H ] | intro; apply H; red; intro; assumption ]; my_tauto | |- _ \/ _ => (left; my_tauto) || (right; my_tauto) end. The tactic ``basic`` tries to reason using simple rules involving truth, falsity and available assumptions. The tactic ``simplify`` applies all the reversible rules of Dyckhoff’s system. Finally, the tactic ``my_tauto`` (the main tactic to be called) simplifies with ``simplify``, tries to conclude with ``basic`` and tries several paths using the backtracking rules (one of the four Dyckhoff’s rules for the left implication to get rid of the contraction and the right ``or``). Having defined ``my_tauto``, we can prove tautologies like these: .. coqtop:: in Lemma my_tauto_ex1 : forall A B : Prop, A /\ B -> A \/ B. Proof. my_tauto. Qed. .. coqtop:: in Lemma my_tauto_ex2 : forall A B : Prop, (~ ~ B -> B) -> (A -> B) -> ~ ~ A -> B. Proof. my_tauto. Qed. 
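As one more illustration (a lemma of our own, not part of the original set; the name ``my_tauto_ex3`` is arbitrary), the backtracking disjunction rule is exercised below: the ``left`` alternative fails, and ``my_tauto`` falls back to ``right``, which closes the goal by assumption:

.. coqtop:: in

   Lemma my_tauto_ex3 : forall A B : Prop, B -> A \/ B.
   Proof. my_tauto. Qed.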
Deciding type isomorphisms ~~~~~~~~~~~~~~~~~~~~~~~~~~ A trickier problem is to decide equalities between types modulo isomorphisms. Here, we choose to use the isomorphisms of the simply typed λ-calculus with Cartesian product and unit type (see, for example, :cite:`RC95`). The axioms of this λ-calculus are given below. .. coqtop:: in reset Open Scope type_scope. .. coqtop:: in Section Iso_axioms. .. coqtop:: in Variables A B C : Set. .. coqtop:: in Axiom Com : A * B = B * A. Axiom Ass : A * (B * C) = A * B * C. Axiom Cur : (A * B -> C) = (A -> B -> C). Axiom Dis : (A -> B * C) = (A -> B) * (A -> C). Axiom P_unit : A * unit = A. Axiom AR_unit : (A -> unit) = unit. Axiom AL_unit : (unit -> A) = A. .. coqtop:: in Lemma Cons : B = C -> A * B = A * C. Proof. intro Heq; rewrite Heq; reflexivity. Qed. .. coqtop:: in End Iso_axioms. .. coqtop:: in Ltac simplify_type ty := match ty with | ?A * ?B * ?C => rewrite <- (Ass A B C); try simplify_type_eq | ?A * ?B -> ?C => rewrite (Cur A B C); try simplify_type_eq | ?A -> ?B * ?C => rewrite (Dis A B C); try simplify_type_eq | ?A * unit => rewrite (P_unit A); try simplify_type_eq | unit * ?B => rewrite (Com unit B); try simplify_type_eq | ?A -> unit => rewrite (AR_unit A); try simplify_type_eq | unit -> ?B => rewrite (AL_unit B); try simplify_type_eq | ?A * ?B => (simplify_type A; try simplify_type_eq) || (simplify_type B; try simplify_type_eq) | ?A -> ?B => (simplify_type A; try simplify_type_eq) || (simplify_type B; try simplify_type_eq) end with simplify_type_eq := match goal with | |- ?A = ?B => try simplify_type A; try simplify_type B end. .. coqtop:: in Ltac len trm := match trm with | _ * ?B => let succ := len B in constr:(S succ) | _ => constr:(1) end. .. coqtop:: in Ltac assoc := repeat rewrite <- Ass. .. coqtop:: in Ltac solve_type_eq n := match goal with | |- ?A = ?A => reflexivity | |- ?A * ?B = ?A * ?C => apply Cons; let newn := len B in solve_type_eq newn | |- ?A * ?B = ?C => match eval compute in n with | 1 => fail | _ => pattern (A * B) at 1; rewrite Com; assoc; solve_type_eq (pred n) end end. .. coqtop:: in Ltac compare_structure := match goal with | |- ?A = ?B => let l1 := len A with l2 := len B in match eval compute in (l1 = l2) with | ?n = ?n => solve_type_eq n end end. .. coqtop:: in Ltac solve_iso := simplify_type_eq; compare_structure. The tactic to judge equalities modulo this axiomatization is shown above. The algorithm is quite simple. First types are simplified using axioms that can be oriented (this is done by ``simplify_type`` and ``simplify_type_eq``). The normal forms are sequences of Cartesian products without a Cartesian product in the left component. These normal forms are then compared modulo permutation of the components by the tactic ``compare_structure``. If they have the same length, the tactic ``solve_type_eq`` attempts to prove that the types are equal. The main tactic that puts all these components together is ``solve_iso``. Here are examples of what can be solved by ``solve_iso``. .. coqtop:: in Lemma solve_iso_ex1 : forall A B : Set, A * unit * B = B * (unit * A). Proof. intros; solve_iso. Qed. .. coqtop:: in Lemma solve_iso_ex2 : forall A B C : Set, (A * unit -> B * (C * unit)) = (A * unit -> (C -> unit) * C) * (unit -> A -> B). Proof. intros; solve_iso. Qed. Debugging |Ltac| tactics ------------------------ Backtraces ~~~~~~~~~~ .. flag:: Ltac Backtrace Setting this :term:`flag` displays a backtrace on Ltac failures that can be useful to find out what went wrong. 
It is disabled by default for performance reasons. Tracing execution ~~~~~~~~~~~~~~~~~ .. cmd:: Info @natural @ltac_expr Applies :token:`ltac_expr` and prints a trace of the tactics that were successfully applied, discarding branches that failed. :tacn:`idtac` tactics appear in the trace as comments containing the output. This command is valid only in proof mode. It accepts :ref:`goal-selectors`. The number :n:`@natural` is the unfolding level of tactics in the trace. At level 0, the trace contains a sequence of tactics in the actual script, at level 1, the trace will be the concatenation of the traces of these tactics, etc… .. example:: .. coqtop:: in reset Ltac t x := exists x; reflexivity. Goal exists n, n=0. .. coqtop:: all Info 0 t 1||t 0. .. coqtop:: in Undo. .. coqtop:: all Info 1 t 1||t 0. The trace produced by :cmd:`Info` tries its best to be a reparsable |Ltac| script, but this goal is not achievable in all generality. So some of the output traces will contain oddities. As an additional help for debugging, the trace produced by :cmd:`Info` contains (in comments) the messages produced by the :tacn:`idtac` tactical at the right position in the script. In particular, the calls to idtac in branches which failed are not printed. .. opt:: Info Level @natural This :term:`option` is an alternative to the :cmd:`Info` command. This will automatically print the same trace as :n:`Info @natural` at each tactic call. The unfolding level can be overridden by a call to the :cmd:`Info` command. .. _interactive-debugger: Interactive debugger ~~~~~~~~~~~~~~~~~~~~ .. flag:: Ltac Debug This flag, when set, enables the step-by-step debugger in the |Ltac| interpreter. The debugger is supported in `coqtop` and Proof General by printing information on the console and accepting typed commands. In addition, CoqIDE now supports a :ref:`visual debugger ` with additional capabilities. When the debugger is activated in `coqtop`, it stops at every step of the evaluation of the current |Ltac| expression and prints information on what it is doing. The debugger stops, prompting for a command which can be one of the following: +-----------------+-----------------------------------------------+ | newline | go to the next step | +-----------------+-----------------------------------------------+ | h | get help | +-----------------+-----------------------------------------------+ | r n | advance n steps further | +-----------------+-----------------------------------------------+ | r string | advance up to the next call to “idtac string” | +-----------------+-----------------------------------------------+ | s | continue current evaluation without stopping | +-----------------+-----------------------------------------------+ | x | exit current evaluation | +-----------------+-----------------------------------------------+ .. exn:: Debug mode not available in the IDE :undocumented: A non-interactive mode for the debugger is available via the flag: .. flag:: Ltac Batch Debug This flag has the effect of presenting a newline at every prompt, when the debugger is on in `coqtop`. (It has no effect when running the CoqIDE debugger.) The debug log thus created, which does not require user input to generate when this flag is set, can then be run through external tools such as diff. Profiling |Ltac| tactics ~~~~~~~~~~~~~~~~~~~~~~~~ It is possible to measure the time spent in invocations of primitive tactics as well as tactics defined in |Ltac| and their inner invocations. 
The primary use is the development of complex tactics, which can sometimes be so slow as to impede interactive usage. The reasons for the performance degradation can be intricate, like a slowly performing |Ltac| match or a sub-tactic whose performance only degrades in certain situations. The profiler generates a call tree and indicates the time spent in a tactic depending on its calling context. Thus it allows to locate the part of a tactic definition that contains the performance issue. .. flag:: Ltac Profiling This :term:`flag` enables and disables the profiler. .. cmd:: Show Ltac Profile {? {| CutOff @integer | @string } } Prints the profile. :n:`CutOff @integer` By default, tactics that account for less than 2% of the total time are not displayed. `CutOff` lets you specify a different percentage. :n:`@string` Limits the profile to all tactics that start with :n:`@string`. Append a period (.) to the string if you only want exactly that name. .. cmd:: Reset Ltac Profile Resets the profile, that is, deletes all accumulated information. .. warning:: Backtracking across a :cmd:`Reset Ltac Profile` will not restore the information. .. coqtop:: reset in Require Import Lia. Ltac mytauto := tauto. Ltac tac := intros; repeat split; lia || mytauto. Notation max x y := (x + (y - x)) (only parsing). Goal forall x y z A B C D E F G H I J K L M N O P Q R S T U V W X Y Z, max x (max y z) = max (max x y) z /\ max x (max y z) = max (max x y) z /\ (A /\ B /\ C /\ D /\ E /\ F /\ G /\ H /\ I /\ J /\ K /\ L /\ M /\ N /\ O /\ P /\ Q /\ R /\ S /\ T /\ U /\ V /\ W /\ X /\ Y /\ Z -> Z /\ Y /\ X /\ W /\ V /\ U /\ T /\ S /\ R /\ Q /\ P /\ O /\ N /\ M /\ L /\ K /\ J /\ I /\ H /\ G /\ F /\ E /\ D /\ C /\ B /\ A). Proof. .. coqtop:: all Set Ltac Profiling. tac. Show Ltac Profile. Show Ltac Profile "lia". .. coqtop:: in Abort. Unset Ltac Profiling. .. tacn:: start ltac profiling This tactic behaves like :tacn:`idtac` but enables the profiler. .. tacn:: stop ltac profiling Similarly to :tacn:`start ltac profiling`, this tactic behaves like :tacn:`idtac`. Together, they allow you to exclude parts of a proof script from profiling. .. tacn:: reset ltac profile Equivalent to the :cmd:`Reset Ltac Profile` command, which allows resetting the profile from tactic scripts for benchmarking purposes. .. tacn:: show ltac profile {? {| cutoff @integer | @string } } Equivalent to the :cmd:`Show Ltac Profile` command, which allows displaying the profile from tactic scripts for benchmarking purposes. .. warn:: Ltac Profiler encountered an invalid stack (no \ self node). This can happen if you reset the profile during \ tactic execution Currently, :tacn:`reset ltac profile` is not very well-supported, as it clears all profiling information about all tactics, including ones above the current tactic. As a result, the profiler has trouble understanding where it is in tactic execution. This mixes especially poorly with backtracking into multi-success tactics. In general, non-top-level calls to :tacn:`reset ltac profile` should be avoided. You can also pass the ``-profile-ltac`` command line option to ``coqc``, which turns the :flag:`Ltac Profiling` flag on at the beginning of each document, and performs a :cmd:`Show Ltac Profile` at the end. Run-time optimization tactic ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. tacn:: optimize_heap This tactic behaves like :tacn:`idtac`, except that running it compacts the heap in the OCaml run-time system. It is analogous to the :cmd:`Optimize Heap` command. .. 
cmd:: infoH @ltac_expr Used internally by Proof General. See `#12423 `_ for some background. coq-8.15.0/doc/sphinx/proof-engine/ltac2.rst000066400000000000000000002036201417001151100205660ustar00rootroot00000000000000.. _ltac2: Ltac2 ===== The |Ltac| tactic language is probably one of the ingredients of the success of Coq, yet it is at the same time its Achilles' heel. Indeed, |Ltac|: - has often unclear semantics - is very non-uniform due to organic growth - lacks expressivity (data structures, combinators, types, ...) - is slow - is error-prone and fragile - has an intricate implementation Following the need of users who are developing huge projects relying critically on Ltac, we believe that we should offer a proper modern language that features at least the following: - at least informal, predictable semantics - a type system - standard programming facilities (e.g., datatypes) This new language, called Ltac2, is described in this chapter. It is still experimental but we nonetheless encourage users to start testing it, especially wherever an advanced tactic language is needed. The previous implementation of Ltac, described in the previous chapter, will be referred to as Ltac1. Current limitations include: - There are a number of tactics that are not yet supported in Ltac2 because the interface OCaml and/or Ltac2 notations haven't been written. See :ref:`defining_tactics`. - Missing usability features such as: - Printing functions are limited and awkward to use. Only a few data types are printable. - Deep pattern matching and matching on tuples don't work. - A convenient way to build terms with casts through the low-level API. Because the cast type is opaque, building terms with casts currently requires an awkward construction like the following, which also incurs extra overhead to repeat typechecking for each call to `get_vm_cast`: .. coqdoc:: Constr.Unsafe.make (Constr.Unsafe.Cast 'I (get_vm_cast ()) 'True) with: .. coqtop:: none From Ltac2 Require Import Ltac2. .. coqtop:: in Ltac2 get_vm_cast () := match Constr.Unsafe.kind '(I <: True) with | Constr.Unsafe.Cast _ cst _ => cst | _ => Control.throw Not_found end. - Missing low-level primitives that are convenient for writing automation, such as: - An easy way to get the number of constructors of an inductive type. Currently only way to do this is to destruct a variable of the inductive type and count the number of goals that result. - Error messages may be cryptic. .. _ltac2_design: General design -------------- There are various alternatives to Ltac1, such as Mtac or Rtac for instance. While those alternatives can be quite different from Ltac1, we designed Ltac2 to be as close as reasonably possible to Ltac1, while fixing the aforementioned defects. In particular, Ltac2 is: - a member of the ML family of languages, i.e. * a call-by-value functional language * with effects * together with the Hindley-Milner type system - a language featuring meta-programming facilities for the manipulation of Coq-side terms - a language featuring notation facilities to help write palatable scripts We describe these in more detail in the remainder of this document. ML component ------------ Overview ~~~~~~~~ Ltac2 is a member of the ML family of languages, in the sense that it is an effectful call-by-value functional language, with static typing à la Hindley-Milner (see :cite:`MilnerPrincipalTypeSchemes`). 
It is commonly accepted that ML constitutes a sweet spot in PL design, as it is relatively expressive while not being either too lax (unlike dynamic typing) nor too strict (unlike, say, dependent types). The main goal of Ltac2 is to serve as a meta-language for Coq. As such, it naturally fits in the ML lineage, just as the historical ML was designed as the tactic language for the LCF prover. It can also be seen as a general-purpose language, by simply forgetting about the Coq-specific features. Sticking to a standard ML type system can be considered somewhat weak for a meta-language designed to manipulate Coq terms. In particular, there is no way to statically guarantee that a Coq term resulting from an Ltac2 computation will be well-typed. This is actually a design choice, motivated by backward compatibility with Ltac1. Instead, well-typedness is deferred to dynamic checks, allowing many primitive functions to fail whenever they are provided with an ill-typed term. The language is naturally effectful as it manipulates the global state of the proof engine. This allows to think of proof-modifying primitives as effects in a straightforward way. Semantically, proof manipulation lives in a monad, which allows to ensure that Ltac2 satisfies the same equations as a generic ML with unspecified effects would do, e.g. function reduction is substitution by a value. Use the following command to import Ltac2: .. coqtop:: in From Ltac2 Require Import Ltac2. Type Syntax ~~~~~~~~~~~ At the level of terms, we simply elaborate on Ltac1 syntax, which is quite close to OCaml. Types follow the simply-typed syntax of OCaml. .. insertprodn ltac2_type ltac2_typevar .. prodn:: ltac2_type ::= @ltac2_type2 -> @ltac2_type | @ltac2_type2 ltac2_type2 ::= @ltac2_type1 * {+* @ltac2_type1 } | @ltac2_type1 ltac2_type1 ::= @ltac2_type0 @qualid | @ltac2_type0 ltac2_type0 ::= ( {+, @ltac2_type } ) {? @qualid } | @ltac2_typevar | _ | @qualid ltac2_typevar ::= ' @ident The set of base types can be extended thanks to the usual ML type declarations such as algebraic datatypes and records. Built-in types include: - ``int``, machine integers (size not specified, in practice inherited from OCaml) - ``string``, mutable strings - ``'a array``, mutable arrays - ``exn``, exceptions - ``constr``, kernel-side terms - ``pattern``, term patterns - ``ident``, well-formed identifiers Type declarations ~~~~~~~~~~~~~~~~~ One can define new types with the following commands. .. cmd:: Ltac2 Type {? rec } @tac2typ_def {* with @tac2typ_def } .. insertprodn tac2typ_def tac2rec_field .. prodn:: tac2typ_def ::= {? @tac2typ_prm } @qualid {? {| := | ::= } @tac2typ_knd } tac2typ_prm ::= @ltac2_typevar | ( {+, @ltac2_typevar } ) tac2typ_knd ::= @ltac2_type | [ {? {? %| } {+| @tac2alg_constructor } } ] | [ .. ] | %{ {? {+; @tac2rec_field } {? ; } } %} tac2alg_constructor ::= @ident | @ident ( {*, @ltac2_type } ) tac2rec_field ::= {? mutable } @ident : @ltac2_type :n:`:=` Defines a type with with an explicit set of constructors :n:`::=` Extends an existing open variant type, a special kind of variant type whose constructors are not statically defined, but can instead be extended dynamically. A typical example is the standard `exn` type for exceptions. Pattern matching on open variants must always include a catch-all clause. They can be extended with this form, in which case :token:`tac2typ_knd` should be in the form :n:`[ {? {? %| } {+| @tac2alg_constructor } } ]`. Without :n:`{| := | ::= }` Defines an abstract type for use representing data from OCaml. 
Not for end users. :n:`with @tac2typ_def` Permits definition of mutually recursive type definitions. Each production of :token:`tac2typ_knd` defines one of four possible kinds of definitions, respectively: alias, variant, open variant and record types. Aliases are names for a given type expression and are transparently unfoldable to that expression. They cannot be recursive. .. The non-terminal :token:`uident` designates identifiers starting with an uppercase. Variants are sum types defined by constructors and eliminated by pattern-matching. They can be recursive, but the `rec` flag must be explicitly set. Pattern matching must be exhaustive. Open variants can be extended with additional constructors using the `::=` form. Records are product types with named fields and eliminated by projection. Likewise they can be recursive if the `rec` flag is set. .. cmd:: Ltac2 @ external @ident : @ltac2_type := @string @string :name: Ltac2 external Declares abstract terms. Frequently, these declare OCaml functions defined in Coq and give their type information. They can also declare data structures from OCaml. This command has no use for the end user. This command supports the :attr:`deprecated` attribute. APIs ~~~~ Ltac2 provides over 150 API functions that provide various capabilities. These are declared with :cmd:`Ltac2 external` in :n:`lib/coq/user-contrib/Ltac2/*.v`. For example, `Message.print` defined in `Message.v` is used to print messages: .. coqtop:: none Goal True. .. coqtop:: all abort Message.print (Message.of_string "fully qualified calls"). From Ltac2 Require Import Message. print (of_string "unqualified calls"). Term Syntax ~~~~~~~~~~~ The syntax of the functional fragment is very close to that of Ltac1, except that it adds a true pattern-matching feature, as well as a few standard constructs from ML. In practice, there is some additional syntactic sugar that allows the user to bind a variable and match on it at the same time, in the usual ML style. There is dedicated syntax for list and array literals. .. insertprodn ltac2_expr ltac2_tactic_atom .. prodn:: ltac2_expr ::= @ltac2_expr5 ; @ltac2_expr | @ltac2_expr5 ltac2_expr5 ::= fun {+ @tac2pat0 } {? : @ltac2_type } => @ltac2_expr | let {? rec } @ltac2_let_clause {* with @ltac2_let_clause } in @ltac2_expr | @ltac2_expr3 ltac2_let_clause ::= {+ @tac2pat0 } {? : @ltac2_type } := @ltac2_expr ltac2_expr3 ::= {+, @ltac2_expr2 } ltac2_expr2 ::= @ltac2_expr1 :: @ltac2_expr2 | @ltac2_expr1 ltac2_expr1 ::= @ltac2_expr0 {+ @ltac2_expr0 } | @ltac2_expr0 .( @qualid ) | @ltac2_expr0 .( @qualid ) := @ltac2_expr5 | @ltac2_expr0 tac2rec_fieldexpr ::= @qualid := @ltac2_expr1 ltac2_expr0 ::= ( @ltac2_expr ) | ( @ltac2_expr : @ltac2_type ) | () | [ {*; @ltac2_expr5 } ] | %{ {? {+ @tac2rec_fieldexpr } {? ; } } %} | @ltac2_tactic_atom ltac2_tactic_atom ::= @integer | @string | @qualid | @ @ident | & @lident | ' @term | @ltac2_quotations The non-terminal :production:`lident` designates identifiers starting with a lowercase letter. :n:`'@term` is equivalent to :n:`open_constr:(@term)`. Ltac2 Definitions ~~~~~~~~~~~~~~~~~ .. cmd:: Ltac2 {? mutable } {? rec } @tac2def_body {* with @tac2def_body } .. insertprodn tac2def_body tac2def_body .. prodn:: tac2def_body ::= {| _ | @ident } {* @tac2pat0 } {? : @ltac2_type } := @ltac2_expr This command defines a new global Ltac2 value. If one or more :token:`tac2pat0` are specified, the new value is a function. This is a shortcut for one of the :token:`ltac2_expr5` productions. 
For example: :n:`Ltac2 foo a b := …` is equivalent to :n:`Ltac2 foo := fun a b => …`. The body of an Ltac2 definition is required to be a syntactical value that is, a function, a constant, a pure constructor recursively applied to values or a (non-recursive) let binding of a value in a value. If ``rec`` is set, the tactic is expanded into a recursive binding. If ``mutable`` is set, the definition can be redefined at a later stage (see below). This command supports the :attr:`deprecated` attribute. .. cmd:: Ltac2 Set @qualid {? as @ident } := @ltac2_expr This command redefines a previous ``mutable`` definition. Mutable definitions act like dynamic binding, i.e. at runtime, the last defined value for this entry is chosen. This is useful for global flags and the like. The previous value of the binding can be optionally accessed using the `as` binding syntax. .. example:: Dynamic nature of mutable cells .. coqtop:: all Ltac2 mutable x := true. Ltac2 y := x. Ltac2 Eval y. Ltac2 Set x := false. Ltac2 Eval y. .. example:: Interaction with recursive calls .. coqtop:: all Ltac2 mutable rec f b := if b then 0 else f true. Ltac2 Set f := fun b => if b then 1 else f true. Ltac2 Eval (f false). Ltac2 Set f as oldf := fun b => if b then 2 else oldf false. Ltac2 Eval (f false). In the definition, the `f` in the body is resolved statically because the definition is marked recursive. In the first re-definition, the `f` in the body is resolved dynamically. This is witnessed by the second re-definition. Reduction ~~~~~~~~~ We use the usual ML call-by-value reduction, with an otherwise unspecified evaluation order. This is a design choice making it compatible with OCaml, if ever we implement native compilation. The expected equations are as follows:: (fun x => t) V ≡ t{x := V} (βv) let x := V in t ≡ t{x := V} (let) match C V₀ ... Vₙ with ... | C x₀ ... xₙ => t | ... end ≡ t {xᵢ := Vᵢ} (ι) (t any term, V values, C constructor) Note that call-by-value reduction is already a departure from Ltac1 which uses heuristics to decide when to evaluate an expression. For instance, the following expressions do not evaluate the same way in Ltac1. :n:`foo (idtac; let x := 0 in bar)` :n:`foo (let x := 0 in bar)` Instead of relying on the :n:`idtac` idiom, we would now require an explicit thunk to not compute the argument, and :n:`foo` would have e.g. type :n:`(unit -> unit) -> unit`. :n:`foo (fun () => let x := 0 in bar)` Typing ~~~~~~ Typing is strict and follows the Hindley-Milner system. Unlike Ltac1, there are no type casts at runtime, and one has to resort to conversion functions. See notations though to make things more palatable. In this setting, all the usual argument-free tactics have type :n:`unit -> unit`, but one can return a value of type :n:`t` thanks to terms of type :n:`unit -> t`, or take additional arguments. Effects ~~~~~~~ Effects in Ltac2 are straightforward, except that instead of using the standard IO monad as the ambient effectful world, Ltac2 is has a tactic monad. Note that the order of evaluation of application is *not* specified and is implementation-dependent, as in OCaml. We recall that the `Proofview.tactic` monad is essentially a IO monad together with backtracking state representing the proof state. Intuitively a thunk of type :n:`unit -> 'a` can do the following: - It can perform non-backtracking IO like printing and setting mutable variables - It can fail in a non-recoverable way - It can use first-class backtracking. 
One way to think about this is that thunks are isomorphic to this type: :n:`(unit -> 'a) ~ (unit -> exn + ('a * (exn -> 'a)))` i.e. thunks can produce a lazy list of results where each tail is waiting for a continuation exception. - It can access a backtracking proof state, consisting among other things of the current evar assignment and the list of goals under focus. We now describe more thoroughly the various effects in Ltac2. Standard IO +++++++++++ The Ltac2 language features non-backtracking IO, notably mutable data and printing operations. Mutable fields of records can be modified using the set syntax. Likewise, built-in types like `string` and `array` feature imperative assignment. See modules `String` and `Array` respectively. A few printing primitives are provided in the `Message` module for displaying information to the user. Fatal errors ++++++++++++ The Ltac2 language provides non-backtracking exceptions, also known as *panics*, through the following primitive in module `Control`:: val throw : exn -> 'a Unlike backtracking exceptions from the next section, this kind of error is never caught by backtracking primitives, that is, throwing an exception destroys the stack. This is codified by the following equation, where `E` is an evaluation context:: E[throw e] ≡ throw e (e value) There is currently no way to catch such an exception, which is a deliberate design choice. Eventually there might be a way to catch it and destroy all backtrack and return values. Backtracking ++++++++++++ In Ltac2, we have the following backtracking primitives, defined in the `Control` module:: Ltac2 Type 'a result := [ Val ('a) | Err (exn) ]. val zero : exn -> 'a val plus : (unit -> 'a) -> (exn -> 'a) -> 'a val case : (unit -> 'a) -> ('a * (exn -> 'a)) result If one views thunks as lazy lists, then `zero` is the empty list and `plus` is list concatenation, while `case` is pattern-matching. The backtracking is first-class, i.e. one can write :n:`plus (fun () => "x") (fun _ => "y") : string` producing a backtracking string. These operations are expected to satisfy a few equations, most notably that they form a monoid compatible with sequentialization.:: plus t zero ≡ t () plus (fun () => zero e) f ≡ f e plus (plus t f) g ≡ plus t (fun e => plus (f e) g) case (fun () => zero e) ≡ Err e case (fun () => plus (fun () => t) f) ≡ Val (t,f) let x := zero e in u ≡ zero e let x := plus t f in u ≡ plus (fun () => let x := t in u) (fun e => let x := f e in u) (t, u, f, g, e values) Goals +++++ A goal is given by the data of its conclusion and hypotheses, i.e. it can be represented as `[Γ ⊢ A]`. The tactic monad naturally operates over the whole proofview, which may represent several goals, including none. Thus, there is no such thing as *the current goal*. Goals are naturally ordered, though. It is natural to do the same in Ltac2, but we must provide a way to get access to a given goal. This is the role of the `enter` primitive, which applies a tactic to each currently focused goal in turn:: val enter : (unit -> unit) -> unit It is guaranteed that when evaluating `enter f`, `f` is called with exactly one goal under focus. Note that `f` may be called several times, or never, depending on the number of goals under focus before the call to `enter`. Accessing the goal data is then implicit in the Ltac2 primitives, and may panic if the invariants are not respected. 
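For example, the following sketch of our own (the name ``show_goals`` is arbitrary) uses ``Control.enter`` together with the ``Control.goal`` primitive described just below to print the conclusion of every focused goal in turn:

.. coqtop:: in

   Ltac2 show_goals () :=
     Control.enter (fun () =>
       Message.print (Message.of_constr (Control.goal ()))).

If several goals are under focus, the body runs once per goal; if none are, it does not run at all.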
The two essential functions for observing goals are given below.:: val hyp : ident -> constr val goal : unit -> constr The two above functions panic if there is not exactly one goal under focus. In addition, `hyp` may also fail if there is no hypothesis with the corresponding name. Meta-programming ---------------- Overview ~~~~~~~~ One of the major implementation issues of Ltac1 is the fact that it is never clear whether an object refers to the object world or the meta-world. This is an incredible source of slowness, as the interpretation must be aware of bound variables and must use heuristics to decide whether a variable is a proper one or referring to something in the Ltac context. Likewise, in Ltac1, constr parsing is implicit, so that ``foo 0`` is not ``foo`` applied to the Ltac integer expression ``0`` (|Ltac| does have a notion of integers, though it is not first-class), but rather the Coq term :g:`Datatypes.O`. The implicit parsing is confusing to users and often gives unexpected results. Ltac2 makes these explicit using quoting and unquoting notation, although there are notations to do it in a short and elegant way so as not to be too cumbersome to the user. Quotations ~~~~~~~~~~ .. _ltac2_built-in-quotations: Built-in quotations +++++++++++++++++++ .. insertprodn ltac2_quotations ltac1_expr_in_env .. prodn:: ltac2_quotations ::= ident : ( @lident ) | constr : ( @term ) | open_constr : ( @term ) | pat : ( @cpattern ) | reference : ( {| & @ident | @qualid } ) | ltac1 : ( @ltac1_expr_in_env ) | ltac1val : ( @ltac1_expr_in_env ) ltac1_expr_in_env ::= @ltac_expr | {* @ident } %|- @ltac_expr The current implementation recognizes the following built-in quotations: - ``ident``, which parses identifiers (type ``Init.ident``). - ``constr``, which parses Coq terms and produces an-evar free term at runtime (type ``Init.constr``). - ``open_constr``, which parses Coq terms and produces a term potentially with holes at runtime (type ``Init.constr`` as well). - ``pat``, which parses Coq patterns and produces a pattern used for term matching (type ``Init.pattern``). - ``reference`` Qualified names are globalized at internalization into the corresponding global reference, while ``&id`` is turned into ``Std.VarRef id``. This produces at runtime a ``Std.reference``. - ``ltac1``, for calling Ltac1 code, described in :ref:`simple_api`. - ``ltac1val``, for manipulating Ltac1 values, described in :ref:`low_level_api`. The following syntactic sugar is provided for two common cases: - ``@id`` is the same as ``ident:(id)`` - :n:`'@term` is the same as :n:`open_constr:(@term)` Strict vs. non-strict mode ++++++++++++++++++++++++++ Depending on the context, quotation-producing terms (i.e. ``constr`` or ``open_constr``) are not internalized in the same way. There are two possible modes, the *strict* and the *non-strict* mode. - In strict mode, all simple identifiers appearing in a term quotation are required to be resolvable statically. That is, they must be the short name of a declaration which is defined globally, excluding section variables and hypotheses. If this doesn't hold, internalization will fail. To work around this error, one has to specifically use the ``&`` notation. - In non-strict mode, any simple identifier appearing in a term quotation which is not bound in the global environment is turned into a dynamic reference to a hypothesis. That is to say, internalization will succeed, but the evaluation of the term at runtime will fail if there is no such variable in the dynamic context. 
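For instance, here is a sketch of our own (it assumes that no global constant named ``x`` is in scope): the first definition below is rejected at internalization time because of strict mode, while the second one, using the ``&`` notation, is accepted and only looks up the hypothesis ``x`` at runtime:

.. coqtop:: in

   Fail Ltac2 bad () := constr:(x + 1).
   Ltac2 ok () := constr:(&x + 1).

Evaluating ``ok ()`` still fails at runtime if no hypothesis named ``x`` is present in the local context.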
Strict mode is enforced by default, such as for all Ltac2 definitions. Non-strict mode is only set when evaluating Ltac2 snippets in interactive proof mode. The rationale is that it is cumbersome to explicitly add ``&`` interactively, while it is expected that global tactics enforce more invariants on their code. Term Antiquotations ~~~~~~~~~~~~~~~~~~~ Syntax ++++++ One can also insert Ltac2 code into Coq terms, similar to what is possible in Ltac1. .. prodn:: term += ltac2:( @ltac2_expr ) Antiquoted terms are expected to have type ``unit``, as they are only evaluated for their side-effects. Semantics +++++++++ A quoted Coq term is interpreted in two phases, internalization and evaluation. - Internalization is part of the static semantics, that is, it is done at Ltac2 typing time. - Evaluation is part of the dynamic semantics, that is, it is done when a term gets effectively computed by Ltac2. Note that typing of Coq terms is a *dynamic* process occurring at Ltac2 evaluation time, and not at Ltac2 typing time. Static semantics **************** During internalization, Coq variables are resolved and antiquotations are type checked as Ltac2 terms, effectively producing a ``glob_constr`` in Coq implementation terminology. Note that although it went through the type checking of **Ltac2**, the resulting term has not been fully computed and is potentially ill-typed as a runtime **Coq** term. .. example:: The following term is valid (with type `unit -> constr`), but will fail at runtime: .. coqtop:: in Ltac2 myconstr () := constr:(nat -> 0). Term antiquotations are type checked in the enclosing Ltac2 typing context of the corresponding term expression. .. example:: The following will type check, with type `constr`. .. coqdoc:: let x := '0 in constr:(1 + ltac2:(exact x)) Beware that the typing environment of antiquotations is **not** expanded by the Coq binders from the term. .. example:: The following Ltac2 expression will **not** type check:: `constr:(fun x : nat => ltac2:(exact x))` `(* Error: Unbound variable 'x' *)` There is a simple reason for that, which is that the following expression would not make sense in general. `constr:(fun x : nat => ltac2:(clear @x; exact x))` Indeed, a hypothesis can suddenly disappear from the runtime context if some other tactic pulls the rug from under you. Rather, the tactic writer has to resort to the **dynamic** goal environment, and must write instead explicitly that she is accessing a hypothesis, typically as follows. `constr:(fun x : nat => ltac2:(exact (hyp @x)))` This pattern is so common that we provide dedicated Ltac2 and Coq term notations for it. - `&x` as an Ltac2 expression expands to `hyp @x`. - `&x` as a Coq constr expression expands to `ltac2:(Control.refine (fun () => hyp @x))`. In the special case where Ltac2 antiquotations appear inside a Coq term notation, the notation variables are systematically bound in the body of the tactic expression with type `Ltac2.Init.preterm`. Such a type represents untyped syntactic Coq expressions, which can by typed in the current context using the `Ltac2.Constr.pretype` function. .. example:: The following notation is essentially the identity. .. coqtop:: in Notation "[ x ]" := ltac2:(let x := Ltac2.Constr.pretype x in exact $x) (only parsing). Dynamic semantics ***************** During evaluation, a quoted term is fully evaluated to a kernel term, and is in particular type checked in the current environment. Evaluation of a quoted term goes as follows. - The quoted term is first evaluated by the pretyper. 
- Antiquotations are then evaluated in a context where there is exactly one goal under focus, with the hypotheses coming from the current environment extended with the bound variables of the term, and the resulting term is fed into the quoted term. Relative orders of evaluation of antiquotations and quoted term are not specified. For instance, in the following example, `tac` will be evaluated in a context with exactly one goal under focus, whose last hypothesis is `H : nat`. The whole expression will thus evaluate to the term :g:`fun H : nat => H`. `let tac () := hyp @H in constr:(fun H : nat => ltac2:(tac ()))` Many standard tactics perform type checking of their argument before going further. It is your duty to ensure that terms are well-typed when calling such tactics. Failure to do so will result in non-recoverable exceptions. **Trivial Term Antiquotations** It is possible to refer to a variable of type `constr` in the Ltac2 environment through a specific syntax consistent with the antiquotations presented in the notation section. .. prodn:: term += $@lident In a Coq term, writing :g:`$x` is semantically equivalent to :g:`ltac2:(Control.refine (fun () => x))`, up to re-typechecking. It allows to insert in a concise way an Ltac2 variable of type :n:`constr` into a Coq term. Match over terms ~~~~~~~~~~~~~~~~ Ltac2 features a construction similar to Ltac1 :tacn:`match` over terms, although in a less hard-wired way. .. tacn:: @ltac2_match_key @ltac2_expr__term with @ltac2_match_list end :name: lazy_match!; match!; multi_match! .. insertprodn ltac2_match_key ltac2_match_pattern .. prodn:: ltac2_match_key ::= lazy_match! | match! | multi_match! ltac2_match_list ::= {? %| } {+| @ltac2_match_rule } ltac2_match_rule ::= @ltac2_match_pattern => @ltac2_expr ltac2_match_pattern ::= @cpattern | context {? @ident } [ @cpattern ] Evaluates :n:`@ltac2_expr__term`, which must yield a term, and matches it sequentially with the :token:`ltac2_match_pattern`\s, which may contain metavariables. When a match is found, metavariable values are substituted into :n:`@ltac2_expr`, which is then applied. Matching may continue depending on whether `lazy_match!`, `match!` or `multi_match!` is specified. In the :token:`ltac2_match_pattern`\s, metavariables have the form :n:`?@ident`, whereas in the :n:`@ltac2_expr`\s, the question mark is omitted. .. todo how does this differ from the 1-2 other unification routines elsewhere in Coq? Matching is non-linear: if a metavariable occurs more than once, each occurrence must match the same expression. Expressions match if they are syntactically equal or are :term:`α-convertible `. Matching is first-order except on variables of the form :n:`@?@ident` that occur in the head position of an application. For these variables, matching is second-order and returns a functional term. .. todo the `@?ident` form is in dangling_pattern_extension_rule, not included in the doc yet maybe belongs with "Applications" `lazy_match!` Causes the match to commit to the first matching branch rather than trying a new match if :n:`@ltac2_expr` fails. :ref:`Example`. `match!` If :n:`@ltac2_expr` fails, continue matching with the next branch. Failures in subsequent tactics (after the `match!`) will not cause selection of a new branch. Examples :ref:`here` and :ref:`here`. `multi_match!` If :n:`@ltac2_expr` fails, continue matching with the next branch. 
When a :n:`@ltac2_expr` succeeds for a branch, subsequent failures (after the `multi_match!`) causing consumption of all the successes of :n:`@ltac2_expr` trigger selection of a new matching branch. :ref:`Example`. :n:`@cpattern` The syntax of :token:`cpattern` is the same as that of :token:`term`\s, but it can contain pattern matching metavariables in the form :n:`?@ident` and :n:`@?@ident`. :g:`_` can be used to match irrelevant terms. .. todo more on @?@ident here: https://github.com/coq/coq/pull/12085#discussion_r467504046 .. todo Example is broken :ref:`Example`. .. todo Didn't understand the following 2 paragraphs well enough to revise see https://github.com/coq/coq/pull/12103#discussion_r436297754 for a possible example Unlike Ltac1, Ltac2 :n:`?id` metavariables only match closed terms. There is also a special notation for second-order pattern matching: in an applicative pattern of the form :n:`@?@ident @ident__1 … @ident__n`, the variable :token:`ident` matches any complex expression with (possible) dependencies in the variables :n:`@ident__i` and returns a functional term of the form :n:`fun @ident__1 … @ident__n => @term`. .. _match_term_context: :n:`context {? @ident } [ @cpattern ]` Matches any term with a subterm matching :token:`cpattern`. If there is a match and :n:`@ident` is present, it is assigned the "matched context", i.e. the initial term where the matched subterm is replaced by a hole. This hole in the matched context can be filled with the expression :n:`Pattern.instantiate @ident @cpattern`. For :tacn:`match!` and :tacn:`multi_match!`, if the evaluation of the :token:`ltac2_expr` fails, the next matching subterm is tried. If no further subterm matches, the next branch is tried. Matching subterms are considered from top to bottom and from left to right (with respect to the raw printing obtained by setting the :flag:`Printing All` flag). :ref:`Example`. .. todo There's a more realistic example from @JasonGross here: https://github.com/coq/coq/pull/12103#discussion_r432996954 :n:`@ltac2_expr` The tactic to apply if the construct matches. Metavariable values from the pattern match are statically bound as Ltac2 variables in :n:`@ltac2_expr` before it is applied. If :n:`@ltac2_expr` is a tactic with backtracking points, then subsequent failures after a :tacn:`lazy_match!` or :tacn:`multi_match!` (but not :tacn:`match!`) can cause backtracking into :n:`@ltac2_expr` to select its next success. Variables from the :n:`@tac2pat1` are statically bound in the body of the branch. Variables from the :n:`@term` pattern have values of type `constr`. Variables from the :n:`@ident` in the `context` construct have values of type `Pattern.context` (defined in `Pattern.v`). Note that unlike Ltac1, only lowercase identifiers are valid as Ltac2 bindings. Ltac2 will report an error if one of the bound variables starts with an uppercase character. The semantics of this construction are otherwise the same as the corresponding one from Ltac1, except that it requires the goal to be focused. .. _ltac2_match_vs_lazymatch_ex: .. example:: Ltac2 Comparison of lazy_match! and match! (Equivalent to this :ref:`Ltac1 example`.) These lines define a `msg` tactic that's used in several examples as a more-succinct alternative to `print (to_string "...")`: .. coqtop:: in From Ltac2 Require Import Message. Ltac2 msg x := print (of_string x). .. coqtop:: none Goal True. In :tacn:`lazy_match!`, if :token:`ltac2_expr` fails, the :tacn:`lazy_match!` fails; it doesn't look for further matches. 
In :tacn:`match!`, if :token:`ltac2_expr` fails in a matching branch, it will try to match on subsequent branches. Note that :n:`'@term` below is equivalent to :n:`open_constr:(@term)`. .. coqtop:: all Fail lazy_match! 'True with | True => msg "branch 1"; fail | _ => msg "branch 2" end. match! 'True with | True => msg "branch 1"; fail | _ => msg "branch 2" end. .. _ltac2_match_vs_multimatch_ex: .. example:: Ltac2 Comparison of match! and multi_match! (Equivalent to this :ref:`Ltac1 example`.) :tacn:`match!` tactics are only evaluated once, whereas :tacn:`multi_match!` tactics may be evaluated more than once if the following constructs trigger backtracking: .. coqtop:: all Fail match! 'True with | True => msg "branch 1" | _ => msg "branch 2" end ; msg "branch A"; fail. .. coqtop:: all Fail multi_match! 'True with | True => msg "branch 1" | _ => msg "branch 2" end ; msg "branch A"; fail. .. _ltac2_match_with_holes_ex: .. todo EXAMPLE DOESN'T WORK: Ltac2 does not (yet?) handle pattern variables matching open terms. Matching a pattern with holes (Equivalent to this :ref:`Ltac1 example`.) Notice the :tacn:`idtac` prints ``(z + 1)`` while the :tacn:`pose` substitutes ``(x + 1)``. .. coqtop:: all match! constr:(fun x => (x + 1) * 3) with | fun z => ?y * 3 => print (of_constr y); pose (fun z: nat => $y * 5) end. .. _ltac2_match_term_context_ex: .. example:: Ltac2 Multiple matches for a "context" pattern. (Equivalent to this :ref:`Ltac1 example`.) Internally "x <> y" is represented as "(~ (x = y))", which produces the first match. .. coqtop:: in Ltac2 f2 t := match! t with | context [ (~ ?t) ] => print (of_constr t); fail | _ => () end. .. coqtop:: all abort f2 constr:((~ True) <> (~ False)). Match over goals ~~~~~~~~~~~~~~~~ .. tacn:: @ltac2_match_key {? reverse } goal with @goal_match_list end :name: lazy_match! goal; match! goal; multi_match! goal .. insertprodn goal_match_list gmatch_hyp_pattern .. prodn:: goal_match_list ::= {? %| } {+| @gmatch_rule } gmatch_rule ::= @gmatch_pattern => @ltac2_expr gmatch_pattern ::= [ {*, @gmatch_hyp_pattern } %|- @ltac2_match_pattern ] gmatch_hyp_pattern ::= @name : @ltac2_match_pattern Matches over goals, similar to Ltac1 :tacn:`match goal`. Use this form to match hypotheses and/or goals in the local context. These patterns have zero or more subpatterns to match hypotheses followed by a subpattern to match the conclusion. Except for the differences noted below, this works the same as the corresponding :n:`@ltac2_match_key @ltac2_expr` construct (see :tacn:`match!`). Each current goal is processed independently. Matching is non-linear: if a metavariable occurs more than once, each occurrence must match the same expression. Within a single term, expressions match if they are syntactically equal or :term:`α-convertible `. When a metavariable is used across multiple hypotheses or across a hypothesis and the current goal, the expressions match if they are :term:`convertible`. .. more detail here: https://github.com/coq/coq/pull/12085#discussion_r470406466 :n:`{*, @gmatch_pattern }` Patterns to match with hypotheses. Each pattern must match a distinct hypothesis in order for the branch to match. Hypotheses have the form :n:`@name {? := @term__binder } : @type`. Currently Ltac2 doesn't allow matching on or capturing the value of :n:`@term__binder`. It only supports matching on the :token:`name` and the :token:`type`, for example `n : ?t`. .. 
currently only supports the first row :list-table:: :widths: 2 1 :header-rows: 1 * - Pattern syntax - Example pattern * - :n:`@name : @ltac2_match_pattern` - `n : ?t` * - :n:`@name := @match_pattern__binder` - `n := ?b` * - :n:`@name := @term__binder : @type` - `n := ?b : ?t` * - :n:`@name := [ @match_pattern__binder ] : @ltac2_match_pattern` - `n := [ ?b ] : ?t` :token:`name` can't have a `?`. Note that the last two forms are equivalent except that: - if the `:` in the third form has been bound to something else in a notation, you must use the fourth form. Note that cmd:`Require Import` `ssreflect` loads a notation that does this. - a :n:`@term__binder` such as `[ ?l ]` (e.g., denoting a singleton list after :cmd:`Import` `ListNotations`) must be parenthesized or, for the fourth form, use double brackets: `[ [ ?l ] ]`. If there are multiple :token:`gmatch_hyp_pattern`\s in a branch, there may be multiple ways to match them to hypotheses. For :tacn:`match! goal` and :tacn:`multi_match! goal`, if the evaluation of the :token:`ltac2_expr` fails, matching will continue with the next hypothesis combination. When those are exhausted, the next alternative from any `context` construct in the :token:`ltac2_match_pattern`\s is tried and then, when the context alternatives are exhausted, the next branch is tried. :ref:`Example`. `reverse` Hypothesis matching for :token:`gmatch_hyp_pattern`\s normally begins by matching them from left to right, to hypotheses, last to first. Specifying `reverse` begins matching in the reverse order, from first to last. :ref:`Normal` and :ref:`reverse` examples. :n:`|- @ltac2_match_pattern` A pattern to match with the current goal Note that unlike Ltac1, only lowercase identifiers are valid as Ltac2 bindings. Ltac2 will report an error if you try to use a bound variable that starts with an uppercase character. Variables from :n:`@gmatch_hyp_pattern` and :n:`@ltac2_match_pattern` are bound in the body of the branch. Their types are: - ``constr`` for pattern variables appearing in a :n:`@term` - ``Pattern.context`` for variables binding a context - ``ident`` for variables binding a hypothesis name. The same identifier caveat as in the case of matching over constr applies, and this feature has the same semantics as in Ltac1. .. _ltac2_match_goal_hyps_ex: .. example:: Ltac2 Matching hypotheses (Equivalent to this :ref:`Ltac1 example`.) Hypotheses are matched from the last hypothesis (which is by default the newest hypothesis) to the first until the :tacn:`apply` succeeds. .. coqtop:: all abort Goal forall A B : Prop, A -> B -> (A->B). intros. match! goal with | [ h : _ |- _ ] => let h := Control.hyp h in print (of_constr h); apply $h end. .. _ltac2_match_goal_hyps_rev_ex: .. example:: Matching hypotheses with reverse (Equivalent to this :ref:`Ltac1 example`.) Hypotheses are matched from the first hypothesis to the last until the :tacn:`apply` succeeds. .. coqtop:: all abort Goal forall A B : Prop, A -> B -> (A->B). intros. match! reverse goal with | [ h : _ |- _ ] => let h := Control.hyp h in print (of_constr h); apply $h end. .. _ltac2_match_goal_multiple_hyps_ex: .. example:: Multiple ways to match a hypotheses (Equivalent to this :ref:`Ltac1 example`.) Every possible match for the hypotheses is evaluated until the right-hand side succeeds. Note that `h1` and `h2` are never matched to the same hypothesis. Observe that the number of permutations can grow as the factorial of the number of hypotheses and hypothesis patterns. .. 
coqtop:: all abort Goal forall A B : Prop, A -> B -> (A->B). intros A B H. match! goal with | [ h1 : _, h2 : _ |- _ ] => print (concat (of_string "match ") (concat (of_constr (Control.hyp h1)) (concat (of_string " ") (of_constr (Control.hyp h2))))); fail | [ |- _ ] => () end. Match on values ~~~~~~~~~~~~~~~ .. tacn:: match @ltac2_expr5 with {? @ltac2_branches } end :name: match (Ltac2) Matches a value, akin to the OCaml `match` construct. By itself, it doesn't cause backtracking as do the `*match*!` and `*match*! goal` constructs. .. insertprodn ltac2_branches atomic_tac2pat .. prodn:: ltac2_branches ::= {? %| } {+| @tac2pat1 => @ltac2_expr } tac2pat1 ::= @qualid {+ @tac2pat0 } | @qualid | [ ] | @tac2pat0 :: @tac2pat0 | @tac2pat0 tac2pat0 ::= _ | () | @qualid | ( {? @atomic_tac2pat } ) atomic_tac2pat ::= @tac2pat1 : @ltac2_type | @tac2pat1 , {*, @tac2pat1 } | @tac2pat1 .. tacn:: if @ltac2_expr5__test then @ltac2_expr5__then else @ltac2_expr5__else :name: if-then-else (Ltac2) Equivalent to a :tacn:`match ` on a boolean value. If the :n:`@ltac2_expr5__test` evaluates to true, :n:`@ltac2_expr5__then` is evaluated. Otherwise :n:`@ltac2_expr5__else` is evaluated. .. note:: For now, deep pattern matching is not implemented. .. _ltac2_notations: Notations --------- .. cmd:: Ltac2 Notation {+ @ltac2_scope } {? : @natural } := @ltac2_expr .. todo seems like name maybe should use lident rather than ident, considering: Ltac2 Notation "ex1" X(constr) := print (of_constr X). ex1 1. Unbound constructor X This works fine with lower-case "x" in place of "X" .. todo Ltac2 Notation := permits redefining same symbol (no warning) Also allows defining a symbol beginning with uppercase, which is prohibited in similar constructs. :cmd:`Ltac2 Notation` provides a way to extend the syntax of Ltac2 tactics. The left-hand side (before the `:=`) defines the syntax to recognize and gives formal parameter names for the syntactic values. :n:`@integer` is the level of the notation. When the notation is used, the values are substituted into the right-hand side. The right-hand side is typechecked when the notation is used, not when it is defined. In the following example, `x` is the formal parameter name and `constr` is its :ref:`syntactic class`. `print` and `of_constr` are functions provided by Coq through `Message.v`. .. todo "print" doesn't seem to pay attention to "Set Printing All" .. example:: Printing a :n:`@term` .. coqtop:: none Goal True. .. coqtop:: all From Ltac2 Require Import Message. Ltac2 Notation "ex1" x(constr) := print (of_constr x). ex1 (1 + 2). You can also print terms with a regular Ltac2 definition, but then the :n:`@term` must be in the quotation `constr:( … )`: .. coqtop:: all Ltac2 ex2 x := print (of_constr x). ex2 constr:(1+2). There are also metasyntactic classes described :ref:`here` that combine other items. For example, `list1(constr, ",")` recognizes a comma-separated list of one or more :token:`term`\s. .. example:: Parsing a list of :n:`@term`\s .. coqtop:: abort all Ltac2 rec print_list x := match x with | a :: t => print (of_constr a); print_list t | [] => () end. Ltac2 Notation "ex2" x(list1(constr, ",")) := print_list x. ex2 1, 2, 3. An Ltac2 notation adds a parsing rule to the Ltac2 grammar, which is expanded to the provided body where every token from the notation is let-bound to the corresponding generated expression. .. example:: Assume we perform: .. coqdoc:: Ltac2 Notation "foo" c(thunk(constr)) ids(list0(ident)) := Bar.f c ids. 
Then the following expression `let y := @X in foo (nat -> nat) x $y` will expand at parsing time to `let y := @X in` `let c := fun () => constr:(nat -> nat) with ids := [@x; y] in Bar.f c ids` Beware that the order of evaluation of multiple let-bindings is not specified, so that you may have to resort to thunking to ensure that side-effects are performed at the right time. This command supports the :attr:`deprecated` attribute. .. exn:: Notation levels must range between 0 and 6. The level of a notation must be an integer between 0 and 6 inclusive. Abbreviations ~~~~~~~~~~~~~ .. cmd:: Ltac2 Notation {| @string | @lident } := @ltac2_expr :name: Ltac2 Notation (abbreviation) Introduces a special kind of notation, called an abbreviation, that does not add any parsing rules. It is similar in spirit to Coq abbreviations (see :cmd:`Notation (abbreviation)`, insofar as its main purpose is to give an absolute name to a piece of pure syntax, which can be transparently referred to by this name as if it were a proper definition. The abbreviation can then be manipulated just like a normal Ltac2 definition, except that it is expanded at internalization time into the given expression. Furthermore, in order to make this kind of construction useful in practice in an effectful language such as Ltac2, any syntactic argument to an abbreviation is thunked on-the-fly during its expansion. For instance, suppose that we define the following. :n:`Ltac2 Notation foo := fun x => x ().` Then we have the following expansion at internalization time. :n:`foo 0 ↦ (fun x => x ()) (fun _ => 0)` Note that abbreviations are not type checked at all, and may result in typing errors after expansion. This command supports the :attr:`deprecated` attribute. .. _defining_tactics: Defining tactics ~~~~~~~~~~~~~~~~ Built-in tactics (those defined in OCaml code in the Coq executable) and Ltac1 tactics, which are defined in `.v` files, must be defined through notations. (Ltac2 tactics can be defined with :cmd:`Ltac2`. Notations for many but not all built-in tactics are defined in `Notations.v`, which is automatically loaded with Ltac2. The Ltac2 syntax for these tactics is often identical or very similar to the tactic syntax described in other chapters of this documentation. These notations rely on tactic functions declared in `Std.v`. Functions corresponding to some built-in tactics may not yet be defined in the Coq executable or declared in `Std.v`. Adding them may require code changes to Coq or defining workarounds through Ltac1 (described below). Two examples of syntax differences: - There is no notation defined that's equivalent to :n:`intros until {| @ident | @natural }`. There is, however, already an ``intros_until`` tactic function defined ``Std.v``, so it may be possible for a user to add the necessary notation. - The built-in `simpl` tactic in Ltac1 supports the use of scope keys in delta flags, e.g. :n:`simpl ["+"%nat]` which is not accepted by Ltac2. This is because Ltac2 uses a different definition for :token:`delta_reductions`; compare it to :token:`ltac2_delta_reductions`. This also affects :tacn:`compute`. Ltac1 tactics are not automatically available in Ltac2. (Note that some of the tactics described in the documentation are defined with Ltac1.) You can make them accessible in Ltac2 with commands similar to the following: .. coqtop:: in From Coq Require Import Lia. Local Ltac2 lia_ltac1 () := ltac1:(lia). Ltac2 Notation "lia" := lia_ltac1 (). A similar approach can be used to access missing built-in tactics. 
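For instance, here is a sketch of our own along the same lines (the choice of ``btauto`` is purely illustrative; it assumes the ``Btauto`` plugin is loaded and that no Ltac2 notation with this name is already defined):

.. coqtop:: in

   From Coq Require Import Btauto.
   Local Ltac2 btauto_ltac1 () := ltac1:(btauto).
   Ltac2 Notation "btauto" := btauto_ltac1 ().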
See :ref:`simple_api` for an example that passes two parameters to a missing build-in tactic. .. _syntactic_classes: Syntactic classes ~~~~~~~~~~~~~~~~~ The simplest syntactic classes in Ltac2 notations represent individual nonterminals from the Coq grammar. Only a few selected nonterminals are available as syntactic classes. In addition, there are metasyntactic operations for describing more complex syntax, such as making an item optional or representing a list of items. When parsing, each syntactic class expression returns a value that's bound to a name in the notation definition. Syntactic classes are described with a form of S-expression: .. insertprodn ltac2_scope ltac2_scope .. prodn:: ltac2_scope ::= @string | @integer | @name | @name ( {+, @ltac2_scope } ) .. todo no syn class for ints or strings? parm names are not reserved (e.g the var can be named "list1") Metasyntactic operations that can be applied to other syntactic classes are: :n:`opt(@ltac2_scope)` Parses an optional :token:`ltac2_scope`. The associated value is either :n:`None` or enclosed in :n:`Some` :n:`list1(@ltac2_scope {? , @string })` Parses a list of one or more :token:`ltac2_scope`\s. If :token:`string` is specified, items must be separated by :token:`string`. :n:`list0(@ltac2_scope {? , @string })` Parses a list of zero or more :token:`ltac2_scope`\s. If :token:`string` is specified, items must be separated by :token:`string`. For zero items, the associated value is an empty list. :n:`seq({+, @ltac2_scope })` Parses the :token:`ltac2_scope`\s in order. The associated value is a tuple, omitting :token:`ltac2_scope`\s that are :token:`string`\s. `self` and `next` are not permitted within `seq`. The following classes represent nonterminals with some special handling. The table further down lists the classes that that are handled plainly. :n:`constr {? ( {+, @scope_key } ) }` Parses a :token:`term`. If specified, the :token:`scope_key`\s are used to interpret the term (as described in :ref:`LocalInterpretationRulesForNotations`). The last :token:`scope_key` is the top of the scope stack that's applied to the :token:`term`. :n:`open_constr {? ( {+, @scope_key } ) }` Parses an open :token:`term`. Like :n:`constr` above, this class accepts a list of notation scopes with the same effects. :n:`ident` Parses :token:`ident` or :n:`$@ident`. The first form returns :n:`ident:(@ident)`, while the latter form returns the variable :n:`@ident`. :n:`@string` Accepts the specified string that is not a keyword, returning a value of `()`. :n:`keyword(@string)` Accepts the specified string that is a keyword, returning a value of `()`. :n:`terminal(@string)` Accepts the specified string whether it's a keyword or not, returning a value of `()`. :n:`tactic {? (@integer) }` Parses an :token:`ltac2_expr`. If :token:`integer` is specified, the construct parses a :n:`ltac2_expr@integer`, for example `tactic(5)` parses :token:`ltac2_expr5`. `tactic(6)` parses :token:`ltac2_expr`. :token:`integer` must be in the range `0 .. 6`. You can also use `tactic` to accept an :token:`integer` or a :token:`string`, but there's no syntactic class that accepts *only* an :token:`integer` or a :token:`string`. .. todo this doesn't work as expected: "::" is in ltac2_expr1 Ltac2 Notation "ex4" x(tactic(0)) := x. ex4 auto :: [auto]. .. not sure "self" and "next" do anything special. I get the same error message for both from constructs like Ltac2 Notation "ex5" x(self) := auto. ex5 match. 
Syntax error: [tactic:tac2expr level 5] expected after 'match' (in [tactic:tac2expr]). :n:`self` parses an Ltac2 expression at the current level and returns it as is. :n:`next` parses an Ltac2 expression at the next level and returns it as is. :n:`thunk(@ltac2_scope)` Used for semantic effect only, parses the same as :token:`ltac2_scope`. If :n:`e` is the parsed expression for :token:`ltac2_scope`, `thunk` returns :n:`fun () => e`. :n:`pattern` parses a :token:`cpattern` A few syntactic classes contain antiquotation features. For the sake of uniformity, all antiquotations are introduced by the syntax :n:`$@lident`. A few other specific syntactic classes exist to handle Ltac1-like syntax, but their use is discouraged and they are thus not documented. For now there is no way to declare new syntactic classes from the Ltac2 side, but this is planned. Other nonterminals that have syntactic classes are listed here. .. list-table:: :header-rows: 1 * - Syntactic class name - Nonterminal - Similar non-Ltac2 syntax * - :n:`intropatterns` - :token:`ltac2_intropatterns` - :n:`{* @intropattern }` * - :n:`intropattern` - :token:`ltac2_simple_intropattern` - :token:`simple_intropattern` * - :n:`ident` - :token:`ident_or_anti` - :token:`ident` * - :n:`destruction_arg` - :token:`ltac2_destruction_arg` - :token:`destruction_arg` * - :n:`with_bindings` - :token:`q_with_bindings` - :n:`{? with @bindings }` * - :n:`bindings` - :token:`ltac2_bindings` - :token:`bindings` * - :n:`reductions` - :token:`ltac2_reductions` - :token:`reductions` * - :n:`reference` - :token:`refglobal` - :token:`reference` * - :n:`clause` - :token:`ltac2_clause` - :token:`occurrences` * - :n:`occurrences` - :token:`q_occurrences` - :n:`{? at @occs_nums }` * - :n:`induction_clause` - :token:`ltac2_induction_clause` - :token:`induction_clause` * - :n:`conversion` - :token:`ltac2_conversion` - * - :n:`rewriting` - :token:`ltac2_oriented_rewriter` - :token:`oriented_rewriter` * - :n:`dispatch` - :token:`ltac2_for_each_goal` - :token:`for_each_goal` * - :n:`hintdb` - :token:`hintdb` - :token:`hintbases` * - :n:`move_location` - :token:`move_location` - :token:`where` * - :n:`pose` - :token:`pose` - :token:`bindings_with_parameters` * - :n:`assert` - :token:`assertion` - :n:`( @ident := @term )` * - :n:`constr_matching` - :token:`ltac2_match_list` - See :tacn:`match` * - :n:`goal_matching` - :token:`goal_match_list` - See :tacn:`match goal` Here is the syntax for the :n:`q_*` nonterminals: .. insertprodn ltac2_intropatterns nonsimple_intropattern .. prodn:: ltac2_intropatterns ::= {* @nonsimple_intropattern } nonsimple_intropattern ::= * | ** | @ltac2_simple_intropattern .. insertprodn ltac2_simple_intropattern ltac2_naming_intropattern .. prodn:: ltac2_simple_intropattern ::= @ltac2_naming_intropattern | _ | @ltac2_or_and_intropattern | @ltac2_equality_intropattern ltac2_or_and_intropattern ::= [ {+| @ltac2_intropatterns } ] | () | ( {+, @ltac2_simple_intropattern } ) | ( {+& @ltac2_simple_intropattern } ) ltac2_equality_intropattern ::= -> | <- | [= @ltac2_intropatterns ] ltac2_naming_intropattern ::= ? @lident | ?$ @lident | ? | @ident_or_anti .. insertprodn ident_or_anti ident_or_anti .. prodn:: ident_or_anti ::= @lident | $ @ident .. insertprodn ltac2_destruction_arg ltac2_constr_with_bindings .. prodn:: ltac2_destruction_arg ::= @natural | @lident | @ltac2_constr_with_bindings ltac2_constr_with_bindings ::= @term {? with @ltac2_bindings } .. insertprodn q_with_bindings qhyp .. prodn:: q_with_bindings ::= {? 
with @ltac2_bindings } ltac2_bindings ::= {+ @ltac2_simple_binding } | {+ @term } ltac2_simple_binding ::= ( @qhyp := @term ) qhyp ::= $ @ident | @natural | @lident .. insertprodn ltac2_reductions ltac2_delta_reductions .. prodn:: ltac2_reductions ::= {+ @ltac2_red_flag } | {? @ltac2_delta_reductions } ltac2_red_flag ::= beta | iota | match | fix | cofix | zeta | delta {? @ltac2_delta_reductions } ltac2_delta_reductions ::= {? - } [ {+ @refglobal } ] .. insertprodn refglobal refglobal .. prodn:: refglobal ::= & @ident | @qualid | $ @ident .. insertprodn ltac2_clause ltac2_in_clause .. prodn:: ltac2_clause ::= in @ltac2_in_clause | at @ltac2_occs_nums ltac2_in_clause ::= * {? @ltac2_occs } | * %|- {? @ltac2_concl_occ } | {*, @ltac2_hypident_occ } {? %|- {? @ltac2_concl_occ } } .. insertprodn q_occurrences ltac2_hypident .. prodn:: q_occurrences ::= {? @ltac2_occs } ltac2_occs ::= at @ltac2_occs_nums ltac2_occs_nums ::= {? - } {+ {| @natural | $ @ident } } ltac2_concl_occ ::= * {? @ltac2_occs } ltac2_hypident_occ ::= @ltac2_hypident {? @ltac2_occs } ltac2_hypident ::= @ident_or_anti | ( type of @ident_or_anti ) | ( value of @ident_or_anti ) .. insertprodn ltac2_induction_clause ltac2_eqn_ipat .. prodn:: ltac2_induction_clause ::= @ltac2_destruction_arg {? @ltac2_as_or_and_ipat } {? @ltac2_eqn_ipat } {? @ltac2_clause } ltac2_as_or_and_ipat ::= as @ltac2_or_and_intropattern ltac2_eqn_ipat ::= eqn : @ltac2_naming_intropattern .. insertprodn ltac2_conversion ltac2_conversion .. prodn:: ltac2_conversion ::= @term | @term with @term .. insertprodn ltac2_oriented_rewriter ltac2_rewriter .. prodn:: ltac2_oriented_rewriter ::= {? {| -> | <- } } @ltac2_rewriter ltac2_rewriter ::= {? @natural } {? {| ? | ! } } @ltac2_constr_with_bindings .. insertprodn ltac2_for_each_goal ltac2_goal_tactics .. prodn:: ltac2_for_each_goal ::= @ltac2_goal_tactics | {? @ltac2_goal_tactics %| } {? @ltac2_expr } .. {? %| @ltac2_goal_tactics } ltac2_goal_tactics ::= {*| {? @ltac2_expr } } .. insertprodn hintdb hintdb .. prodn:: hintdb ::= * | {+ @ident_or_anti } .. insertprodn move_location move_location .. prodn:: move_location ::= at top | at bottom | after @ident_or_anti | before @ident_or_anti .. insertprodn pose ltac2_as_name .. prodn:: pose ::= ( @ident_or_anti := @term ) | @term {? @ltac2_as_name } ltac2_as_name ::= as @ident_or_anti .. insertprodn assertion ltac2_by_tactic .. prodn:: assertion ::= ( @ident_or_anti := @term ) | ( @ident_or_anti : @term ) {? @ltac2_by_tactic } | @term {? @ltac2_as_ipat } {? @ltac2_by_tactic } ltac2_as_ipat ::= as @ltac2_simple_intropattern ltac2_by_tactic ::= by @ltac2_expr Evaluation ---------- Ltac2 features a toplevel loop that can be used to evaluate expressions. .. cmd:: Ltac2 Eval @ltac2_expr This command evaluates the term in the current proof if there is one, or in the global environment otherwise, and displays the resulting value to the user together with its type. This command is pure in the sense that it does not modify the state of the proof, and in particular all side-effects are discarded. Debug ----- .. flag:: Ltac2 Backtrace When this :term:`flag` is set, toplevel failures will be printed with a backtrace. Compatibility layer with Ltac1 ------------------------------ Ltac1 from Ltac2 ~~~~~~~~~~~~~~~~ .. _simple_api: Simple API ++++++++++ One can call Ltac1 code from Ltac2 by using the :n:`ltac1:(@ltac1_expr_in_env)` quotation. See :ref:`ltac2_built-in-quotations`. 
It parses a Ltac1 expression, and semantics of this quotation is the evaluation of the corresponding code for its side effects. In particular, it cannot return values, and the quotation has type :n:`unit`. Ltac1 **cannot** implicitly access variables from the Ltac2 scope, but this can be done with an explicit annotation on the :n:`ltac1:({* @ident } |- @ltac_expr)` quotation. See :ref:`ltac2_built-in-quotations`. For example: .. coqtop:: in Local Ltac2 replace_with (lhs: constr) (rhs: constr) := ltac1:(lhs rhs |- replace lhs with rhs) (Ltac1.of_constr lhs) (Ltac1.of_constr rhs). Ltac2 Notation "replace" lhs(constr) "with" rhs(constr) := replace_with lhs rhs. The return type of this expression is a function of the same arity as the number of identifiers, with arguments of type `Ltac2.Ltac1.t` (see below). This syntax will bind the variables in the quoted Ltac1 code as if they had been bound from Ltac1 itself. Similarly, the arguments applied to the quotation will be passed at runtime to the Ltac1 code. .. _low_level_api: Low-level API +++++++++++++ There exists a lower-level FFI into Ltac1 that is not recommended for daily use, which is available in the `Ltac2.Ltac1` module. This API allows to directly manipulate dynamically-typed Ltac1 values, either through the function calls, or using the `ltac1val` quotation. The latter parses the same as `ltac1`, but has type `Ltac2.Ltac1.t` instead of `unit`, and dynamically behaves as an Ltac1 thunk, i.e. `ltac1val:(foo)` corresponds to the tactic closure that Ltac1 would generate from `idtac; foo`. Due to intricate dynamic semantics, understanding when Ltac1 value quotations focus is very hard. This is why some functions return a continuation-passing style value, as it can dispatch dynamically between focused and unfocused behavior. The same mechanism for explicit binding of variables as described in the previous section applies. Ltac2 from Ltac1 ~~~~~~~~~~~~~~~~ Same as above by switching Ltac1 by Ltac2 and using the `ltac2` quotation instead. .. prodn:: ltac_expr += ltac2 : ( @ltac2_expr ) | ltac2 : ( {+ @ident } |- @ltac2_expr ) The typing rules are dual, that is, the optional identifiers are bound with type `Ltac2.Ltac1.t` in the Ltac2 expression, which is expected to have type unit. The value returned by this quotation is an Ltac1 function with the same arity as the number of bound variables. Note that when no variables are bound, the inner tactic expression is evaluated eagerly, if one wants to use it as an argument to a Ltac1 function, one has to resort to the good old :n:`idtac; ltac2:(foo)` trick. For instance, the code below will fail immediately and won't print anything. .. coqtop:: in From Ltac2 Require Import Ltac2. Set Default Proof Mode "Classic". .. coqtop:: all Ltac mytac tac := idtac "I am being evaluated"; tac. Goal True. Proof. (* Doesn't print anything *) Fail mytac ltac2:(fail). (* Prints and fails *) Fail mytac ltac:(idtac; ltac2:(fail)). In any case, the value returned by the fully applied quotation is an unspecified dummy Ltac1 closure and should not be further used. Switching between Ltac languages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We recommend using the :opt:`Default Proof Mode` option to switch between tactic languages with a proof-based granularity. This allows to incrementally port the proof scripts. Transition from Ltac1 --------------------- Owing to the use of a lot of notations, the transition should not be too difficult. In particular, it should be possible to do it incrementally. 
That said, we do *not* guarantee it will be a blissful walk either. Hopefully, owing to the fact Ltac2 is typed, the interactive dialogue with Coq will help you. We list the major changes and the transition strategies hereafter. Syntax changes ~~~~~~~~~~~~~~ Due to conflicts, a few syntactic rules have changed. - The dispatch tactical :n:`tac; [foo|bar]` is now written :n:`tac > [foo|bar]`. - Levels of a few operators have been revised. Some tacticals now parse as if they were normal functions. Parentheses are now required around complex arguments, such as abstractions. The tacticals affected are: :n:`try`, :n:`repeat`, :n:`do`, :n:`once`, :n:`progress`, :n:`time`, :n:`abstract`. - :n:`idtac` is no more. Either use :n:`()` if you expect nothing to happen, :n:`(fun () => ())` if you want a thunk (see next section), or use printing primitives from the :n:`Message` module if you want to display something. Tactic delay ~~~~~~~~~~~~ Tactics are not magically delayed anymore, neither as functions nor as arguments. It is your responsibility to thunk them beforehand and apply them at the call site. A typical example of a delayed function: :n:`Ltac foo := blah.` becomes :n:`Ltac2 foo () := blah.` All subsequent calls to `foo` must be applied to perform the same effect as before. Likewise, for arguments: :n:`Ltac bar tac := tac; tac; tac.` becomes :n:`Ltac2 bar tac := tac (); tac (); tac ().` We recommend the use of syntactic notations to ease the transition. For instance, the first example can alternatively be written as: :n:`Ltac2 foo0 () := blah.` :n:`Ltac2 Notation foo := foo0 ().` This allows to keep the subsequent calls to the tactic as-is, as the expression `foo` will be implicitly expanded everywhere into `foo0 ()`. Such a trick also works for arguments, as arguments of syntactic notations are implicitly thunked. The second example could thus be written as follows. :n:`Ltac2 bar0 tac := tac (); tac (); tac ().` :n:`Ltac2 Notation bar := bar0.` Variable binding ~~~~~~~~~~~~~~~~ Ltac1 relies on complex dynamic trickery to be able to tell apart bound variables from terms, hypotheses, etc. There is no such thing in Ltac2, as variables are recognized statically and other constructions do not live in the same syntactic world. Due to the abuse of quotations, it can sometimes be complicated to know what a mere identifier represents in a tactic expression. We recommend tracking the context and letting the compiler print typing errors to understand what is going on. We list below the typical changes one has to perform depending on the static errors produced by the typechecker. In Ltac expressions +++++++++++++++++++ .. exn:: Unbound {| value | constructor } X * if `X` is meant to be a term from the current static environment, replace the problematic use by `'X`. * if `X` is meant to be a hypothesis from the local context, replace the problematic use by `&X`. In quotations +++++++++++++ .. exn:: The reference X was not found in the current environment * if `X` is meant to be a tactic expression bound by a Ltac2 let or function, replace the problematic use by `$X`. * if `X` is meant to be a hypothesis from the local context, replace the problematic use by `&X`. Exception catching ~~~~~~~~~~~~~~~~~~ Ltac2 features a proper exception-catching mechanism. For this reason, the Ltac1 mechanism relying on `fail` taking integers, and tacticals decreasing it, has been removed. Now exceptions are preserved by all tacticals, and it is your duty to catch them and re-raise them as needed. 
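For example, a minimal sketch of such a catch-and-re-raise helper could look as follows (the name ``trace_failure`` is ours and not part of the Ltac2 standard library; the backtracking continuation returned by ``Control.case`` is ignored for simplicity): .. coqdoc:: From Ltac2 Require Import Ltac2. (* run [tac]; if it raises an exception, print it and re-raise it *) Ltac2 trace_failure (tac : unit -> unit) := match Control.case tac with | Val _ => () | Err e => Message.print (Message.of_exn e); Control.zero e end.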
.. _thessreflectprooflanguage: ------------------------------ The |SSR| proof language ------------------------------ :Authors: Georges Gonthier, Assia Mahboubi, Enrico Tassi Introduction ------------ This chapter describes a set of tactics known as |SSR|, originally designed to provide support for the so-called *small scale reflection* proof methodology. Despite its original purpose, this set of tactics is of general interest and has been available in Coq since version 8.7. |SSR| was developed independently of the tactics described in Chapter :ref:`tactics`. Indeed, the scope of the tactics part of |SSR| largely overlaps with the standard set of tactics. Eventually the overlap will be reduced in future releases of Coq. Proofs written in |SSR| typically look quite different from the ones written using only tactics as per Chapter :ref:`tactics`. We summarise here the most “visible” differences in order to help the reader already accustomed to the tactics described in Chapter :ref:`tactics` to read this chapter. The first difference between the tactics described in this chapter and the tactics described in Chapter :ref:`tactics` is the way hypotheses are managed (we call this *bookkeeping*). In Chapter :ref:`tactics` the most common approach is to avoid explicitly moving hypotheses back and forth between the context and the conclusion of the goal. On the contrary, in |SSR| all bookkeeping is performed on the conclusion of the goal, using for that purpose a couple of syntactic constructions that behave like tacticals (and are often named as such in this chapter). The ``:`` tactical moves hypotheses from the context to the conclusion, while ``=>`` moves hypotheses from the conclusion to the context, and ``in`` moves a hypothesis from the context to the conclusion and back for the time of applying an action to it. While naming hypotheses is commonly done by means of an ``as`` clause in the basic model of Chapter :ref:`tactics`, here this task is devoted to ``=>``. Tactics frequently leave new assumptions in the conclusion, and are often followed by ``=>`` to explicitly name them. While generalizing the goal is normally not explicitly needed in Chapter :ref:`tactics`, it is an explicit operation performed by ``:``. .. seealso:: :ref:`bookkeeping_ssr` Besides the difference in bookkeeping model, this chapter includes specific tactics which have no explicit counterpart in Chapter :ref:`tactics`, such as tactics that mix forward steps and generalizations, like :tacn:`generally have` or :tacn:`without loss`. |SSR| adopts the point of view that rewriting, definition expansion, and partial evaluation all participate in a single, broader concept of rewriting a goal. As such, all these functionalities are provided by the :tacn:`rewrite ` tactic. |SSR| includes a little language of patterns to select subterms in tactics or tacticals where it matters. Its most notable application is in the :tacn:`rewrite ` tactic, where patterns are used to specify where the rewriting step has to take place. Finally, |SSR| supports so-called reflection steps, typically allowing one to switch back and forth between the computational and logical views of a concept.
To conclude it is worth mentioning that |SSR| tactics can be mixed with non-|SSR| tactics in the same proof, or in the same Ltac expression. The few exceptions to this statement are described in section :ref:`compatibility_issues_ssr`. Acknowledgments ~~~~~~~~~~~~~~~ The authors would like to thank Frédéric Blanqui, François Pottier and Laurence Rideau for their comments and suggestions. Usage ----- Getting started ~~~~~~~~~~~~~~~ To be available, the tactics presented in this manual need the following minimal set of libraries to be loaded: ``ssreflect.v``, ``ssrfun.v`` and ``ssrbool.v``. Moreover, these tactics come with a methodology specific to the authors of |SSR| and which requires a few options to be set in a different way than in their default way. All in all, this corresponds to working in the following context: .. coqtop:: in From Coq Require Import ssreflect ssrfun ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. seealso:: :flag:`Implicit Arguments`, :flag:`Strict Implicit`, :flag:`Printing Implicit Defensive` .. _compatibility_issues_ssr: Compatibility issues ~~~~~~~~~~~~~~~~~~~~ Requiring the above modules creates an environment which is mostly compatible with the rest of Coq, up to a few discrepancies: + New keywords (``is``) might clash with variable, constant, tactic or tactical names, or with quasi-keywords in tactic or notation commands. + New tactic(al)s names (:tacn:`last`, :tacn:`done`, :tacn:`have`, :tacn:`suffices`, :tacn:`suff`, :tacn:`without loss`, :tacn:`wlog`, :tacn:`congr`, :tacn:`unlock`) might clash with user tactic names. + Identifiers with both leading and trailing ``_``, such as ``_x_``, are reserved by |SSR| and cannot appear in scripts. + The extensions to the :tacn:`rewrite` tactic are partly incompatible with those available in current versions of Coq; in particular: ``rewrite .. in (type of k)`` or ``rewrite .. in *`` or any other variant of :tacn:`rewrite` will not work, and the |SSR| syntax and semantics for occurrence selection and rule chaining is different. Use an explicit rewrite direction (``rewrite <- …`` or ``rewrite -> …``) to access the Coq rewrite tactic. + New symbols (``//``, ``/=``, ``//=``) might clash with adjacent existing symbols. This can be avoided by inserting white spaces. + New constant and theorem names might clash with the user theory. This can be avoided by not importing all of |SSR|: .. coqtop:: in From Coq Require ssreflect. Import ssreflect.SsrSyntax. Note that the full syntax of |SSR|’s rewrite and reserved identifiers are enabled only if the ssreflect module has been required and if ``SsrSyntax`` has been imported. Thus a file that requires (without importing) ``ssreflect`` and imports ``SsrSyntax``, can be required and imported without automatically enabling |SSR|’s extended rewrite syntax and reserved identifiers. + Some user notations (in particular, defining an infix ``;``) might interfere with the "open term", parenthesis free, syntax of tactics such as have, set and pose. + The generalization of if statements to non-Boolean conditions is turned off by |SSR|, because it is mostly subsumed by Coercion to ``bool`` of the ``sumXXX`` types (declared in ``ssrfun.v``) and the :n:`if @term is @pattern then @term else @term` construct (see :ref:`pattern_conditional_ssr`). To use the generalized form, turn off the |SSR| Boolean ``if`` notation using the command: ``Close Scope boolean_if_scope``. + The following flags can be unset to make |SSR| more compatible with parts of Coq: .. 
flag:: SsrRewrite Controls whether the incompatible rewrite syntax is enabled (the default). Disabling the :term:`flag` makes the syntax compatible with other parts of Coq. .. flag:: SsrIdents Controls whether tactics can refer to |SSR|-generated variables that are in the form _xxx_. Scripts with explicit references to such variables are fragile; they are prone to failure if the proof is later modified or if the details of variable name generation change in future releases of Coq. The default is on, which gives an error message when the user tries to create such identifiers. Disabling the :term:`flag` generates a warning instead, increasing compatibility with other parts of Coq. Gallina extensions -------------------- Small-scale reflection makes an extensive use of the programming subset of Gallina, Coq’s logical specification language. This subset is quite suited to the description of functions on representations, because it closely follows the well-established design of the ML programming language. The |SSR| extension provides three additions to Gallina, for pattern assignment, pattern testing, and polymorphism; these mitigate minor but annoying discrepancies between Gallina and ML. Pattern assignment ~~~~~~~~~~~~~~~~~~ The |SSR| extension provides the following construct for irrefutable pattern matching, that is, destructuring assignment: .. prodn:: term += let: @pattern := @term in @term Note the colon ``:`` after the ``let`` keyword, which avoids any ambiguity with a function definition or Coq’s basic destructuring let. The let: construct differs from the latter in that + The pattern can be nested (deep pattern matching), in particular, this allows expression of the form: .. coqdoc:: let: exist (x, y) p_xy := Hp in … . + The destructured constructor is explicitly given in the pattern, and is used for type inference. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Definition f u := let: (m, n) := u in m + n. Check f. Using :g:`let:` Coq infers a type for :g:`f`, whereas with a usual ``let`` the same term requires an extra type annotation in order to type check. .. coqtop:: reset all Fail Definition f u := let (m, n) := u in m + n. The ``let:`` construct is just (more legible) notation for the primitive Gallina expression :n:`match @term with @pattern => @term end`. The |SSR| destructuring assignment supports all the dependent match annotations; the full syntax is .. prodn:: term += let: @pattern {? as @ident} {? in @pattern} := @term {? return @term} in @term where the second :token:`pattern` and the second :token:`term` are *types*. When the ``as`` and ``return`` keywords are both present, then :token:`ident` is bound in both the second :token:`pattern` and the second :token:`term`; variables in the optional type :token:`pattern` are bound only in the second term, and other variables in the first :token:`pattern` are bound only in the third :token:`term`, however. .. _pattern_conditional_ssr: Pattern conditional ~~~~~~~~~~~~~~~~~~~ The following construct can be used for a refutable pattern matching, that is, pattern testing: .. 
prodn:: term += if @term is @pattern then @term else @term Although this construct is not strictly ML (it does exist in variants such as the pattern calculus or the ρ-calculus), it turns out to be very convenient for writing functions on representations, because most such functions manipulate simple data types such as Peano integers, options, lists, or binary trees, and the pattern conditional above is almost always the right construct for analyzing such simple types. For example, the null and all list function(al)s can be defined as follows: .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Variable d: Set. Definition null (s : list d) := if s is nil then true else false. Variable a : d -> bool. Fixpoint all (s : list d) : bool := if s is cons x s' then a x && all s' else true. The pattern conditional also provides a notation for destructuring assignment with a refutable pattern, adapted to the pure functional setting of Gallina, which lacks a ``Match_Failure`` exception. Like ``let:`` above, the ``if…is`` construct is just (more legible) notation for the primitive Gallina expression :n:`match @term with @pattern => @term | _ => @term end`. Similarly, it will always be displayed as the expansion of this form in terms of primitive match expressions (where the default expression may be replicated). Explicit pattern testing also largely subsumes the generalization of the ``if`` construct to all binary data types; compare :n:`if @term is inl _ then @term else @term` and :n:`if @term then @term else @term`. The latter appears to be marginally shorter, but it is quite ambiguous, and indeed often requires an explicit annotation ``(term : {_} + {_})`` to type check, which evens the character count. Therefore, |SSR| restricts by default the condition of a plain if construct to the standard ``bool`` type; this avoids spurious type annotations. .. example:: .. coqtop:: all Definition orb b1 b2 := if b1 then true else b2. As pointed out in section :ref:`compatibility_issues_ssr`, this restriction can be removed with the command: ``Close Scope boolean_if_scope.`` Like ``let:`` above, the ``if-is-then-else`` construct supports the dependent match annotations: .. prodn:: term += if @term is @pattern as @ident in @pattern return @term then @term else @term As in ``let:`` the variable :token:`ident` (and those in the type pattern) are bound in the second :token:`term`; :token:`ident` is also bound in the third :token:`term` (but not in the fourth :token:`term`), while the variables in the first :token:`pattern` are bound only in the third :token:`term`. Another variant allows to treat the ``else`` case first: .. prodn:: term += if @term isn't @pattern then @term else @term Note that :token:`pattern` eventually binds variables in the third :token:`term` and not in the second :token:`term`. .. _parametric_polymorphism_ssr: Parametric polymorphism ~~~~~~~~~~~~~~~~~~~~~~~ Unlike ML, polymorphism in core Gallina is explicit: the type parameters of polymorphic functions must be declared explicitly, and supplied at each point of use. However, Coq provides two features to suppress redundant parameters: + Sections are used to provide (possibly implicit) parameters for a set of definitions. + Implicit arguments declarations are used to tell Coq to use type inference to deduce some parameters from the context at each point of call. 
The combination of these features provides a fairly good emulation of ML-style polymorphism, but unfortunately this emulation breaks down for higher-order programming. Implicit arguments are indeed not inferred at all points of use, but only at points of call, leading to expressions such as .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. Variable T : Type. Variable null : forall T : Type, T -> bool. Variable all : (T -> bool) -> list T -> bool. .. coqtop:: all Definition all_null (s : list T) := all (@null T) s. Unfortunately, such higher-order expressions are quite frequent in representation functions, especially those which use Coq's ``Structures`` to emulate Haskell typeclasses. Therefore, |SSR| provides a variant of Coq’s implicit argument declaration, which causes Coq to fill in some implicit parameters at each point of use, e.g., the above definition can be written: .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. Variable T : Type. Variable null : forall T : Type, T -> bool. Variable all : (T -> bool) -> list T -> bool. .. coqtop:: all Prenex Implicits null. Definition all_null (s : list T) := all null s. Better yet, it can be omitted entirely, since :g:`all_null s` isn’t much of an improvement over :g:`all null s`. The syntax of the new declaration is .. cmd:: Prenex Implicits {+ @ident__i} This command checks that each :n:`@ident__i` is the name of a functional constant, whose implicit arguments are prenex, i.e., the first :math:`n_i > 0` arguments of :n:`@ident__i` are implicit; then it assigns ``Maximal Implicit`` status to these arguments. As these prenex implicit arguments are ubiquitous and have often large display strings, it is strongly recommended to change the default display settings of Coq so that they are not printed (except after a ``Set Printing All`` command). All |SSR| library files thus start with the incantation .. coqdoc:: Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Anonymous arguments ~~~~~~~~~~~~~~~~~~~ When in a definition, the type of a certain argument is mandatory, but not its name, one usually uses “arrow” abstractions for prenex arguments, or the ``(_ : term)`` syntax for inner arguments. In |SSR|, the latter can be replaced by the open syntax ``of term`` or (equivalently) ``& term``, which are both syntactically equivalent to a ``(_ : term)`` expression. This feature almost behaves as the following extension of the binder syntax: .. prodn:: binder += {| & @term | of @term } Caveat: ``& T`` and ``of T`` abbreviations have to appear at the end of a binder list. For instance, the usual two-constructor polymorphic type list, i.e. the one of the standard ``List`` library, can be defined by the following declaration: .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Inductive list (A : Type) : Type := nil | cons of A & list A. Wildcards ~~~~~~~~~ The terms passed as arguments to |SSR| tactics can contain *holes*, materialized by wildcards ``_``. Since |SSR| allows a more powerful form of type inference for these arguments, it enhances the possibilities of using such wildcards. 
These holes are in particular used as a convenient shorthand for abstractions, especially in local definitions or type expressions. Wildcards may be interpreted as abstractions (see for example sections :ref:`definitions_ssr` and :ref:`structure_ssr`), or their content can be inferred from the whole context of the goal (see for example section :ref:`abbreviations_ssr`). .. _definitions_ssr: Definitions ~~~~~~~~~~~ .. tacn:: pose :name: pose (ssreflect) This tactic allows to add a defined constant to a proof context. |SSR| generalizes this tactic in several ways. In particular, the |SSR| pose tactic supports *open syntax*: the body of the definition does not need surrounding parentheses. For instance: .. coqdoc:: pose t := x + y. is a valid tactic expression. The pose tactic is also improved for the local definition of higher order terms. Local definitions of functions can use the same syntax as global ones. For example, the tactic :tacn:`pose ` supports parameters: .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test : True. pose f x y := x + y. The |SSR| pose tactic also supports (co)fixpoints, by providing the local counterpart of the ``Fixpoint f := …`` and ``CoFixpoint f := …`` constructs. For instance, the following tactic: .. coqdoc:: pose fix f (x y : nat) {struct x} : nat := if x is S p then S (f p y) else 0. defines a local fixpoint ``f``, which mimics the standard plus operation on natural numbers. Similarly, local cofixpoints can be defined by a tactic of the form: .. coqdoc:: pose cofix f (arg : T) := … . The possibility to include wildcards in the body of the definitions offers a smooth way of defining local abstractions. The type of “holes” is guessed by type inference, and the holes are abstracted. For instance the tactic: .. coqdoc:: pose f := _ + 1. is shorthand for: .. coqdoc:: pose f n := n + 1. When the local definition of a function involves both arguments and holes, hole abstractions appear first. For instance, the tactic: .. coqdoc:: pose f x := x + _. is shorthand for: .. coqdoc:: pose f n x := x + n. The interaction of the pose tactic with the interpretation of implicit arguments results in a powerful and concise syntax for local definitions involving dependent types. For instance, the tactic: .. coqdoc:: pose f x y := (x, y). adds to the context the local definition: .. coqdoc:: pose f (Tx Ty : Type) (x : Tx) (y : Ty) := (x, y). The generalization of wildcards makes the use of the pose tactic resemble ML-like definitions of polymorphic functions. .. _abbreviations_ssr: Abbreviations ~~~~~~~~~~~~~ .. tacn:: set @ident {? : @term } := {? @occ_switch } @term :name: set (ssreflect) The |SSR| ``set`` tactic performs abbreviations: it introduces a defined constant for a subterm appearing in the goal and/or in the context. |SSR| extends the :tacn:`set` tactic by supplying: + an open syntax, similarly to the :tacn:`pose ` tactic; + a more aggressive matching algorithm; + an improved interpretation of wildcards, taking advantage of the matching algorithm; + an improved occurrence selection mechanism allowing to abstract only selected occurrences of a term. .. prodn:: occ_switch ::= { {? {| + | - } } {* @natural } } where: + :token:`ident` is a fresh identifier chosen by the user. + term 1 is an optional type annotation. The type annotation term 1 can be given in open syntax (no surrounding parentheses). 
If no :token:`occ_switch` (described hereafter) is present, it is also the case for the second :token:`term`. On the other hand, in presence of :token:`occ_switch`, parentheses surrounding the second :token:`term` are mandatory. + In the occurrence switch :token:`occ_switch`, if the first element of the list is a natural, this element should be a number, and not an Ltac variable. The empty list ``{}`` is not interpreted as a valid occurrence switch, it is rather used as a flag to signal the intent of the user to clear the name following it (see :ref:`ssr_rewrite_occ_switch` and :ref:`introduction_ssr`) The tactic: .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Axiom f : nat -> nat. .. coqtop:: all Lemma test x : f x + f x = f x. set t := f _. .. coqtop:: all restart set t := {2}(f _). The type annotation may contain wildcards, which will be filled with the appropriate value by the matching process. The tactic first tries to find a subterm of the goal matching the second :token:`term` (and its type), and stops at the first subterm it finds. Then the occurrences of this subterm selected by the optional :token:`occ_switch` are replaced by :token:`ident` and a definition :n:`@ident := @term` is added to the context. If no :token:`occ_switch` is present, then all the occurrences are abstracted. Matching ```````` The matching algorithm compares a pattern :token:`term` with a subterm of the goal by comparing their heads and then pairwise unifying their arguments (modulo conversion). Head symbols match under the following conditions: + If the head of :token:`term` is a constant, then it should be syntactically equal to the head symbol of the subterm. + If this head is a projection of a canonical structure, then canonical structure equations are used for the matching. + If the head of term is *not* a constant, the subterm should have the same structure (λ abstraction, let…in structure …). + If the head of :token:`term` is a hole, the subterm should have at least as many arguments as :token:`term`. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test (x y z : nat) : x + y = z. set t := _ x. + In the special case where ``term`` is of the form ``(let f := t0 in f) t1 … tn`` , then the pattern ``term`` is treated as ``(_ t1 … tn)``. For each subterm in the goal having the form ``(A u1 … um)`` with m ≥ n, the matching algorithm successively tries to find the largest partial application ``(A u1 … uj)`` convertible to the head ``t0`` of ``term``. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test : (let f x y z := x + y + z in f 1) 2 3 = 6. set t := (let g y z := S y + z in g) 2. The notation ``unkeyed`` defined in ``ssreflect.v`` is a shorthand for the degenerate term ``let x := … in x``. Moreover: + Multiple holes in ``term`` are treated as independent placeholders. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test x y z : x + y = z. set t := _ + _. + The type of the subterm matched should fit the type (possibly casted by some type annotations) of the pattern ``term``. 
+ The replacement of the subterm found by the instantiated pattern should not capture variables. In the example above ``x`` is bound and should not be captured. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test : forall x : nat, x + 1 = 0. Fail set t := _ + 1. + Typeclass inference should fill in any residual hole, but matching should never assign a value to a global existential variable. .. _occurrence_selection_ssr: Occurrence selection ```````````````````` |SSR| provides a generic syntax for the selection of occurrences by their position indexes. These *occurrence switches* are shared by all |SSR| tactics which require control on subterm selection like rewriting, generalization, … An *occurrence switch* can be: + A list natural numbers ``{+ n1 … nm}`` of occurrences affected by the tactic. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Axiom f : nat -> nat. .. coqtop:: all Lemma test : f 2 + f 8 = f 2 + f 2. set x := {+1 3}(f 2). Notice that some occurrences of a given term may be hidden to the user, for example because of a notation. Setting the :flag:`Printing All` flag causes these hidden occurrences to be shown when the term is displayed. This setting should be used to find the correct coding of the occurrences to be selected [#1]_. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Notation "a < b":= (le (S a) b). Lemma test x y : x < y -> S x < S y. set t := S x. + A list of natural numbers between ``{n1 … nm}``. This is equivalent to the previous ``{+ n1 … nm}`` but the list should start with a number, and not with an Ltac variable. + A list ``{- n1 … nm}`` of occurrences *not* to be affected by the tactic. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Axiom f : nat -> nat. .. coqtop:: all Lemma test : f 2 + f 8 = f 2 + f 2. set x := {-2}(f 2). Note that, in this goal, it behaves like ``set x := {1 3}(f 2).`` + In particular, the switch ``{+}`` selects *all* the occurrences. This switch is useful to turn off the default behavior of a tactic which automatically clears some assumptions (see section :ref:`discharge_ssr` for instance). + The switch ``{-}`` imposes that *no* occurrences of the term should be affected by the tactic. The tactic: ``set x := {-}(f 2).`` leaves the goal unchanged and adds the definition ``x := f 2`` to the context. This kind of tactic may be used to take advantage of the power of the matching algorithm in a local definition, instead of copying large terms by hand. It is important to remember that matching *precedes* occurrence selection. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test x y z : x + y = x + y + z. set a := {2}(_ + _). Hence, in the following goal, the same tactic fails since there is only one occurrence of the selected term. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test x y z : (x + y) + (z + z) = z + z. Fail set a := {2}(_ + _). 
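In such a case, a more specific pattern lets the matching step find the intended subterm in the first place. For instance (a sketch, not part of the original set of examples), in the goal above one could write: .. coqdoc:: set a := {2}(z + z). which abstracts only the second occurrence of ``z + z``, i.e. the right-hand side.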
.. _basic_localization_ssr: Basic localization ~~~~~~~~~~~~~~~~~~ It is possible to define an abbreviation for a term appearing in the context of a goal thanks to the ``in`` tactical. .. tacv:: set @ident := @term in {+ @ident} This variant of :tacn:`set ` introduces a defined constant called :token:`ident` in the context, and folds it in the context entries mentioned on the right hand side of ``in``. The body of :token:`ident` is the first subterm matching these context entries (taken in the given order). .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. .. coqtop:: all Lemma test x t (Hx : x = 3) : x + t = 4. set z := 3 in Hx. .. tacv:: set @ident := @term in {+ @ident} * This variant matches :token:`term` and then folds :token:`ident` similarly in all the given context entries but also folds :token:`ident` in the goal. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. .. coqtop:: all Lemma test x t (Hx : x = 3) : x + t = 4. set z := 3 in Hx * . Indeed, remember that 4 is just a notation for (S 3). The use of the ``in`` tactical is not limited to the localization of abbreviations: for a complete description of the in tactical, see section :ref:`bookkeeping_ssr` and :ref:`localization_ssr`. .. _basic_tactics_ssr: Basic tactics ------------- A sizable fraction of proof scripts consists of steps that do not "prove" anything new, but instead perform menial bookkeeping tasks such as selecting the names of constants and assumptions or splitting conjuncts. Although they are logically trivial, bookkeeping steps are extremely important because they define the structure of the data-flow of a proof script. This is especially true for reflection-based proofs, which often involve large numbers of constants and assumptions. Good bookkeeping consists in always explicitly declaring (i.e., naming) all new constants and assumptions in the script, and systematically pruning irrelevant constants and assumptions in the context. This is essential in the context of an interactive development environment (IDE), because it facilitates navigating the proof, allowing to instantly "jump back" to the point at which a questionable assumption was added, and to find relevant assumptions by browsing the pruned context. While novice or casual Coq users may find the automatic name selection feature convenient, the usage of such a feature severely undermines the readability and maintainability of proof scripts, much like automatic variable declaration in programming languages. The |SSR| tactics are therefore designed to support precise bookkeeping and to eliminate name generation heuristics. The bookkeeping features of |SSR| are implemented as tacticals (or pseudo-tacticals), shared across most |SSR| tactics, and thus form the foundation of the |SSR| proof language. .. _bookkeeping_ssr: Bookkeeping ~~~~~~~~~~~ During the course of a proof Coq always present the user with a *sequent* whose general form is:: ci : Ti … dj := ej : Tj … Fk : Pk … ================= forall (xl : Tl) …, let ym := bm in … in Pn -> … -> C The *goal* to be proved appears below the double line; above the line is the *context* of the sequent, a set of declarations of *constants* ``ci`` , *defined constants* ``dj`` , and *facts* ``Fk`` that can be used to prove the goal (usually, ``Ti`` , ``Tj : Type`` and ``Pk : Prop``). The various kinds of declarations can come in any order. The top part of the context consists of declarations produced by the Section commands ``Variable``, ``Let``, and ``Hypothesis``. 
This *section context* is never affected by the |SSR| tactics: they only operate on the lower part — the *proof context*. As in the figure above, the goal often decomposes into a series of (universally) quantified *variables* ``(xl : Tl)``, local *definitions* ``let ym := bm in``, and *assumptions* ``P n ->``, and a *conclusion* ``C`` (as in the context, variables, definitions, and assumptions can appear in any order). The conclusion is what actually needs to be proved — the rest of the goal can be seen as a part of the proof context that happens to be “below the line”. However, although they are logically equivalent, there are fundamental differences between constants and facts on the one hand, and variables and assumptions on the others. Constants and facts are *unordered*, but *named* explicitly in the proof text; variables and assumptions are *ordered*, but *unnamed*: the display names of variables may change at any time because of α-conversion. Similarly, basic deductive steps such as apply can only operate on the goal because the Gallina terms that control their action (e.g., the type of the lemma used by ``apply``) only provide unnamed bound variables. [#2]_ Since the proof script can only refer directly to the context, it must constantly shift declarations from the goal to the context and conversely in between deductive steps. In |SSR| these moves are performed by two *tacticals* ``=>`` and ``:``, so that the bookkeeping required by a deductive step can be directly associated with that step, and that tactics in an |SSR| script correspond to actual logical steps in the proof rather than merely shuffle facts. Still, some isolated bookkeeping is unavoidable, such as naming variables and assumptions at the beginning of a proof. |SSR| provides a specific ``move`` tactic for this purpose. Now ``move`` does essentially nothing: it is mostly a placeholder for ``=>`` and ``:``. The ``=>`` tactical moves variables, local definitions, and assumptions to the context, while the ``:`` tactical moves facts and constants to the goal. .. example:: For example, the proof of [#3]_ .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma subnK : forall m n, n <= m -> m - n + n = m. might start with .. coqtop:: all move=> m n le_n_m. where move does nothing, but ``=> m n le_m_n`` changes the variables and assumption of the goal in the constants ``m n : nat`` and the fact ``le_n_m : n <= m``, thus exposing the conclusion ``m - n + n = m``. The ``:`` tactical is the converse of ``=>``, indeed it removes facts and constants from the context by turning them into variables and assumptions. .. coqtop:: all move: m le_n_m. turns back ``m`` and ``le_m_n`` into a variable and an assumption, removing them from the proof context, and changing the goal to ``forall m, n <= m -> m - n + n = m`` which can be proved by induction on ``n`` using ``elim: n``. Because they are tacticals, ``:`` and ``=>`` can be combined, as in .. coqdoc:: move: m le_n_m => p le_n_p. simultaneously renames ``m`` and ``le_m_n`` into ``p`` and ``le_n_p``, respectively, by first turning them into unnamed variables, then turning these variables back into constants and facts. Furthermore, |SSR| redefines the basic Coq tactics ``case``, ``elim``, and ``apply`` so that they can take better advantage of ``:`` and ``=>``. 
In there |SSR| variants, these tactic operate on the first variable or constant of the goal and they do not use or change the proof context. The ``:`` tactical is used to operate on an element in the context. .. example:: For instance the proof of ``subnK`` could continue with ``elim: n``. Instead of ``elim n`` (note, no colon), this has the advantage of removing n from the context. Better yet, this ``elim`` can be combined with previous move and with the branching version of the ``=>`` tactical (described in :ref:`introduction_ssr`), to encapsulate the inductive step in a single command: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma subnK : forall m n, n <= m -> m - n + n = m. move=> m n le_n_m. elim: n m le_n_m => [|n IHn] m => [_ | lt_n_m]. which breaks down the proof into two subgoals, the second one having in its context ``lt_n_m : S n <= m`` and ``IHn : forall m, n <= m -> m - n + n = m``. The ``:`` and ``=>`` tacticals can be explained very simply if one views the goal as a stack of variables and assumptions piled on a conclusion: + ``tactic : a b c`` pushes the context constants ``a``, ``b``, ``c`` as goal variables *before* performing tactic. + ``tactic => a b c`` pops the top three goal variables as context constants ``a``, ``b``, ``c``, *after* tactic has been performed. These pushes and pops do not need to balance out as in the examples above, so ``move: m le_n_m => p`` would rename ``m`` into ``p``, but leave an extra assumption ``n <= p`` in the goal. Basic tactics like apply and elim can also be used without the ’:’ tactical: for example we can directly start a proof of ``subnK`` by induction on the top variable ``m`` with .. coqdoc:: elim=> [|m IHm] n le_n. The general form of the localization tactical in is also best explained in terms of the goal stack:: tactic in a H1 H2 *. is basically equivalent to .. coqdoc:: move: a H1 H2; tactic => a H1 H2. with two differences: the in tactical will preserve the body of an if a is a defined constant, and if the ``*`` is omitted it will use a temporary abbreviation to hide the statement of the goal from ``tactic``. The general form of the in tactical can be used directly with the ``move``, ``case`` and ``elim`` tactics, so that one can write .. coqdoc:: elim: n => [|n IHn] in m le_n_m *. instead of .. coqdoc:: elim: n m le_n_m => [|n IHn] m le_n_m. This is quite useful for inductive proofs that involve many facts. See section :ref:`localization_ssr` for the general syntax and presentation of the in tactical. .. _the_defective_tactics_ssr: The defective tactics ~~~~~~~~~~~~~~~~~~~~~ In this section we briefly present the three basic tactics performing context manipulations and the main backward chaining tool. The move tactic. ```````````````` .. tacn:: move :name: move (ssreflect) This tactic, in its defective form, behaves like the :tacn:`hnf` tactic. .. example:: .. coqtop:: reset all Require Import ssreflect. Goal not False. move. More precisely, the :tacn:`move ` tactic inspects the goal and does nothing (:tacn:`idtac`) if an introduction step is possible, i.e. if the goal is a product or a ``let … in``, and performs :tacn:`hnf` otherwise. Of course this tactic is most often used in combination with the bookkeeping tacticals (see section :ref:`introduction_ssr` and :ref:`discharge_ssr`). 
These combinations mostly subsume the :tacn:`intros`, :tacn:`generalize`, :tacn:`revert`, :tacn:`rename`, :tacn:`clear` and :tacn:`pattern` tactics. .. _the_case_tactic_ssr: The case tactic ``````````````` .. tacn:: case :name: case (ssreflect) This tactic performs *primitive case analysis* on (co)inductive types; specifically, it destructs the top variable or assumption of the goal, exposing its constructor(s) and its arguments, as well as setting the value of its type family indices if it belongs to a type family (see section :ref:`type_families_ssr`). The |SSR| case tactic has a special behavior on equalities. If the top assumption of the goal is an equality, the case tactic “destructs” it as a set of equalities between the constructor arguments of its left and right hand sides, as per the tactic injection. For example, ``case`` changes the goal:: (x, y) = (1, 2) -> G. into:: x = 1 -> y = 2 -> G. The :tacn:`case` can generate the following warning: .. warn:: SSReflect: cannot obtain new equations out of ... The tactic was run on an equation that cannot generate simpler equations, for example `x = 1`. The warning can be silenced or made fatal by using the :opt:`Warnings` option and the `spurious-ssr-injection` key. Finally the :tacn:`case` tactic of |SSR| performs :g:`False` elimination, even if no branch is generated by this case operation. Hence the tactic :tacn:`case` on a goal of the form :g:`False -> G` will succeed and prove the goal. The elim tactic ``````````````` .. tacn:: elim :name: elim (ssreflect) This tactic performs inductive elimination on inductive types. In its defective form, the tactic performs inductive elimination on a goal whose top assumption has an inductive type. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test m : forall n : nat, m <= n. elim. .. _apply_ssr: The apply tactic ```````````````` .. tacn:: apply {? @term } :name: apply (ssreflect) This is the main backward chaining tactic of the proof system. It takes as argument any :token:`term` and applies it to the goal. Assumptions in the type of :token:`term` that don’t directly match the goal may generate one or more subgoals. In its defective form, this tactic is a synonym for:: intro top; first [refine top | refine (top _) | refine (top _ _) | …]; clear top. where :g:`top` is a fresh name, and the sequence of :tacn:`refine` tactics tries to catch the appropriate number of wildcards to be inserted. Note that this use of the :tacn:`refine` tactic implies that the tactic tries to match the goal up to expansion of constants and evaluation of subterms. :tacn:`apply ` has a special behavior on goals containing existential metavariables of sort :g:`Prop`. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Axiom lt_trans : forall a b c, a < b -> b < c -> a < c. .. coqtop:: all Lemma test : forall y, 1 < y -> y < 2 -> exists x : { n | n < 3 }, 0 < proj1_sig x. move=> y y_gt1 y_lt2; apply: (ex_intro _ (exist _ y _)). by apply: lt_trans y_lt2 _. by move=> y_lt3; apply: lt_trans y_gt1. Note that the last ``_`` of the tactic ``apply: (ex_intro _ (exist _ y _))`` represents a proof that ``y < 3``. Instead of generating the goal:: 0 < proj1_sig (exist (fun n : nat => n < 3) y ?Goal). the system tries to prove ``y < 3`` calling the trivial tactic. 
If it succeeds, let’s say because the context contains ``H : y < 3``, then the system generates the following goal:: 0 < proj1_sig (exist (fun n => n < 3) y H). Otherwise the missing proof is considered to be irrelevant, and is thus discharged generating the two goals shown above. Last, the user can replace the trivial tactic by defining an Ltac expression named ``ssrautoprop``. .. _discharge_ssr: Discharge ~~~~~~~~~ The general syntax of the discharging tactical ``:`` is: .. tacn:: @tactic {? @ident } : {+ @d_item } {? @clear_switch } :name: … : … (ssreflect) :undocumented: .. prodn:: d_item ::= {? {| @occ_switch | @clear_switch } } @term .. prodn:: clear_switch ::= { {+ @ident } } with the following requirements: + :token:`tactic` must be one of the four basic tactics described in :ref:`the_defective_tactics_ssr`, i.e., ``move``, ``case``, ``elim`` or ``apply``, the ``exact`` tactic (section :ref:`terminators_ssr`), the ``congr`` tactic (section :ref:`congruence_ssr`), or the application of the *view* tactical ‘/’ (section :ref:`interpreting_assumptions_ssr`) to one of move, case, or elim. + The optional :token:`ident` specifies *equation generation* (section :ref:`generation_of_equations_ssr`), and is only allowed if tactic is ``move``, ``case`` or ``elim``, or the application of the view tactical ‘/’ (section :ref:`interpreting_assumptions_ssr`) to ``case`` or ``elim``. + An :token:`occ_switch` selects occurrences of :token:`term`, as in :ref:`abbreviations_ssr`; :token:`occ_switch` is not allowed if :token:`tactic` is ``apply`` or ``exact``. + A clear item :token:`clear_switch` specifies facts and constants to be deleted from the proof context (as per the clear tactic). The ``:`` tactical first *discharges* all the :token:`d_item`, right to left, and then performs tactic, i.e., for each :token:`d_item`, starting with the last one : #. The |SSR| matching algorithm described in section :ref:`abbreviations_ssr` is used to find occurrences of term in the goal, after filling any holes ‘_’ in term; however if tactic is apply or exact a different matching algorithm, described below, is used [#4]_. #. These occurrences are replaced by a new variable; in particular, if term is a fact, this adds an assumption to the goal. #. If term is *exactly* the name of a constant or fact in the proof context, it is deleted from the context, unless there is an :token:`occ_switch`. Finally, tactic is performed just after the first :token:`d_item` has been generalized — that is, between steps 2 and 3. The names listed in the final :token:`clear_switch` (if it is present) are cleared first, before :token:`d_item` n is discharged. Switches affect the discharging of a :token:`d_item` as follows: + An :token:`occ_switch` restricts generalization (step 2) to a specific subset of the occurrences of term, as per section :ref:`abbreviations_ssr`, and prevents clearing (step 3). + All the names specified by a :token:`clear_switch` are deleted from the context in step 3, possibly in addition to term. For example, the tactic: .. coqdoc:: move: n {2}n (refl_equal n). + first generalizes ``(refl_equal n : n = n)``; + then generalizes the second occurrence of ``n``. + finally generalizes all the other occurrences of ``n``, and clears ``n`` from the proof context (assuming n is a proof constant). Therefore this tactic changes any goal ``G`` into .. coqdoc:: forall n n0 : nat, n = n0 -> G. where the name ``n0`` is picked by the Coq display function, and assuming ``n`` appeared only in ``G``. 
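To see this concretely, here is a minimal sketch (the lemma ``test`` and its statement ``n + 0 = n`` are ours, chosen so that ``n`` appears only in the goal):

.. coqdoc::

   Lemma test (n : nat) : n + 0 = n.
   move: n {2}n (refl_equal n).
   (* the goal now has the shape  forall n n0 : nat, n = n0 -> n + 0 = n  *)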
Finally, note that a discharge operation generalizes defined constants as variables, and not as local definitions. To override this behavior, prefix the name of the local definition with a ``@``, like in ``move: @n``. This is in contrast with the behavior of the ``in`` tactical (see section :ref:`localization_ssr`), which preserves local definitions by default. Clear rules ``````````` The clear step will fail if term is a proof constant that appears in other facts; in that case either the facts should be cleared explicitly with a :token:`clear_switch`, or the clear step should be disabled. The latter can be done by adding an :token:`occ_switch` or simply by putting parentheses around term: both ``move: (n).`` and ``move: {+}n.`` generalize ``n`` without clearing ``n`` from the proof context. The clear step will also fail if the :token:`clear_switch` contains an :token:`ident` that is not in the *proof* context. Note that |SSR| never clears a section constant. If tactic is ``move`` or ``case`` and an equation :token:`ident` is given, then clear (step 3) for :token:`d_item` is suppressed (see section :ref:`generation_of_equations_ssr`). Intro patterns (see section :ref:`introduction_ssr`) and the ``rewrite`` tactic (see section :ref:`rewriting_ssr`) let one place a :token:`clear_switch` in the middle of other items (namely identifiers, views and rewrite rules). This can trigger the addition of proof context items to the ones being explicitly cleared, and in turn this can result in clear errors (e.g. if the context item automatically added occurs in the goal). The relevant sections describe ways to avoid the unintended clear of context items. Matching for apply and exact ```````````````````````````` The matching algorithm for :token:`d_item` of the |SSR| ``apply`` and ``exact`` tactics exploits the type of the first :token:`d_item` to interpret wildcards in the other :token:`d_item` and to determine which occurrences of these should be generalized. Therefore, occurrence switches are not needed for ``apply`` and ``exact``. Indeed, the |SSR| tactic ``apply: H x`` is equivalent to ``refine (@H _ … _ x); clear H x`` with an appropriate number of wildcards between ``H`` and ``x``. Note that this means that matching for ``apply`` and ``exact`` has much more context to interpret wildcards; in particular it can accommodate the ``_`` :token:`d_item`, which would always be rejected after ``move:``. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Axiom f : nat -> nat. Axiom g : nat -> nat. .. coqtop:: all Lemma test (Hfg : forall x, f x = g x) a b : f a = g b. apply: trans_equal (Hfg _) _. This tactic is equivalent (see section :ref:`bookkeeping_ssr`) to: ``refine (trans_equal (Hfg _) _).`` and this is a common idiom for applying transitivity on the left hand side of an equation. .. _abstract_ssr: The abstract tactic ``````````````````` .. tacn:: abstract: {+ @d_item} :name: abstract (ssreflect) This tactic assigns an abstract constant previously introduced with the :n:`[: @ident ]` intro pattern (see section :ref:`introduction_ssr`). In a goal like the following:: m : nat abs : <hidden> n : nat ============= m < 5 + n The tactic :g:`abstract: abs n` first generalizes the goal with respect to :g:`n` (which is not visible to the abstract constant ``abs``) and then assigns ``abs``.
The resulting goal is:: m : nat n : nat ============= m < 5 + n Once this subgoal is closed, all other goals having ``abs`` in their context see the type assigned to ``abs``. In this case:: m : nat abs : forall n, m < 5 + n ============= … For a more detailed example the reader should refer to section :ref:`structure_ssr`. .. _introduction_ssr: Introduction in the context ~~~~~~~~~~~~~~~~~~~~~~~~~~~ The application of a tactic to a given goal can generate (quantified) variables, assumptions, or definitions, which the user may want to *introduce* as new facts, constants or defined constants, respectively. If the tactic splits the goal into several subgoals, each of them may require the introduction of different constants and facts. Furthermore it is very common to immediately decompose or rewrite with an assumption instead of adding it to the context, as the goal can often be simplified and even proved after this. All these operations are performed by the introduction tactical ``=>``, whose general syntax is .. tacn:: @tactic => {+ @i_item } :name: => :undocumented: .. prodn:: i_item ::= {| @i_pattern | @s_item | @clear_switch | @i_view | @i_block } .. prodn:: s_item ::= {| /= | // | //= } .. prodn:: i_view ::= {? %{%} } {| /@term | /ltac:( @tactic ) } .. prodn:: i_pattern ::= {| @ident | > | _ | ? | * | + | {? @occ_switch } {| -> | <- } | [ {?| @i_item } ] | - | [: {+ @ident } ] } .. prodn:: i_block ::= {| [^ @ident ] | [^~ {| @ident | @natural } ] } The ``=>`` tactical first executes :token:`tactic`, then the :token:`i_item`\s, left to right. An :token:`s_item` specifies a simplification operation; a :token:`clear_switch` specifies context pruning as in :ref:`discharge_ssr`. The :token:`i_pattern`\s can be seen as a variant of *intro patterns* (see :tacn:`intros`): each performs an introduction operation, i.e., pops some variables or assumptions from the goal. Simplification items ````````````````````` An :token:`s_item` can simplify the set of subgoals or the subgoals themselves: + ``//`` removes all the “trivial” subgoals that can be resolved by the |SSR| tactic :tacn:`done` described in :ref:`terminators_ssr`, i.e., it executes ``try done``. + ``/=`` simplifies the goal by performing partial evaluation, as per the tactic :tacn:`simpl` [#5]_. + ``//=`` combines both kinds of simplification; it is equivalent to ``/= //``, i.e., ``simpl; try done``. When an :token:`s_item` immediately precedes a :token:`clear_switch`, then the :token:`clear_switch` is executed *after* the :token:`s_item`, e.g., ``{IHn}//`` will solve some subgoals, possibly using the fact ``IHn``, and will erase ``IHn`` from the context of the remaining subgoals. Views ````` The first entry in the :token:`i_view` grammar rule, :n:`/@term`, represents a view (see section :ref:`views_and_reflection_ssr`). It interprets the top of the stack with the view :token:`term`. It is equivalent to :n:`move/@term`. A :token:`clear_switch` that immediately precedes an :token:`i_view` is complemented with the name of the view if and only if the :token:`i_view` is a simple proof context entry [#10]_. E.g. ``{}/v`` is equivalent to ``/v{v}``. This behavior can be avoided by separating the :token:`clear_switch` from the :token:`i_view` with the ``-`` intro pattern or by putting parentheses around the view. A :token:`clear_switch` that immediately precedes an :token:`i_view` is executed after the view application.
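For instance, here is a minimal sketch (the hypothesis ``v`` playing the role of the view, as well as the lemma itself, are ours): ``move=> {}/v`` interprets the top assumption through ``v`` and then clears ``v`` itself.

.. coqdoc::

   Lemma test (P Q : Prop) (v : P -> Q) : P -> Q.
   move=> {}/v q.   (* apply the view v to the top assumption, clear v, introduce q : Q *)
   exact: q.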
If the next :token:`i_item` is a view, then the view is applied to the assumption in top position once all the previous :token:`i_item` have been performed. The second entry in the :token:`i_view` grammar rule, ``/ltac:(`` :token:`tactic` ``)``, executes :token:`tactic`. Notations can be used to name tactics, for example .. coqtop:: none Tactic Notation "my" "ltac" "code" := idtac. .. coqtop:: in warn Notation "'myop'" := (ltac:(my ltac code)) : ssripat_scope. lets one write just ``/myop`` in the intro pattern. Note the scope annotation: views are interpreted opening the ``ssripat`` scope. We provide the following ltac views: ``/[dup]`` to duplicate the top of the stack, ``/[swap]`` to swap the two first elements and ``/[apply]`` to apply the top of the stack to the next. Intro patterns `````````````` |SSR| supports the following :token:`i_pattern`\s: :token:`ident` pops the top variable, assumption, or local definition into a new constant, fact, or defined constant :token:`ident`, respectively. Note that defined constants cannot be introduced when δ-expansion is required to expose the top variable or assumption. A :token:`clear_switch` (even an empty one) immediately preceding an :token:`ident` is complemented with that :token:`ident` if and only if the identifier is a simple proof context entry [#10]_. As a consequence, by prefixing the :token:`ident` with ``{}`` one can *replace* a context entry. This behavior can be avoided by separating the :token:`clear_switch` from the :token:`ident` with the ``-`` intro pattern. ``>`` pops every variable occurring in the rest of the stack. Type class instances are popped even if they don't occur in the rest of the stack. The tactic ``move=> >`` is equivalent to ``move=> ? ?`` on a goal such as:: forall x y, x < y -> G A typical use is ``move=>> H`` to name ``H`` the first assumption, in the example above ``x < y``. ``?`` pops the top variable into an anonymous constant or fact, whose name is picked by the tactic interpreter. |SSR| only generates names that cannot appear later in the user script [#6]_. ``_`` pops the top variable into an anonymous constant that will be deleted from the proof context of all the subgoals produced by the ``=>`` tactical. They should thus never be displayed, except in an error message if the constant is still actually used in the goal or context after the last :token:`i_item` has been executed (:token:`s_item` can erase goals or terms where the constant appears). ``*`` pops all the remaining apparent variables/assumptions as anonymous constants/facts. Unlike ``?`` and ``move``, the ``*`` :token:`i_item` does not expand definitions in the goal to expose quantifiers, so it may be useful to repeat a ``move=> *`` tactic, e.g., on the goal:: forall a b : bool, a <> b a first ``move=> *`` adds only ``_a_ : bool`` and ``_b_ : bool`` to the context; it takes a second ``move=> *`` to add ``_Hyp_ : _a_ = _b_``. ``+`` temporarily introduces the top variable. It is discharged at the end of the intro pattern. For example ``move=> + y`` on a goal:: forall x y, P is equivalent to ``move=> _x_ y; move: _x_`` that results in the goal:: forall x, P :n:`{? occ_switch } ->` (resp. :token:`occ_switch` ``<-``) pops the top assumption (which should be a rewritable proposition) into an anonymous fact, rewrites (resp. rewrites right to left) the goal with this fact (using the |SSR| ``rewrite`` tactic described in section :ref:`rewriting_ssr`, and honoring the optional occurrence selector), and finally deletes the anonymous fact from the context.
``[`` :token:`i_item` * ``| … |`` :token:`i_item` * ``]`` when it is the very *first* :token:`i_pattern` after the ``=>`` tactical *and* tactic is not a move, is a *branching* :token:`i_pattern`. It executes the sequence :n:`@i_item__i` on the i-th subgoal produced by tactic. The execution of tactic should thus generate exactly m subgoals, unless the ``[…]`` :token:`i_pattern` comes after an initial ``//`` or ``//=`` :token:`s_item` that closes some of the goals produced by ``tactic``, in which case exactly m subgoals should remain after the :token:`s_item`, or we have the trivial branching :token:`i_pattern` ``[]``, which always does nothing, regardless of the number of remaining subgoals. ``[`` :token:`i_item` * ``| … |`` :token:`i_item` * ``]`` when it is *not* the first :token:`i_pattern` or when tactic is a ``move``, is a *destructing* :token:`i_pattern`. It starts by destructing the top variable, using the |SSR| ``case`` tactic described in :ref:`the_defective_tactics_ssr`. It then behaves as the corresponding branching :token:`i_pattern`, executing the sequence :n:`@i_item__i` in the i-th subgoal generated by the case analysis; unless we have the trivial destructing :token:`i_pattern` ``[]``, the latter should generate exactly m subgoals, i.e., the top variable should have an inductive type with exactly m constructors [#7]_. While it is good style to use the :n:`@i_item__i` to pop the variables and assumptions corresponding to each constructor, this is not enforced by |SSR|. ``-`` does nothing, but counts as an intro pattern. It can also be used to force the interpretation of ``[`` :token:`i_item` * ``| … |`` :token:`i_item` * ``]`` as a case analysis like in ``move=> -[H1 H2]``. It can also be used to indicate explicitly the link between a view and a name like in ``move=> /eqP-H1``. Last, it can serve as a separator between views. Section :ref:`views_and_reflection_ssr` [#9]_ explains in which respect the tactic ``move=> /v1/v2`` differs from the tactic ``move=> /v1-/v2``. ``[:`` :token:`ident` ``…]`` introduces in the context an abstract constant for each :token:`ident`. Its type has to be fixed later on by using the ``abstract`` tactic. Before then, the type displayed is ``<hidden>``. Note that |SSR| does not support the syntax ``(ipat, …, ipat)`` for destructing intro patterns. Clear switch ```````````` Clears are deferred until the end of the intro pattern. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test x y : Nat.leb 0 x = true -> (Nat.leb 0 x) && (Nat.leb y 2) = true. move=> {x} ->. If the cleared names are reused in the same intro pattern, a renaming is performed behind the scenes. Facts mentioned in a clear switch must be valid names in the proof context (excluding the section context). Branching and destructuring ``````````````````````````` The rules for interpreting branching and destructing :token:`i_pattern` are motivated by the fact that it would be pointless to have a branching pattern if tactic is a ``move``, and in most of the remaining cases tactic is ``case`` or ``elim``, which implies destruction.
The rules above imply that: + ``move=> [a b].`` + ``case=> [a b].`` + ``case=> a b.`` are all equivalent, so which one to use is a matter of style; ``move`` should be used for casual decomposition, such as splitting a pair, and ``case`` should be used for actual decompositions, in particular for type families (see :ref:`type_families_ssr`) and proof by contradiction. The trivial branching :token:`i_pattern` can be used to force the branching interpretation, e.g.: + ``case=> [] [a b] c.`` + ``move=> [[a b] c].`` + ``case; case=> a b c.`` are all equivalent. Block introduction `````````````````` |SSR| supports the following :token:`i_block`\s: :n:`[^ @ident ]` *block destructing* :token:`i_pattern`. It performs a case analysis on the top variable and introduces, in one go, all the variables coming from the case analysis. The names of these variables are obtained by taking the names used in the inductive type declaration and prefixing them with :token:`ident`. If the intro pattern immediately follows a call to ``elim`` with a custom eliminator (see :ref:`custom_elim_ssr`) then the names are taken from the ones used in the type of the eliminator. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Record r := { a : nat; b := (a, 3); _ : bool; }. Lemma test : r -> True. Proof. move => [^ x ]. :n:`[^~ @ident ]` *block destructing* using :token:`ident` as a suffix. :n:`[^~ @natural ]` *block destructing* using :token:`natural` as a suffix. Only a :token:`s_item` is allowed between the elimination tactic and the block destructing. .. _generation_of_equations_ssr: Generation of equations ~~~~~~~~~~~~~~~~~~~~~~~ The generation of named equations option stores the definition of a new constant as an equation. The tactic: .. coqdoc:: move En: (size l) => n. where ``l`` is a list, replaces ``size l`` by ``n`` in the goal and adds the fact ``En : size l = n`` to the context. This is quite different from: .. coqdoc:: pose n := (size l). which generates a definition ``n := (size l)``. It is not possible to generalize or rewrite such a definition; on the other hand, it is automatically expanded during computation, whereas expanding the equation ``En`` requires explicit rewriting. The use of this equation name generation option with a ``case`` or an ``elim`` tactic changes the status of the first :token:`i_item`, in order to deal with the possible parameters of the constants introduced. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test (a b :nat) : a <> b. case E : a => [|n]. If the user does not provide a branching :token:`i_item` as first :token:`i_item`, or if the :token:`i_item` does not provide enough names for the arguments of a constructor, then the constants generated are introduced under fresh |SSR| names. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test (a b :nat) : a <> b. case E : a => H. Show 2. Combining the generation of named equations mechanism with the :tacn:`case` tactic strengthens the power of a case analysis. On the other hand, when combined with the :tacn:`elim` tactic, this feature is mostly useful for debug purposes, to trace the values of decomposed parameters and pinpoint failing branches. .. 
_type_families_ssr: Type families ~~~~~~~~~~~~~ When the top assumption of a goal has an inductive type, two specific operations are possible: the case analysis performed by the :tacn:`case` tactic, and the application of an induction principle, performed by the :tacn:`elim` tactic. When this top assumption has an inductive type, which is moreover an instance of a type family, Coq may need help from the user to specify which occurrences of the parameters of the type should be substituted. .. tacv:: case: {+ @d_item } / {+ @d_item } elim: {+ @d_item } / {+ @d_item } A specific ``/`` switch indicates the type family parameters of the type of a :token:`d_item` immediately following this ``/`` switch. The :token:`d_item` on the right side of the ``/`` switch are discharged as described in section :ref:`discharge_ssr`. The case analysis or elimination will be done on the type of the top assumption after these discharge operations. Every :token:`d_item` preceding the ``/`` is interpreted as arguments of this type, which should be an instance of an inductive type family. These terms are not actually generalized, but rather selected for substitution. Occurrence switches can be used to restrict the substitution. If a term is left completely implicit (e.g. writing just ``_``), then a pattern is inferred looking at the type of the top assumption. This allows for the compact syntax: .. coqdoc:: case: {2}_ / eqP. where ``_`` is interpreted as ``(_ == _)`` since ``eqP T a b : reflect (a = b) (a == b)`` and ``reflect`` is a type family with one index. Moreover, if the :token:`d_item` list is too short, it is padded with an initial sequence of ``_`` of the right length. .. example:: Here is a small example on lists. We first define a function which adds an element at the end of a given list. .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Require Import List. Section LastCases. Variable A : Type. Implicit Type l : list A. Fixpoint add_last a l : list A := match l with | nil => a :: nil | hd :: tl => hd :: (add_last a tl) end. Then we define an inductive predicate for case analysis on lists according to their last element: .. coqtop:: all Inductive last_spec : list A -> Type := | LastSeq0 : last_spec nil | LastAdd s x : last_spec (add_last x s). Theorem lastP : forall l : list A, last_spec l. Admitted. We are now ready to use ``lastP`` in conjunction with ``case``. .. coqtop:: all Lemma test l : (length l) * 2 = length (l ++ l). case: (lastP l). Applied to the same goal, the tactic ``case: l / (lastP l)`` generates the same subgoals but ``l`` has been cleared from both contexts: .. coqtop:: all restart case: l / (lastP l). Again applied to the same goal: .. coqtop:: all restart abort case: {1 3}l / (lastP l). Note that selected occurrences on the left of the ``/`` switch have been substituted with ``l`` instead of being affected by the case analysis. The equation name generation feature combined with a type family ``/`` switch generates an equation for the *first* dependent :token:`d_item` specified by the user. Again starting with the above goal, the command: .. example:: .. coqtop:: all Lemma test l : (length l) * 2 = length (l ++ l). case E: {1 3}l / (lastP l) => [|s x]. Show 2. There must be at least one :token:`d_item` to the left of the ``/`` switch; this prevents any confusion with the view feature.
However, the :token:`d_item` to the right of the ``/`` are optional, and if they are omitted the first assumption provides the instance of the type family. The equation always refers to the first :token:`d_item` in the actual tactic call, before any padding with initial ``_``. Thus, if an inductive type has two family parameters, it is possible to have |SSR| generate an equation for the second one by omitting the pattern for the first; note however that this will fail if the type of the second parameter depends on the value of the first parameter. Control flow ------------ .. _indentation_ssr: Indentation and bullets ~~~~~~~~~~~~~~~~~~~~~~~ A linear development of Coq scripts gives little information on the structure of the proof. In addition, replaying a proof after some changes in the statement to be proved will usually not display information to distinguish between the various branches of case analysis for instance. To help the user in this organization of the proof script at development time, |SSR| provides some bullets to highlight the structure of branching proofs. The available bullets are ``-``, ``+`` and ``*``. Combined with tabulation, this lets us highlight four nested levels of branching; the most we have ever needed is three. Indeed, the use of “simpl and closing” switches, of terminators (see above section :ref:`terminators_ssr`) and selectors (see section :ref:`selectors_ssr`) is powerful enough to avoid most of the time more than two levels of indentation. Here is a fragment of such a structured script:: case E1: (abezoutn _ _) => [[| k1] [| k2]]. - rewrite !muln0 !gexpn0 mulg1 => H1. move/eqP: (sym_equal F0); rewrite -H1 orderg1 eqn_mul1. by case/andP; move/eqP. - rewrite muln0 gexpn0 mulg1 => H1. have F1: t %| t * S k2.+1 - 1. apply: (@dvdn_trans (orderg x)); first by rewrite F0; exact: dvdn_mull. rewrite orderg_dvd; apply/eqP; apply: (mulgI x). rewrite -{1}(gexpn1 x) mulg1 gexpn_add leq_add_sub //. by move: P1; case t. rewrite dvdn_subr in F1; last by exact: dvdn_mulr. + rewrite H1 F0 -{2}(muln1 (p ^ l)); congr (_ * _). by apply/eqP; rewrite -dvdn1. + by move: P1; case: (t) => [| [| s1]]. - rewrite muln0 gexpn0 mul1g => H1. ... .. _terminators_ssr: Terminators ~~~~~~~~~~~ To further structure scripts, |SSR| supplies *terminating* tacticals to explicitly close off tactics. When replaying scripts, we then have the nice property that an error immediately occurs when a closed tactic fails to prove its subgoal. It is hence recommended practice that the proof of any subgoal should end with a tactic which *fails if it does not solve the current goal*, like :tacn:`discriminate`, :tacn:`contradiction` or :tacn:`assumption`. In fact, |SSR| provides a generic tactical which turns any tactic into a closing one (similar to :tacn:`now`). Its general syntax is: .. tacn:: by @tactic :name: by :undocumented: The Ltac expression :n:`by [@tactic | @tactic | …]` is equivalent to :n:`do [done | by @tactic | by @tactic | …]`, which corresponds to the standard Ltac expression :n:`first [done | @tactic; done | @tactic; done | …]`. In the script provided as example in section :ref:`indentation_ssr`, the paragraph corresponding to each sub-case ends with a tactic line prefixed with a ``by``, like in: .. coqdoc:: by apply/eqP; rewrite -dvdn1. .. tacn:: done :name: done The :tacn:`by` tactical is implemented using the user-defined, and extensible :tacn:`done` tactic. This :tacn:`done` tactic tries to solve the current goal by some trivial means and fails if it doesn’t succeed. 
Indeed, the tactic expression :n:`by @tactic` is equivalent to :n:`@tactic; done`. Conversely, the tactic ``by [ ]`` is equivalent to :tacn:`done`. The default implementation of the done tactic, in the ``ssreflect.v`` file, is: .. coqdoc:: Ltac done := trivial; hnf; intros; solve [ do ![solve [trivial | apply: sym_equal; trivial] | discriminate | contradiction | split] | case not_locked_false_eq_true; assumption | match goal with H : ~ _ |- _ => solve [case H; trivial] end ]. The lemma :g:`not_locked_false_eq_true` is needed to discriminate *locked* boolean predicates (see section :ref:`locking_ssr`). The iterator tactical do is presented in section :ref:`iteration_ssr`. This tactic can be customized by the user, for instance to include an :tacn:`auto` tactic. A natural and common way of closing a goal is to apply a lemma which is the exact one needed for the goal to be solved. The defective form of the tactic: .. coqdoc:: exact. is equivalent to: .. coqdoc:: do [done | by move=> top; apply top]. where ``top`` is a fresh name assigned to the top assumption of the goal. This applied form is supported by the ``:`` discharge tactical, and the tactic: .. coqdoc:: exact: MyLemma. is equivalent to: .. coqdoc:: by apply: MyLemma. (see section :ref:`discharge_ssr` for the documentation of the apply: combination). .. warning:: The list of tactics (possibly chained by semicolons) that follows the ``by`` keyword is considered to be a parenthesized block applied to the current goal. Hence for example if the tactic: .. coqdoc:: by rewrite my_lemma1. succeeds, then the tactic: .. coqdoc:: by rewrite my_lemma1; apply my_lemma2. usually fails since it is equivalent to: .. coqdoc:: by (rewrite my_lemma1; apply my_lemma2). .. _selectors_ssr: Selectors ~~~~~~~~~ .. tacn:: last first :name: last; first (ssreflect) When composing tactics, the two tacticals ``first`` and ``last`` let the user restrict the application of a tactic to only one of the subgoals generated by the previous tactic. This covers the frequent cases where a tactic generates two subgoals one of which can be easily disposed of. This is another powerful way of linearization of scripts, since it happens very often that a trivial subgoal can be solved in a less than one line tactic. For instance, :n:`@tactic ; last by @tactic` tries to solve the last subgoal generated by the first tactic using the given second tactic, and fails if it does not succeed. Its analogue :n:`@tactic ; first by @tactic` tries to solve the first subgoal generated by the first tactic using the second given tactic, and fails if it does not succeed. |SSR| also offers an extension of this facility, by supplying tactics to *permute* the subgoals generated by a tactic. .. tacv:: last first first last :name: last first; first last These two equivalent tactics invert the order of the subgoals in focus. .. tacv:: last @natural first If :token:`natural`\'s value is :math:`k`, this tactic rotates the :math:`n` subgoals :math:`G_1` , …, :math:`G_n` in focus. Subgoal :math:`G_{n + 1 − k}` becomes the first, and the circular order of subgoals remains unchanged. .. tacn:: first @natural last :name: first (ssreflect) If :token:`natural`\'s value is :math:`k`, this tactic rotates the :math:`n` subgoals :math:`G_1` , …, :math:`G_n` in focus. Subgoal :math:`G_{k + 1 \bmod n}` becomes the first, and the circular order of subgoals remains unchanged. 
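As an illustration of these selectors (a minimal sketch, with a lemma of our own), a trivial side subgoal can be closed in-line with ``last by``:

.. coqdoc::

   Lemma test (P : Prop) (p : P) : P /\ 0 = 0.
   split; last by [].   (* the trivial second subgoal 0 = 0 is closed on the fly *)
   exact: p.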
Finally, the tactics ``last`` and ``first`` combine with the branching syntax of Ltac: if the tactic generates n subgoals on a given goal, then the tactic .. coqdoc:: tactic ; last k [ tactic1 |…| tacticm ] || tacticn. where :token:`natural` denotes the integer :math:`k` as above, applies ``tactic1`` to the :math:`n−k+1`\-th goal, ``tactic2`` to the :math:`n−k+2`\-th goal, and so on up to ``tacticm``, and ``tacticn`` to the others. .. example:: Here is a small example illustrating this construction. .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Inductive test : nat -> Prop := | C1 n of n = 1 : test n | C2 n of n = 2 : test n | C3 n of n = 3 : test n | C4 n of n = 4 : test n. Lemma example n (t : test n) : True. case: t; last 2 [move=> k| move=> l]; idtac. .. _iteration_ssr: Iteration ~~~~~~~~~ .. tacn:: do {? @mult } {| @tactic | [ {+| @tactic } ] } :name: do (ssreflect) This tactical offers precise control over the repetition of tactics. :token:`mult` is a *multiplier*. Brackets can only be omitted if a single tactic is given *and* a multiplier is present. A tactic of the form: .. coqdoc:: do [ tactic 1 | … | tactic n ]. is equivalent to the standard Ltac expression: .. coqdoc:: first [ tactic 1 | … | tactic n ]. The optional multiplier :token:`mult` specifies how many times the action of tactic should be repeated on the current subgoal. There are four kinds of multipliers: .. prodn:: mult ::= {| @natural ! | ! | @natural ? | ? } Their meaning is: + ``n!`` the step tactic is repeated exactly n times (where n is a positive integer argument). + ``!`` the step tactic is repeated as many times as possible, and done at least once. + ``?`` the step tactic is repeated as many times as possible, optionally. + ``n?`` the step tactic is repeated up to n times, optionally. For instance, the tactic: .. coqdoc:: tactic; do 1? rewrite mult_comm. rewrites the lemma ``mult_comm`` at most once in all the subgoals generated by tactic, whereas the tactic: .. coqdoc:: tactic; do 2! rewrite mult_comm. rewrites the lemma ``mult_comm`` exactly twice in all the subgoals generated by tactic, and fails if this rewrite is not possible in some subgoal. Note that the combination of multipliers and rewrite is so often used that multipliers are in fact integrated into the syntax of the |SSR| rewrite tactic, see section :ref:`rewriting_ssr`. .. _localization_ssr: Localization ~~~~~~~~~~~~ In sections :ref:`basic_localization_ssr` and :ref:`bookkeeping_ssr`, we have already presented the *localization* tactical ``in``, whose general syntax is: .. tacn:: @tactic in {+ @ident} {? * } :name: in :undocumented: where :token:`ident` is a name in the context. On the left side of ``in``, :token:`tactic` can be ``move``, ``case``, ``elim``, ``rewrite``, ``set``, or any tactic formed with the general iteration tactical ``do`` (see section :ref:`iteration_ssr`). The operation described by tactic is performed in the facts listed after ``in`` and in the goal if a ``*`` ends the list of names. The ``in`` tactical successively: + generalizes the selected hypotheses, possibly “protecting” the goal if ``*`` is not present, + performs :token:`tactic` on the obtained goal, + reintroduces the generalized facts, under the same names. This defective form of the ``do`` tactical is useful to avoid clashes between the standard Ltac ``in`` and the |SSR| tactical ``in``. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect.
Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Ltac mytac H := rewrite H. Lemma test x y (H1 : x = y) (H2 : y = 3) : x + y = 6. do [mytac H2] in H1 *. The last tactic rewrites the hypothesis ``H2 : y = 3`` both in ``H1 : x = y`` and in the goal ``x + y = 6``. By default ``in`` keeps the body of local definitions. To erase the body of a local definition during the generalization phase, the name of the local definition must be written between parentheses, like in ``rewrite H in H1 (def_n) H2.`` .. tacv:: @tactic in {+ {| @clear_switch | {? @}@ident | ( @ident ) | ( {? @}@ident := @c_pattern ) } } {? * } This is the most general form of the ``in`` tactical. In its simplest form the last option lets one rename hypotheses that can’t be cleared (like section variables). For example, ``(y := x)`` generalizes over ``x`` and reintroduces the generalized variable under the name ``y`` (and does not clear ``x``). For a more precise description of this form of localization refer to :ref:`advanced_generalization_ssr`. .. _structure_ssr: Structure ~~~~~~~~~ Forward reasoning structures the script by explicitly specifying some assumptions to be added to the proof context. It is closely associated with the declarative style of proof, since an extensive use of these highlighted statements makes the script closer to a (very detailed) textbook proof. Forward chaining tactics let one state an intermediate lemma and start a piece of script dedicated to the proof of this statement. The use of closing tactics (see section :ref:`terminators_ssr`) and of indentation makes syntactically explicit the portion of the script building the proof of the intermediate statement. The have tactic. ```````````````` .. tacn:: have : @term :name: have This is the main |SSR| forward reasoning tactic. It can be used in two modes: one starts a new (sub)proof for an intermediate result in the main proof, and the other explicitly provides a proof term for this intermediate step. This tactic supports open syntax for :token:`term`. Applied to a goal ``G``, it generates a first subgoal requiring a proof of :token:`term` in the context of ``G``. The second generated subgoal is of the form :n:`term -> G`, where term becomes the new top assumption, instead of being introduced with a fresh name. At the proof-term level, the ``have`` tactic creates a β redex, and introduces the lemma under a fresh name, automatically chosen. Like in the case of the ``pose`` tactic (see section :ref:`definitions_ssr`), the types of the holes are abstracted in term. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test : True. have: _ * 0 = 0. The invocation of ``have`` is equivalent to: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Lemma test : True. .. coqtop:: all have: forall n : nat, n * 0 = 0. The ``have`` tactic also enjoys the same abstraction mechanism as the ``pose`` tactic for the non-inferred implicit arguments. For instance, the tactic: .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Lemma test : True. .. coqtop:: all have: forall x y, (x, y) = (x, y + 0). opens a new subgoal where the type of ``x`` is quantified.
The behavior of the defective have tactic makes it possible to generalize it in the following general construction: .. tacn:: have {* @i_item } {? @i_pattern } {? {| @s_item | {+ @ssr_binder } } } {? : @term } {? {| := @term | by @tactic } } :undocumented: Open syntax is supported for both :token:`term`. For the description of :token:`i_item` and :token:`s_item` see section :ref:`introduction_ssr`. The first mode of the have tactic, which opens a sub-proof for an intermediate result, uses tactics of the form: .. tacv:: have @clear_switch @i_item : @term by @tactic :undocumented: which behave like: .. coqdoc:: have: term ; first by tactic. move=> clear_switch i_item. Note that the :token:`clear_switch` *precedes* the :token:`i_item`, which allows to reuse a name of the context, possibly used by the proof of the assumption, to introduce the new assumption itself. The ``by`` feature is especially convenient when the proof script of the statement is very short, basically when it fits in one line like in: .. coqdoc:: have H23 : 3 + 2 = 2 + 3 by rewrite addnC. The possibility of using :token:`i_item` supplies a very concise syntax for the further use of the intermediate step. For instance, .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test a : 3 * a - 1 = a. have -> : forall x, x * a = a. Note how the second goal was rewritten using the stated equality. Also note that in this last subgoal, the intermediate result does not appear in the context. Thanks to the deferred execution of clears, the following idiom is also supported (assuming x occurs in the goal only): .. coqdoc:: have {x} -> : x = y. Another frequent use of the intro patterns combined with ``have`` is the destruction of existential assumptions like in the tactic: .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test : True. have [x Px]: exists x : nat, x > 0; last first. An alternative use of the ``have`` tactic is to provide the explicit proof term for the intermediate lemma, using tactics of the form: .. tacv:: have {? @ident } := @term This tactic creates a new assumption of type the type of :token:`term`. If the optional :token:`ident` is present, this assumption is introduced under the name :token:`ident`. Note that the body of the constant is lost for the user. Again, non-inferred implicit arguments and explicit holes are abstracted. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test : True. have H := forall x, (x, x) = (x, x). adds to the context ``H : Type -> Prop.`` This is a schematic example but the feature is specially useful when the proof term to give involves for instance a lemma with some hidden implicit arguments. After the :token:`i_pattern`, a list of binders is allowed. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. From Coq Require Import ZArith Lia. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test : True. have H x (y : nat) : 2 * x + y = x + x + y by lia. A proof term provided after ``:=`` can mention these bound variables (that are automatically introduced with the given names). 
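For instance (a minimal sketch of our own), the bound variables ``x`` and ``y`` declared after the name ``H`` are visible in the proof term given after ``:=``:

.. coqdoc::

   Lemma test : True.
   have H x (y : nat) : y + x = y + x := eq_refl (y + x).
   (* H : forall x y : nat, y + x = y + x *)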
Since the :token:`i_pattern` can be omitted, to avoid ambiguity, bound variables can be surrounded with parentheses even if no type is specified: .. coqtop:: all restart have (x) : 2 * x = x + x by lia. The :token:`i_item` and :token:`s_item` can be used to interpret the asserted hypothesis with views (see section :ref:`views_and_reflection_ssr`) or simplify the resulting goals. The :tacn:`have` tactic also supports a ``suff`` modifier which allows for asserting that a given statement implies the current goal without copying the goal itself. .. example:: .. coqtop:: all restart abort have suff H : 2 + 2 = 3; last first. Note that H is introduced in the second goal. The ``suff`` modifier is not compatible with the presence of a list of binders. .. _generating_let_ssr: Generating let in context entries with have ``````````````````````````````````````````` Since |SSR| 1.5 the :tacn:`have` tactic supports a “transparent” modifier to generate let in context entries: the ``@`` symbol in front of the context entry name. .. example:: .. coqtop:: none Set Printing Depth 15. .. coqtop:: all abort Inductive Ord n := Sub x of x < n. Notation "'I_ n" := (Ord n) (at level 8, n at level 2, format "''I_' n"). Arguments Sub {_} _ _. Lemma test n m (H : m + 1 < n) : True. have @i : 'I_n by apply: (Sub m); lia. Note that the subterm produced by :tacn:`lia` is in general huge and uninteresting, and hence one may want to hide it. For this purpose the ``[: name ]`` intro pattern and the tactic ``abstract`` (see :ref:`abstract_ssr`) are provided. .. example:: .. coqtop:: all abort Lemma test n m (H : m + 1 < n) : True. have [:pm] @i : 'I_n by apply: (Sub m); abstract: pm; lia. The type of ``pm`` can be cleaned up by its annotation ``(*1*)`` by just simplifying it. The annotations are there for technical reasons only. When intro patterns for abstract constants are used in conjunction with have and an explicit term, they must be used as follows: .. example:: .. coqtop:: all abort Lemma test n m (H : m + 1 < n) : True. have [:pm] @i : 'I_n := Sub m pm. by lia. In this case the abstract constant ``pm`` is assigned by using it in the term that follows ``:=`` and its corresponding goal is left to be solved. Goals corresponding to intro patterns for abstract constants are opened in the order in which the abstract constants are declared (not in the “order” in which they are used in the term). Note that abstract constants do respect scopes. Hence, if a variable is declared after their introduction, it has to be properly generalized (i.e. explicitly passed to the abstract constant when one makes use of it). .. example:: .. coqtop:: all abort Lemma test n m (H : m + 1 < n) : True. have [:pm] @i k : 'I_(n+k) by apply: (Sub m); abstract: pm k; lia. Last, notice that the use of intro patterns for abstract constants is orthogonal to the transparent flag ``@`` for have. The have tactic and typeclass resolution ``````````````````````````````````````````` Since |SSR| 1.5 the ``have`` tactic behaves as follows with respect to typeclass inference. .. coqtop:: none Axiom ty : Type. Axiom t : ty. Goal True. .. coqtop:: all have foo : ty. Full inference for ``ty``. The first subgoal demands a proof of such instantiated statement. .. A strange bug prevents using the coqtop directive here .. coqdoc:: have foo : ty := . No inference for ``ty``. Unresolved instances are quantified in ``ty``. The first subgoal demands a proof of such quantified statement. Note that no proof term follows ``:=``, hence two subgoals are generated. .. 
coqtop:: all restart have foo : ty := t. No inference for ``ty`` and ``t``. .. coqtop:: all restart abort have foo := t. No inference for ``t``. Unresolved instances are quantified in the (inferred) type of ``t`` and abstracted in ``t``. .. flag:: SsrHave NoTCResolution This :term:`flag` restores the behavior of |SSR| 1.4 and below (never resolve typeclasses). Variants: the suff and wlog tactics ``````````````````````````````````` As it is often the case in mathematical textbooks, forward reasoning may be used in slightly different variants. One of these variants is to show that the intermediate step L easily implies the initial goal G. By easily we mean here that the proof of L ⇒ G is shorter than the one of L itself. This kind of reasoning step usually starts with: “It suffices to show that …”. This is such a frequent way of reasoning that |SSR| has a variant of the ``have`` tactic called ``suffices`` (whose abridged name is ``suff``). The ``have`` and ``suff`` tactics are equivalent and have the same syntax but: + the order of the generated subgoals is inverted + the optional clear item is still performed in the *second* branch. This means that the tactic: .. coqdoc:: suff {H} H : forall x : nat, x >= 0. fails if the context of the current goal indeed contains an assumption named ``H``. The rationale of this clearing policy is to make possible “trivial” refinements of an assumption, without changing its name in the main branch of the reasoning. The ``have`` modifier can follow the ``suff`` tactic. .. example:: .. coqtop:: none Axioms G P : Prop. .. coqtop:: all abort Lemma test : G. suff have H : P. Note that, in contrast with ``have suff``, the name H has been introduced in the first goal. Another useful construct is reduction, showing that a particular case is in fact general enough to prove a general property. This kind of reasoning step usually starts with: “Without loss of generality, we can suppose that …”. Formally, this corresponds to the proof of a goal ``G`` by introducing a cut ``wlog_statement -> G``. Hence the user shall provide a proof for both ``(wlog_statement -> G) -> G`` and ``wlog_statement -> G``. However, such cuts are usually rather painful to perform by hand, because the statement ``wlog_statement`` is tedious to write by hand, and sometimes even to read. |SSR| implements this kind of reasoning step through the :tacn:`without loss` tactic, whose short name is :tacn:`wlog`. It offers support to describe the shape of the cut statements, by providing the simplifying hypothesis and by pointing at the elements of the initial goals which should be generalized. The general syntax of without loss is: .. tacn:: wlog {? suff } {? @clear_switch } {? @i_item } : {* @ident } / @term without loss {? suff } {? @clear_switch } {? @i_item } : {* @ident } / @term :name: wlog; without loss :undocumented: where each :token:`ident` is a constant in the context of the goal. Open syntax is supported for :token:`term`. In its defective form: .. tacv:: wlog: / @term without loss: / @term :undocumented: on a goal G, it creates two subgoals: a first one to prove the formula (term -> G) -> G and a second one to prove the formula term -> G. If the optional list of :token:`ident` is present on the left side of ``/``, these constants are generalized in the premise (term -> G) of the first subgoal. By default bodies of local definitions are erased. This behavior can be inhibited by prefixing the name of the local definition with the ``@`` character. In the second subgoal, the tactic: .. 
coqdoc:: move=> clear_switch i_item. is performed if at least one of these optional switches is present in the :tacn:`wlog` tactic. The :tacn:`wlog` tactic is especially useful when a symmetry argument simplifies a proof. Here is an example showing the beginning of the proof that the quotient and remainder of Euclidean division on natural numbers are unique. .. example:: .. coqtop:: all Lemma quo_rem_unicity d q1 q2 r1 r2 : q1*d + r1 = q2*d + r2 -> r1 < d -> r2 < d -> (q1, r1) = (q2, r2). wlog: q1 q2 r1 r2 / q1 <= q2. by case (le_gt_dec q1 q2)=> H; last symmetry; eauto with arith. The ``wlog suff`` variant is simpler, since it cuts ``wlog_statement`` instead of ``wlog_statement -> G``. It thus opens the goals ``wlog_statement -> G`` and ``wlog_statement``. In its simplest form the ``generally have : …`` tactic is equivalent to ``wlog suff : …`` followed by ``last first``. When the ``have`` tactic is used with the ``generally`` (or ``gen``) modifier it accepts an extra identifier followed by a comma before the usual intro pattern. The identifier will name the new hypothesis in its more general form, while the intro pattern will be used to process its instance. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrfun ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Axiom P : nat -> Prop. Axioms eqn leqn : nat -> nat -> bool. Declare Scope this_scope. Notation "a != b" := (eqn a b) (at level 70) : this_scope. Notation "a <= b" := (leqn a b) (at level 70) : this_scope. Open Scope this_scope. .. coqtop:: all Lemma simple n (ngt0 : 0 < n ) : P n. gen have ltnV, /andP[nge0 neq0] : n ngt0 / (0 <= n) && (n != 0); last first. .. _advanced_generalization_ssr: Advanced generalization +++++++++++++++++++++++ The complete syntax for the items on the left hand side of the ``/`` separator is the following one: .. tacv:: wlog … : {? {| @clear_switch | {? @}@ident | ( {? @}@ident := @c_pattern) } } / @term :undocumented: Clear operations are intertwined with generalization operations. In particular, this helps avoid dependency issues while generalizing some facts. If an :token:`ident` is prefixed with the ``@`` mark, then a let-in redex is created, which keeps track of its body (if any). The syntax :n:`(@ident := @c_pattern)` makes it possible to generalize an arbitrary term using a given name. Note that its simplest form ``(x := y)`` is just a renaming of ``y`` into ``x``. In particular, this can be useful in order to simulate the generalization of a section variable, otherwise not allowed. Indeed, renaming does not require the original variable to be cleared. The syntax ``(@x := y)`` generates a let-in abstraction but with the following caveat: ``x`` will not bind ``y``, but its body, whenever ``y`` can be unfolded. This covers the case of both local and global definitions, as illustrated in the following example. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Section Test. Variable x : nat. Definition addx z := z + x. Lemma test : x <= addx x. wlog H : (y := x) (@twoy := addx x) / twoy = 2 * y. To avoid unfolding the term captured by the pattern ``addx x`` one can use the pattern ``id (addx x)``, which would produce the following first subgoal .. coqtop:: reset none From Coq Require Import ssreflect Lia. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. Variable x : nat. Definition addx z := z + x.
Lemma test : x <= addx x. .. coqtop:: all wlog H : (y := x) (@twoy := id (addx x)) / twoy = 2 * y. .. _rewriting_ssr: Rewriting --------- The generalized use of reflection implies that most of the intermediate results handled are properties of effectively computable functions. The most efficient means of establishing such results is computation and simplification of expressions involving such functions, i.e., rewriting. |SSR| therefore includes an extended ``rewrite`` tactic that unifies and combines most of the rewriting functionalities. An extended rewrite tactic ~~~~~~~~~~~~~~~~~~~~~~~~~~ The main features of the rewrite tactic are: + It can perform an entire series of such operations in any subset of the goal and/or context; + It can perform rewriting, simplification, folding/unfolding of definitions, and closing of goals; + Several rewriting operations can be chained in a single tactic; + Control over the occurrences at which rewriting is to be performed is significantly enhanced. The general form of an |SSR| rewrite tactic is: .. tacn:: rewrite {+ @rstep } :name: rewrite (ssreflect) :undocumented: The combination of a rewrite tactic with the ``in`` tactical (see section :ref:`localization_ssr`) performs rewriting in both the context and the goal. A rewrite step :token:`rstep` has the general form: .. prodn:: rstep ::= {? @r_prefix } @r_item .. prodn:: r_prefix ::= {? - } {? @mult } {? {| @occ_switch | @clear_switch } } {? [ @r_pattern ] } .. prodn:: r_pattern ::= {| @term | in {? @ident in } @term | {| @term in | @term as } @ident in @term } .. prodn:: r_item ::= {| {? / } @term | @s_item } An :token:`r_prefix` contains annotations to qualify where and how the rewrite operation should be performed: + The optional initial ``-`` indicates the direction of the rewriting of :token:`r_item`: if present, the direction is right-to-left; otherwise it is left-to-right. + The multiplier :token:`mult` (see section :ref:`iteration_ssr`) specifies if and how the rewrite operation should be repeated. + A rewrite operation matches the occurrences of a *rewrite pattern*, and replaces these occurrences by another term, according to the given :token:`r_item`. The optional *redex switch* ``[r_pattern]``, which should always be surrounded by brackets, explicitly gives this rewrite pattern. In its simplest form, it is a regular term. If no explicit redex switch is present, the rewrite pattern to be matched is inferred from the :token:`r_item`. + This optional term, or the :token:`r_item`, may be preceded by an :token:`occ_switch` (see section :ref:`selectors_ssr`) or a :token:`clear_switch` (see section :ref:`discharge_ssr`), these two possibilities being mutually exclusive. An occurrence switch selects the occurrences of the rewrite pattern which should be affected by the rewrite operation. A clear switch, even an empty one, is performed *after* the :token:`r_item` is actually processed and is complemented with the name of the rewrite rule if and only if it is a simple proof context entry [#10]_. As a consequence one can write ``rewrite {}H`` to rewrite with ``H`` and dispose of ``H`` immediately afterwards. This behavior can be avoided by putting parentheses around the rewrite rule. An :token:`r_item` can be: + A *simplification* :token:`r_item`, represented by a :token:`s_item` (see section :ref:`introduction_ssr`). Simplification operations are intertwined with the possible other rewrite operations specified by the list of :token:`r_item`. + A *folding/unfolding* :token:`r_item`.
The tactic: ``rewrite /term`` unfolds the head constant of term in every occurrence of the first matching of term in the goal. In particular, if ``my_def`` is a (local or global) defined constant, the tactic: ``rewrite /my_def.`` is analogous to: ``unfold my_def``. Conversely: ``rewrite -/my_def.`` is equivalent to: ``fold my_def``. When an unfold :token:`r_item` is combined with a redex pattern, a conversion operation is performed. A tactic of the form: ``rewrite -[term1]/term2.`` is equivalent to: ``change term1 with term2.`` If ``term2`` is a single constant and the head symbol of ``term1`` is not ``term2``, then the head symbol of ``term1`` is repeatedly unfolded until ``term2`` appears. + A :token:`term`, which can be: + A term whose type has the form: ``forall (x1 : A1 )…(xn : An ), eq term1 term2`` where ``eq`` is the Leibniz equality or a registered setoid equality. + A list of terms ``(t1 ,…,tn)``, each ``ti`` having a type as above. The tactic: ``rewrite r_prefix (t1 ,…,tn ).`` is equivalent to: ``do [rewrite r_prefix t1 | … | rewrite r_prefix tn ].`` + An anonymous rewrite lemma ``(_ : term)``, where term has a type as above. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all abort Definition double x := x + x. Definition ddouble x := double (double x). Lemma test x : ddouble x = 4 * x. rewrite [ddouble _]/double. .. warning:: The |SSR| terms containing holes are *not* typed as abstractions in this context. Hence the following script fails. .. coqtop:: all Definition f := fun x y => x + y. Lemma test x y : x + y = f y x. .. coqtop:: all fail rewrite -[f y]/(y + _). but the following script succeeds .. coqtop:: all rewrite -[f y x]/(y + _). .. flag:: SsrOldRewriteGoalsOrder Controls the order in which generated subgoals (side conditions) are added to the proof context. The :term:`flag` is off by default, which puts subgoals generated by conditional rules first, followed by the main goal. When it is on, the main goal appears first. If your proofs are organized to complete proving the main goal before side conditions, turning the flag on will save you from having to add :tacn:`last first` tactics that would be needed to keep the main goal as the currently focused goal. Remarks and examples ~~~~~~~~~~~~~~~~~~~~ Rewrite redex selection ``````````````````````` The general strategy of |SSR| is to grasp as many redexes as possible and to let the user select the ones to be rewritten thanks to the improved syntax for the control of rewriting. This may be a source of incompatibilities between the two rewrite tactics. In a rewrite tactic of the form: .. coqdoc:: rewrite occ_switch [term1]term2. ``term1`` is the explicit rewrite redex and ``term2`` is the rewrite rule. The execution of this tactic unfolds as follows: + First ``term1`` and ``term2`` are βι normalized. Then ``term2`` is put in head normal form if the Leibniz equality constructor ``eq`` is not the head symbol. This may involve ζ reductions. + Then, the matching algorithm (see section :ref:`abbreviations_ssr`) determines the first subterm of the goal matching the rewrite pattern. The rewrite pattern is given by ``term1``, if an explicit redex pattern switch is provided, or by the type of ``term2`` otherwise. However, matching skips over matches that would lead to trivial rewrites. All the occurrences of this subterm in the goal are candidates for rewriting.
+ Then only the occurrences coded by :token:`occ_switch` (see again section :ref:`abbreviations_ssr`) are finally selected for rewriting. + The left hand side of ``term2`` is unified with the subterm found by the matching algorithm, and if this succeeds, all the selected occurrences in the goal are replaced by the right hand side of ``term2``. + Finally the goal is βι normalized. In the case ``term2`` is a list of terms, the first top-down (in the goal) left-to-right (in the list) matching rule gets selected. Chained rewrite steps ````````````````````` The possibility to chain rewrite operations in a single tactic makes scripts more compact and gathers in a single command line a bunch of surgical operations which would be described by a one sentence in a pen and paper proof. Performing rewrite and simplification operations in a single tactic enhances significantly the concision of scripts. For instance the tactic: .. coqdoc:: rewrite /my_def {2}[f _]/= my_eq //=. unfolds ``my_def`` in the goal, simplifies the second occurrence of the first subterm matching pattern ``[f _]``, rewrites ``my_eq``, simplifies the goals and closes trivial goals. Here are some concrete examples of chained rewrite operations, in the proof of basic results on natural numbers arithmetic. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Axiom addn0 : forall m, m + 0 = m. Axiom addnS : forall m n, m + S n = S (m + n). Axiom addSnnS : forall m n, S m + n = m + S n. Lemma addnCA m n p : m + (n + p) = n + (m + p). by elim: m p => [ | m Hrec] p; rewrite ?addSnnS -?addnS. Qed. Lemma addnC n m : m + n = n + m. by rewrite -{1}[n]addn0 addnCA addn0. Qed. Note the use of the ``?`` switch for parallel rewrite operations in the proof of ``addnCA``. Explicit redex switches are matched first ````````````````````````````````````````` If an :token:`r_prefix` involves a *redex switch*, the first step is to find a subterm matching this redex pattern, independently from the left hand side of the equality the user wants to rewrite. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test (H : forall t u, t + u = u + t) x y : x + y = y + x. rewrite [y + _]H. Note that if this first pattern matching is not compatible with the :token:`r_item`, the rewrite fails, even if the goal contains a correct redex matching both the redex switch and the left hand side of the equality. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test (H : forall t u, t + u * 0 = t) x y : x + y * 4 + 2 * 0 = x + 2 * 0. Fail rewrite [x + _]H. Indeed the left hand side of ``H`` does not match the redex identified by the pattern ``x + y * 4``. .. _ssr_rewrite_occ_switch: Occurrence switches and redex switches `````````````````````````````````````` .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test x y : x + y + 0 = x + y + y + 0 + 0 + (x + y + 0). rewrite {2}[_ + y + 0](_: forall z, z + 0 = z). The second subgoal is generated by the use of an anonymous lemma in the rewrite tactic. 
The effect of the tactic on the initial goal is to rewrite this lemma at the second occurrence of the first matching ``x + y + 0`` of the explicit rewrite redex ``_ + y + 0``. Occurrence selection and repetition ``````````````````````````````````` Occurrence selection has priority over repetition switches. This means the repetition of a rewrite tactic specified by a multiplier will perform matching each time an elementary rewrite operation is performed. Repeated rewrite tactics apply to every subgoal generated by the previous tactic, including the previous instances of the repetition. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: all Lemma test x y (z : nat) : x + 1 = x + y + 1. rewrite 2!(_ : _ + 1 = z). This last tactic generates *three* subgoals because the second rewrite operation specified with the ``2!`` multiplier applies to the two subgoals generated by the first rewrite. Multi-rule rewriting ```````````````````` The rewrite tactic can be provided a *tuple* of rewrite rules, or more generally a tree of such rules, since this tuple can feature arbitrary inner parentheses. We call *multirule* such a generalized rewrite rule. This feature is of special interest when it is combined with multiplier switches, which makes the rewrite tactic iterate the rewrite operations prescribed by the rules on the current goal. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all abort Variables (a b c : nat). Hypothesis eqab : a = b. Hypothesis eqac : a = c. Lemma test : a = a. rewrite (eqab, eqac). Indeed rule ``eqab`` is the first to apply among the ones gathered in the tuple passed to the rewrite tactic. This multirule ``(eqab, eqac)`` is actually a Coq term and we can name it with a definition: .. coqtop:: all Definition multi1 := (eqab, eqac). In this case, the tactic ``rewrite multi1`` is a synonym for ``rewrite (eqab, eqac)``. More precisely, a multirule rewrites the first subterm to which one of the rules applies in a left-to-right traversal of the goal, with the first rule from the multirule tree in left-to-right order. Matching is performed according to the algorithm described in Section :ref:`abbreviations_ssr`, but literal matches have priority. .. example:: .. coqtop:: all abort Definition d := a. Hypotheses eqd0 : d = 0. Definition multi2 := (eqab, eqd0). Lemma test : d = b. rewrite multi2. Indeed rule ``eqd0`` applies without unfolding the definition of ``d``. For repeated rewrites the selection process is repeated anew. .. example:: .. coqtop:: all abort Hypothesis eq_adda_b : forall x, x + a = b. Hypothesis eq_adda_c : forall x, x + a = c. Hypothesis eqb0 : b = 0. Definition multi3 := (eq_adda_b, eq_adda_c, eqb0). Lemma test : 1 + a = 12 + a. rewrite 2!multi3. It uses ``eq_adda_b`` then ``eqb0`` on the left-hand side only. Without the bound ``2`` one would obtain ``0 = 0``. The grouping of rules inside a multirule does not affect the selection strategy but can make it easier to include one rule set in another or to (universally) quantify over the parameters of a subset of rules (as there is special code that will omit unnecessary quantifiers for rules that can be syntactically extracted). 
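For instance, continuing the running example above, the previously defined multirules can themselves be grouped into a larger one (the name ``multi4`` is ours; this is only an illustrative sketch): .. coqdoc:: Definition multi4 := (multi3, (eqab, eqd0)). Since grouping does not affect the selection strategy, ``rewrite multi4`` selects rules exactly as the flat tuple ``(eq_adda_b, eq_adda_c, eqb0, eqab, eqd0)`` would; the inner parentheses merely document how the rule sets were assembled.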
It is also possible to reverse the direction of a rule subset, using a special dedicated syntax: the tactic ``rewrite (=^~ multi1)`` is equivalent to ``rewrite multi1_rev``. .. example:: .. coqtop:: all Hypothesis eqba : b = a. Hypothesis eqca : c = a. Definition multi1_rev := (eqba, eqca). except that the constants ``eqba``, ``eqca``, and ``multi1_rev`` have not been created. Rewriting with multirules is useful to implement simplification or transformation procedures, to be applied on terms of small to medium size. For instance, the library `ssrnat` (Mathematical Components library) provides two implementations for arithmetic operations on natural numbers: an elementary one and a tail recursive version, more efficient but less convenient for reasoning purposes. The library also provides one lemma per such operation, stating that both versions return the same values when applied to the same arguments: .. coqdoc:: Lemma addE : add =2 addn. Lemma doubleE : double =1 doublen. Lemma add_mulE n m s : add_mul n m s = addn (muln n m) s. Lemma mulE : mul =2 muln. Lemma mul_expE m n p : mul_exp m n p = muln (expn m n) p. Lemma expE : exp =2 expn. Lemma oddE : odd =1 oddn. The operation on the left hand side of each lemma is the efficient version, and the corresponding naive implementation is on the right hand side. In order to reason conveniently on expressions involving the efficient operations, we gather all these rules in the definition ``trecE``: .. coqdoc:: Definition trecE := (addE, (doubleE, oddE), (mulE, add_mulE, (expE, mul_expE))). The tactic: ``rewrite !trecE.`` restores the naive versions of each operation in a goal involving the efficient ones, e.g. for the purpose of a correctness proof. Wildcards vs abstractions ````````````````````````` The rewrite tactic supports :token:`r_item`\s containing holes. For example, in the tactic ``rewrite (_ : _ * 0 = 0).`` the term ``_ * 0 = 0`` is interpreted as ``forall n : nat, n * 0 = 0.`` However, this tactic is *not* equivalent to ``rewrite (_ : forall x, x * 0 = 0).``. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test y z : y * 0 + y * (z * 0) = 0. rewrite (_ : _ * 0 = 0). while the other tactic results in .. coqtop:: all restart abort rewrite (_ : forall x, x * 0 = 0). The first tactic requires you to prove the instance of the (missing) lemma that was used, while the latter requires you to prove the quantified form. When |SSR| rewrite fails on standard Coq licit rewrite ```````````````````````````````````````````````````````` In a few cases, the |SSR| rewrite tactic fails to rewrite some redexes which standard Coq successfully rewrites. There are two main cases: + |SSR| never accepts rewriting indeterminate patterns like: .. coqdoc:: Lemma foo (x : unit) : x = tt. |SSR| will however accept the ηζ expansion of this rule: .. coqdoc:: Lemma fubar (x : unit) : (let u := x in u) = tt. + The standard rewrite tactic provided by Coq uses a different algorithm to find instances of the rewrite rule. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Variable g : nat -> nat. Definition f := g. Axiom H : forall x, g x = 0. Lemma test : f 3 + f 3 = f 6. (* we call the standard rewrite tactic here *) rewrite -> H.
This rewriting is not possible in |SSR| because there is no occurrence of the head symbol ``f`` of the rewrite rule in the goal. .. coqtop:: all restart fail rewrite H. Rewriting with ``H`` first requires unfolding the occurrences of ``f`` where the substitution is to be performed (here there is a single such occurrence), using tactic ``rewrite /f`` (for a global replacement of f by g) or ``rewrite pattern/f``, for a finer selection. .. coqtop:: all restart rewrite /f H. alternatively one can override the pattern inferred from ``H`` .. coqtop:: all restart rewrite [f _]H. Existential metavariables and rewriting ``````````````````````````````````````` The rewrite tactic will not instantiate existing existential metavariables when matching a redex pattern. If a rewrite rule generates a goal with new existential metavariables in the ``Prop`` sort, these will be generalized as for ``apply`` (see :ref:`apply_ssr`) and corresponding new goals will be generated. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrfun ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Set Warnings "-notation-overridden". .. coqtop:: all abort Axiom leq : nat -> nat -> bool. Notation "m <= n" := (leq m n) : nat_scope. Notation "m < n" := (S m <= n) : nat_scope. Inductive Ord n := Sub x of x < n. Notation "'I_ n" := (Ord n) (at level 8, n at level 2, format "''I_' n"). Arguments Sub {_} _ _. Definition val n (i : 'I_n) := let: Sub a _ := i in a. Definition insub n x := if @idP (x < n) is ReflectT _ Px then Some (Sub x Px) else None. Axiom insubT : forall n x Px, insub n x = Some (Sub x Px). Lemma test (x : 'I_2) y : Some x = insub 2 y. rewrite insubT. Since the argument corresponding to Px is not supplied by the user, the resulting goal should be ``Some x = Some (Sub y ?Goal).`` Instead, |SSR| ``rewrite`` tactic hides the existential variable. As in :ref:`apply_ssr`, the ``ssrautoprop`` tactic is used to try to solve the existential variable. .. coqtop:: all abort Lemma test (x : 'I_2) y (H : y < 2) : Some x = insub 2 y. rewrite insubT. As a temporary limitation, this behavior is available only if the rewriting rule is stated using Leibniz equality (as opposed to setoid relations). It will be extended to other rewriting relations in the future. .. _under_ssr: Rewriting under binders ~~~~~~~~~~~~~~~~~~~~~~~ Goals involving objects defined with higher-order functions often require "rewriting under binders". While setoid rewriting is a possible approach in this case, it is common to use regular rewriting along with dedicated extensionality lemmas. This may cause some practical issues during the development of the corresponding scripts, notably as we might be forced to provide the rewrite tactic with complete terms, as shown by the simple example below. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. .. coqtop:: in Axiom subnn : forall n : nat, n - n = 0. Parameter map : (nat -> nat) -> list nat -> list nat. Parameter sumlist : list nat -> nat. Axiom eq_map : forall F1 F2 : nat -> nat, (forall n : nat, F1 n = F2 n) -> forall l : list nat, map F1 l = map F2 l. .. coqtop:: all Lemma example_map l : sumlist (map (fun m => m - m) l) = 0. In this context, one cannot directly use ``eq_map``: .. coqtop:: all fail rewrite eq_map. 
as we need to explicitly provide the non-inferable argument ``F2``, which corresponds here to the term we want to obtain *after* the rewriting step. In order to perform the rewrite step one has to provide the term by hand as follows: .. coqtop:: all abort rewrite (@eq_map _ (fun _ : nat => 0)). by move=> m; rewrite subnn. The :tacn:`under` tactic lets one perform the same operation in a more convenient way: .. coqtop:: all abort Lemma example_map l : sumlist (map (fun m => m - m) l) = 0. under eq_map => m do rewrite subnn. The under tactic ```````````````` The convenience :tacn:`under` tactic supports the following syntax: .. tacn:: under {? @r_prefix } @term {? => {+ @i_item}} {? do {| @tactic | [ {*| @tactic } ] } } :name: under Operate under the context proved to be extensional by lemma :token:`term`. .. exn:: Incorrect number of tactics (expected N tactics, was given M). This error can occur when using the version with a ``do`` clause. The multiplier part of :token:`r_prefix` is not supported. We distinguish two modes, :ref:`interactive mode <under_interactive>` without a ``do`` clause, and :ref:`one-liner mode <under_one_liner>` with a ``do`` clause, which are explained in more detail below. .. _under_interactive: Interactive mode ```````````````` Let us redo the running example in interactive mode. .. example:: .. coqtop:: all abort Lemma example_map l : sumlist (map (fun m => m - m) l) = 0. under eq_map => m. rewrite subnn. over. The execution of the Ltac expression: :n:`under @term => [ @i_item__1 | … | @i_item__n ].` involves the following steps: 1. It performs a :n:`rewrite @term` without failing (unlike the first example with ``rewrite eq_map.``), but creating evars (see :tacn:`evar`). If :n:`term` is prefixed by a pattern or an occurrence selector, then the modifiers are honoured. 2. As an n-branch intro pattern is provided, :tacn:`under` checks that n+1 subgoals have been created. The last one is the main subgoal, while the other ones correspond to premises of the rewrite rule (such as ``forall n, F1 n = F2 n`` for ``eq_map``). 3. If so, :tacn:`under` puts these n goals in head normal form (using the defective form of the tactic :tacn:`move`), then executes the corresponding intro pattern :n:`@i_pattern__i` in each goal. 4. Then :tacn:`under` checks that the first n subgoals are (quantified) Leibniz equalities, double implications or registered relations (w.r.t. Class ``RewriteRelation``) between a term and an evar, e.g. ``m - m = ?F2 m`` in the running example. (This support for setoid-like relations is enabled as soon as we do both ``Require Import ssreflect.`` and ``Require Setoid.``) 5. If so, :tacn:`under` protects these n goals against an accidental instantiation of the evar. These protected goals are displayed using the ``'Under[ … ]`` notation (e.g. ``'Under[ m - m ]`` in the running example). 6. The expression inside the ``'Under[ … ]`` notation can be proved equivalent to the desired expression by using a regular :tacn:`rewrite` tactic. 7. The end of the interactive editing of the first n goals has to be signalled by using the :tacn:`over` tactic or rewrite rule (see below), which requires that the underlying relation is reflexive. (The running example deals with Leibniz equality, but ``PreOrder`` relations are also supported, for example.) 8. Finally, a post-processing step is performed in the main goal to keep the name(s) for the bound variables chosen by the user in the intro pattern for the first branch. ..
_over_ssr: The over tactic +++++++++++++++ Two equivalent facilities (a terminator and a lemma) are provided to close intermediate subgoals generated by :tacn:`under` (i.e. goals displayed as ``'Under[ … ]``): .. tacn:: over :name: over This terminator tactic allows one to close goals of the form ``'Under[ … ]``. .. tacv:: by rewrite over This is a variant of :tacn:`over` in order to close ``'Under[ … ]`` goals, relying on the ``over`` rewrite rule. Note that a rewrite rule ``UnderE`` is available as well, if one wants to "unprotect" the evar, without closing the goal automatically (e.g., to instantiate it manually with another rule than reflexivity). .. _under_one_liner: One-liner mode `````````````` The Ltac expression: :n:`under @term => [ @i_item__1 | … | @i_item__n ] do [ @tactic__1 | … | @tactic__n ].` can be seen as a shorter form for the following expression: :n:`(under @term) => [ @i_item__1 | … | @i_item__n | ]; [ @tactic__1; over | … | @tactic__n; over | cbv beta iota ].` Notes: + The ``beta-iota`` reduction here is useful to get rid of the beta redexes that could be introduced after the substitution of the evars by the :tacn:`under` tactic. + Note that the provided tactics can as well involve other :tacn:`under` tactics. See below for a typical example involving the `bigop` theory from the Mathematical Components library. + If there is only one tactic, the brackets can be omitted, e.g.: :n:`under @term => i do @tactic.` and that shorter form should be preferred. + If the ``do`` clause is provided and the intro pattern is omitted, then the default :token:`i_item` ``*`` is applied to each branch. E.g., the Ltac expression: :n:`under @term do [ @tactic__1 | … | @tactic__n ]` is equivalent to: :n:`under @term => [ * | … | * ] do [ @tactic__1 | … | @tactic__n ]` (and it can be noted here that the :tacn:`under` tactic performs a ``move.`` before processing the intro patterns ``=> [ * | … | * ]``). .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Coercion is_true : bool >-> Sortclass. Reserved Notation "\big [ op / idx ]_ ( m <= i < n | P ) F" (at level 36, F at level 36, op, idx at level 10, m, i, n at level 50, format "'[' \big [ op / idx ]_ ( m <= i < n | P ) F ']'"). Variant bigbody (R I : Type) : Type := BigBody : forall (_ : I) (_ : forall (_ : R) (_ : R), R) (_ : bool) (_ : R), bigbody R I. Parameter bigop : forall (R I : Type) (_ : R) (_ : list I) (_ : forall _ : I, bigbody R I), R. Axiom eq_bigr_ : forall (R : Type) (idx : R) (op : forall (_ : R) (_ : R), R) (I : Type) (r : list I) (P : I -> bool) (F1 F2 : I -> R), (forall x : I, is_true (P x) -> F1 x = F2 x) -> bigop idx r (fun i : I => BigBody i op (P i) (F1 i)) = bigop idx r (fun i : I => BigBody i op (P i) (F2 i)). Axiom eq_big_ : forall (R : Type) (idx : R) (op : R -> R -> R) (I : Type) (r : list I) (P1 P2 : I -> bool) (F1 F2 : I -> R), (forall x : I, P1 x = P2 x) -> (forall i : I, is_true (P1 i) -> F1 i = F2 i) -> bigop idx r (fun i : I => BigBody i op (P1 i) (F1 i)) = bigop idx r (fun i : I => BigBody i op (P2 i) (F2 i)). Reserved Notation "\sum_ ( m <= i < n | P ) F" (at level 41, F at level 41, i, m, n at level 50, format "'[' \sum_ ( m <= i < n | P ) '/ ' F ']'"). Parameter index_iota : nat -> nat -> list nat. Notation "\big [ op / idx ]_ ( m <= i < n | P ) F" := (bigop idx (index_iota m n) (fun i : nat => BigBody i op P%bool F)). Notation "\sum_ ( m <= i < n | P ) F" := (\big[plus/O]_(m <= i < n | P%bool) F%nat). 
Notation eq_bigr := (fun n m => eq_bigr_ 0 plus (index_iota n m)). Notation eq_big := (fun n m => eq_big_ 0 plus (index_iota n m)). Parameter odd : nat -> bool. Parameter prime : nat -> bool. .. coqtop:: in Parameter addnC : forall m n : nat, m + n = n + m. Parameter muln1 : forall n : nat, n * 1 = n. .. coqtop:: all Check eq_bigr. Check eq_big. Lemma test_big_nested (m n : nat) : \sum_(0 <= a < m | prime a) \sum_(0 <= j < n | odd (j * 1)) (a + j) = \sum_(0 <= i < m | prime i) \sum_(0 <= j < n | odd j) (j + i). under eq_bigr => i prime_i do under eq_big => [ j | j odd_j ] do [ rewrite (muln1 j) | rewrite (addnC i j) ]. Remark how the final goal uses the name ``i`` (the name given in the intro pattern) rather than ``a`` in the binder of the first summation. .. _locking_ssr: Locking, unlocking ~~~~~~~~~~~~~~~~~~ As program proofs tend to generate large goals, it is important to be able to control the partial evaluation performed by the simplification operations that are performed by the tactics. These evaluations can for example come from a ``/=`` simplification switch, or from rewrite steps which may expand large terms while performing conversion. We definitely want to avoid repeating large subterms of the goal in the proof script. We do this by “clamping down” selected function symbols in the goal, which prevents them from being considered in simplification or rewriting steps. This clamping is accomplished by using the occurrence switches (see section :ref:`abbreviations_ssr`) together with “term tagging” operations. |SSR| provides two levels of tagging. The first one uses auxiliary definitions to introduce a provably equal copy of any term t. However this copy is (on purpose) *not convertible* to t in the Coq system [#8]_. The job is done by the following construction: .. coqdoc:: Lemma master_key : unit. Proof. exact tt. Qed. Definition locked A := let: tt := master_key in fun x : A => x. Lemma lock : forall A x, x = locked x :> A. Note that the definition of *master_key* is explicitly opaque. The equation ``t = locked t`` given by the ``lock`` lemma can be used for selective rewriting, blocking on the fly the reduction in the term ``t``. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrfun ssrbool. From Coq Require Import List. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Variable A : Type. Fixpoint has (p : A -> bool) (l : list A) : bool := if l is cons x l then p x || (has p l) else false. Lemma test p x y l (H : p x = true) : has p ( x :: y :: l) = true. rewrite {2}[cons]lock /= -lock. It is sometimes desirable to globally prevent a definition from being expanded by simplification; this is done by adding locked in the definition. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Definition lid := locked (fun x : nat => x). Lemma test : lid 3 = 3. rewrite /=. unlock lid. .. tacn:: unlock {? @occ_switch } @ident :name: unlock This tactic unfolds such definitions while removing “locks”, i.e. it replaces the occurrence(s) of :token:`ident` coded by the :token:`occ_switch` with the corresponding body. We found that it was usually preferable to prevent the expansion of some functions by the partial evaluation switch ``/=``, unless this allowed the evaluation of a condition. 
This is possible thanks to another mechanism of term tagging, resting on the following *Notation*: .. coqdoc:: Notation "'nosimpl' t" := (let: tt := tt in t). The term ``(nosimpl t)`` simplifies to ``t`` *except* in a definition. More precisely, given: .. coqdoc:: Definition foo := (nosimpl bar). the term ``foo`` (or ``(foo t’)``) will *not* be expanded by the *simpl* tactic unless it is in a forcing context (e.g., in ``match foo t’ with … end``, ``foo t’`` will be reduced if this allows ``match`` to be reduced). Note that ``nosimpl bar`` is simply notation for a term that reduces to ``bar``; hence ``unfold foo`` will replace ``foo`` by ``bar``, and ``fold foo`` will replace ``bar`` by ``foo``. .. warning:: The ``nosimpl`` trick only works if no reduction is apparent in ``t``; in particular, the declaration: .. coqdoc:: Definition foo x := nosimpl (bar x). will usually not work. Anyway, the common practice is to tag only the function, and to use the following definition, which blocks the reduction as expected: .. coqdoc:: Definition foo x := nosimpl bar x. A standard example making this technique shine is the case of arithmetic operations. We define for instance: .. coqdoc:: Definition addn := nosimpl plus. The operation ``addn`` behaves exactly like ``plus``, except that ``(addn (S n) m)`` will not simplify spontaneously to ``(S (addn n m))`` (the two terms, however, are convertible). In addition, the unfolding step: ``rewrite /addn`` will replace ``addn`` directly with ``plus``, so the ``nosimpl`` form is essentially invisible. .. _congruence_ssr: Congruence ~~~~~~~~~~ Because of the way matching interferes with parameters of type families, the tactic: .. coqdoc:: apply: my_congr_property. will generally fail to perform congruence simplification, even on rather simple cases. We therefore provide a more robust alternative in which the function is supplied: .. tacn:: congr {? @natural } @term :name: congr This tactic: + checks that the goal is a Leibniz equality; + matches both sides of this equality with “term applied to some arguments”, inferring the right number of arguments from the goal and the type of term. This may expand some definitions or fixpoints; + generates the subgoals corresponding to pairwise equalities of the arguments present in the goal. The goal can be a non-dependent product ``P -> Q``. In that case, the system asserts the equation ``P = Q``, uses it to solve the goal, and calls the ``congr`` tactic on the remaining goal ``P = Q``. This can be useful for instance to perform a transitivity step, like in the following situation. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test (x y z : nat) (H : x = y) : x = z. congr (_ = _) : H. Abort. Lemma test (x y z : nat) : x = y -> x = z. congr (_ = _). The optional :token:`natural` forces the number of arguments for which the tactic should generate equality proof obligations. This tactic supports equalities between applications with dependent arguments. Yet dependent arguments should have exactly the same parameters on both sides, and these parameters should appear as first arguments. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Definition f n := if n is 0 then plus else mult. Definition g (n m : nat) := plus. Lemma test x y : f 0 x y = g 1 1 x y. 
congr plus. This script shows that the ``congr`` tactic matches ``plus`` with ``f 0`` on the left hand side and ``g 1 1`` on the right hand side, and solves the goal. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test n m (Hnm : m <= n) : S m + (S n - S m) = S n. congr S; rewrite -/plus. The tactic ``rewrite -/plus`` folds back the expansion of plus which was necessary for matching both sides of the equality with an application of ``S``. Like most |SSR| arguments, :token:`term` can contain wildcards. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test x y : x + (y * (y + x - x)) = x * 1 + (y + 0) * y. congr ( _ + (_ * _)). .. _contextual_patterns_ssr: Contextual patterns ------------------- The simple form of patterns used so far, terms possibly containing wild cards, often require an additional :token:`occ_switch` to be specified. While this may work pretty fine for small goals, the use of polymorphic functions and dependent types may lead to an invisible duplication of function arguments. These copies usually end up in types hidden by the implicit arguments machinery or by user-defined notations. In these situations computing the right occurrence numbers is very tedious because they must be counted on the goal as printed after setting the :flag:`Printing All` flag. Moreover the resulting script is not really informative for the reader, since it refers to occurrence numbers he cannot easily see. Contextual patterns mitigate these issues allowing to specify occurrences according to the context they occur in. Syntax ~~~~~~ The following table summarizes the full syntax of :token:`c_pattern` and the corresponding subterm(s) identified by the pattern. In the third column we use s.m.r. for “the subterms matching the redex” specified in the second column. .. list-table:: :header-rows: 1 * - :token:`c_pattern` - redex - subterms affected * - ``term`` - ``term`` - all occurrences of ``term`` * - ``ident in term`` - subterm of ``term`` selected by ``ident`` - all the subterms identified by ``ident`` in all the occurrences of ``term`` * - ``term1 in ident in term2`` - ``term1`` in all s.m.r. - in all the subterms identified by ``ident`` in all the occurrences of ``term2`` * - ``term1 as ident in term2`` - ``term 1`` - in all the subterms identified by ``ident`` in all the occurrences of ``term2[term 1 /ident]`` The rewrite tactic supports two more patterns obtained prefixing the first two with in. The intended meaning is that the pattern identifies all subterms of the specified context. The ``rewrite`` tactic will infer a pattern for the redex looking at the rule used for rewriting. .. list-table:: :header-rows: 1 * - :token:`r_pattern` - redex - subterms affected * - ``in term`` - inferred from rule - in all s.m.r. in all occurrences of ``term`` * - ``in ident in term`` - inferred from rule - in all s.m.r. in all the subterms identified by ``ident`` in all the occurrences of ``term`` The first :token:`c_pattern` is the simplest form matching any context but selecting a specific redex and has been described in the previous sections. We have seen so far that the possibility of selecting a redex using a term with holes is already a powerful means of redex selection. 
Similarly, any terms provided by the user in the more complex forms of :token:`c_pattern`\s presented in the tables above can contain holes. For a quick glance at what can be expressed with the last :token:`r_pattern`, consider the goal ``a = b`` and the tactic .. coqdoc:: rewrite [in X in _ = X]rule. It rewrites all occurrences of the left hand side of ``rule`` inside ``b`` only (``a``, and the hidden type of the equality, are ignored). Note that the variant ``rewrite [X in _ = X]rule`` would have rewritten ``b`` exactly (i.e., it would only work if ``b`` and the left hand side of ``rule`` can be unified). Matching contextual patterns ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The :token:`c_pattern` and :token:`r_pattern` involving terms with holes are matched against the goal in order to find a closed instantiation. This matching proceeds as follows: .. list-table:: :header-rows: 1 * - :token:`c_pattern` - instantiation order and place for ``term_i`` and redex * - ``term`` - ``term`` is matched against the goal, redex is unified with the instantiation of ``term`` * - ``ident in term`` - ``term`` is matched against the goal, redex is unified with the subterm of the instantiation of ``term`` identified by ``ident`` * - ``term1 in ident in term2`` - ``term2`` is matched against the goal, ``term1`` is matched against the subterm of the instantiation of ``term2`` identified by ``ident``, redex is unified with the instantiation of ``term1`` * - ``term1 as ident in term2`` - ``term2[term1/ident]`` is matched against the goal, redex is unified with the instantiation of ``term1`` In the following patterns, the redex is intended to be inferred from the rewrite rule. .. list-table:: :header-rows: 1 * - :token:`r_pattern` - instantiation order and place for ``term_i`` and redex * - ``in ident in term`` - ``term`` is matched against the goal, the redex is matched against the subterm of the instantiation of ``term`` identified by ``ident`` * - ``in term`` - ``term`` is matched against the goal, redex is matched against the instantiation of ``term`` Examples ~~~~~~~~ Contextual pattern in set and the : tactical ```````````````````````````````````````````` As already mentioned in section :ref:`abbreviations_ssr`, the ``set`` tactic takes as an argument a term in open syntax. This term is interpreted as the simplest form of :token:`c_pattern`. To avoid confusion in the grammar, open syntax is supported only for the simplest form of patterns, while parentheses are required around more complex patterns. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test a b : a + b + 1 = b + (a + 1). set t := (X in _ = X). rewrite {}/t. set t := (a + _ in X in _ = X). Since the user may define an infix notation for ``in``, the result of the former tactic may be ambiguous. The disambiguation rule implemented is to prefer patterns over simple terms, but to interpret a pattern with double parentheses as a simple term. For example, the following tactic would capture any occurrence of the term ``a in A``. .. coqdoc:: set t := ((a in A)). Contextual patterns can also be used as arguments of the ``:`` tactical. For example: .. coqdoc:: elim: n (n in _ = n) (refl_equal n). Contextual patterns in rewrite `````````````````````````````` .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. ..
coqtop:: all Notation "n .+1" := (Datatypes.S n) (at level 2, left associativity, format "n .+1") : nat_scope. Axiom addSn : forall m n, m.+1 + n = (m + n).+1. Axiom addn0 : forall m, m + 0 = m. Axiom addnC : forall m n, m + n = n + m. Lemma test x y z f : (x.+1 + y) + f (x.+1 + y) (z + (x + y).+1) = 0. rewrite [in f _ _]addSn. Note: the simplification rule ``addSn`` is applied only under the ``f`` symbol. Then we simplify also the first addition and expand ``0`` into ``0 + 0``. .. coqtop:: all rewrite addSn -[X in _ = X]addn0. Note that the right hand side of ``addn0`` is undetermined, but the rewrite pattern specifies the redex explicitly. The right hand side of ``addn0`` is unified with the term identified by ``X``, here ``0``. The following pattern does not specify a redex, since it identifies an entire region, hence the rewrite rule has to be instantiated explicitly. Thus the tactic: .. coqtop:: all rewrite -{2}[in X in _ = X](addn0 0). The following tactic is quite tricky: .. coqtop:: all rewrite [_.+1 in X in f _ X](addnC x.+1). The explicit redex ``_.+1`` is important since its head constant ``S`` differs from the head constant inferred from ``(addnC x.+1)`` (that is ``+``). Moreover, the pattern ``f _ X`` is important to rule out the first occurrence of ``(x + y).+1``. Last, only the subterms of ``f _ X`` identified by ``X`` are rewritten, thus the first argument of ``f`` is skipped too. Also note the pattern ``_.+1`` is interpreted in the context identified by ``X``, thus it gets instantiated to ``(y + x).+1`` and not ``(x + y).+1``. The last rewrite pattern allows to specify exactly the shape of the term identified by X, that is thus unified with the left hand side of the rewrite rule. .. coqtop:: all rewrite [x.+1 + y as X in f X _]addnC. Patterns for recurrent contexts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The user can define shortcuts for recurrent contexts corresponding to the ``ident in term`` part. The notation scope identified with ``%pattern`` provides a special notation ``(X in t)`` the user must adopt in order to define context shortcuts. The following example is taken from ``ssreflect.v`` where the ``LHS`` and ``RHS`` shortcuts are defined. .. coqdoc:: Notation RHS := (X in _ = X)%pattern. Notation LHS := (X in X = _)%pattern. Shortcuts defined this way can be freely used in place of the trailing ``ident in term`` part of any contextual pattern. Some examples follow: .. coqdoc:: set rhs := RHS. rewrite [in RHS]rule. case: (a + _ in RHS). .. _views_and_reflection_ssr: Views and reflection -------------------- The bookkeeping facilities presented in section :ref:`basic_tactics_ssr` are crafted to ease simultaneous introductions and generalizations of facts and operations of casing, naming etc. It also a common practice to make a stack operation immediately followed by an *interpretation* of the fact being pushed, that is, to apply a lemma to this fact before passing it to a tactic for decomposition, application and so on. |SSR| provides a convenient, unified syntax to combine these interpretation operations with the proof stack operations. This *view mechanism* relies on the combination of the ``/`` view switch with bookkeeping tactics and tacticals. .. _custom_elim_ssr: Interpreting eliminations ~~~~~~~~~~~~~~~~~~~~~~~~~ The view syntax combined with the ``elim`` tactic specifies an elimination scheme to be used instead of the default, generated, one. Hence the |SSR| tactic: .. coqdoc:: elim/V. is a synonym for: .. coqdoc:: intro top; elim top using V; clear top. 
where top is a fresh name and V any second-order lemma. Since an elimination view supports the two bookkeeping tacticals of discharge and introduction (see section :ref:`basic_tactics_ssr`), the |SSR| tactic: .. coqdoc:: elim/V: x => y. is a synonym for: .. coqdoc:: elim x using V; clear x; intro y. where ``x`` is a variable in the context, ``y`` a fresh name and ``V`` any second order lemma; |SSR| relaxes the syntactic restrictions of the Coq ``elim``. The first pattern following ``:`` can be a ``_`` wildcard if the conclusion of the view ``V`` specifies a pattern for its last argument (e.g., if ``V`` is a functional induction lemma generated by the ``Function`` command). The elimination view mechanism is compatible with the equation name generation (see section :ref:`generation_of_equations_ssr`). .. example:: The following script illustrates a toy example of this feature. Let us define a function adding an element at the end of a list: .. coqtop:: reset none From Coq Require Import ssreflect List. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Variable d : Type. Fixpoint add_last (s : list d) (z : d) {struct s} : list d := if s is cons x s' then cons x (add_last s' z) else z :: nil. One can define an alternative, reversed, induction principle on inductively defined lists, by proving the following lemma: .. coqtop:: all Axiom last_ind_list : forall P : list d -> Prop, P nil -> (forall s (x : d), P s -> P (add_last s x)) -> forall s : list d, P s. Then the combination of elimination views with equation names result in a concise syntax for reasoning inductively using the user-defined elimination scheme. .. coqtop:: all Lemma test (x : d) (l : list d): l = l. elim/last_ind_list E : l=> [| u v]; last first. User-provided eliminators (potentially generated with Coq’s ``Function`` command) can be combined with the type family switches described in section :ref:`type_families_ssr`. Consider an eliminator ``foo_ind`` of type: .. coqdoc:: foo_ind : forall …, forall x : T, P p1 … pm. and consider the tactic: .. coqdoc:: elim/foo_ind: e1 … / en. The ``elim/`` tactic distinguishes two cases: :truncated eliminator: when ``x`` does not occur in ``P p1 … pm`` and the type of ``en`` unifies with ``T`` and ``en`` is not ``_``. In that case, ``en`` is passed to the eliminator as the last argument (``x`` in ``foo_ind``) and ``en−1 … e1`` are used as patterns to select in the goal the occurrences that will be bound by the predicate ``P``, thus it must be possible to unify the subterm of the goal matched by ``en−1`` with ``pm`` , the one matched by ``en−2`` with ``pm−1`` and so on. :regular eliminator: in all the other cases. Here it must be possible to unify the term matched by ``en`` with ``pm`` , the one matched by ``en−1`` with ``pm−1`` and so on. Note that standard eliminators have the shape ``…forall x, P … x``, thus ``en`` is the pattern identifying the eliminated term, as expected. As explained in section :ref:`type_families_ssr`, the initial prefix of ``ei`` can be omitted. Here is an example of a regular, but nontrivial, eliminator. .. example:: Here is a toy example illustrating this feature. .. coqtop:: reset none From Coq Require Import ssreflect FunInd. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Function plus (m n : nat) {struct n} : nat := if n is S p then S (plus m p) else m. About plus_ind. Lemma test x y z : plus (plus x y) z = plus x (plus y z). 
The following tactics are all valid and perform the same elimination on this goal. .. coqdoc:: elim/plus_ind: z / (plus _ z). elim/plus_ind: {z}(plus _ z). elim/plus_ind: {z}_. elim/plus_ind: z / _. .. coqtop:: reset none From Coq Require Import ssreflect FunInd. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. Function plus (m n : nat) {struct n} : nat := if n is S p then S (plus m p) else m. About plus_ind. Lemma test x y z : plus (plus x y) z = plus x (plus y z). .. coqtop:: all elim/plus_ind: z / _. The two latter examples feature a wildcard pattern: in this case, the resulting pattern is inferred from the type of the eliminator. In both these examples, it is ``(plus _ _)``, which matches the subterm ``plus (plus x y) z`` thus instantiating the last ``_`` with ``z``. Note that the tactic: .. coqtop:: reset none From Coq Require Import ssreflect FunInd. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. Function plus (m n : nat) {struct n} : nat := if n is S p then S (plus m p) else m. About plus_ind. Lemma test x y z : plus (plus x y) z = plus x (plus y z). .. coqtop:: all Fail elim/plus_ind: y / _. triggers an error: in the conclusion of the ``plus_ind`` eliminator, the first argument of the predicate ``P`` should be the same as the second argument of ``plus``, in the second argument of ``P``, but ``y`` and ``z`` do no unify. Here is an example of a truncated eliminator: .. example:: Consider the goal: .. coqtop:: reset none From Coq Require Import ssreflect FunInd. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqdoc:: Lemma test p n (n_gt0 : 0 < n) (pr_p : prime p) : p %| \prod_(i <- prime_decomp n | i \in prime_decomp n) i.1 ^ i.2 -> exists2 x : nat * nat, x \in prime_decomp n & p = x.1. Proof. elim/big_prop: _ => [| u v IHu IHv | [q e] /=]. where the type of the ``big_prop`` eliminator is .. coqdoc:: big_prop: forall (R : Type) (Pb : R -> Type) (idx : R) (op1 : R -> R -> R), Pb idx -> (forall x y : R, Pb x -> Pb y -> Pb (op1 x y)) -> forall (I : Type) (r : seq I) (P : pred I) (F : I -> R), (forall i : I, P i -> Pb (F i)) -> Pb (\big[op1/idx]_(i <- r | P i) F i). Since the pattern for the argument of Pb is not specified, the inferred one is used instead: ``big[_/_]_(i <- _ | _ i) _ i``, and after the introductions, the following goals are generated: .. coqdoc:: subgoal 1 is: p %| 1 -> exists2 x : nat * nat, x \in prime_decomp n & p = x.1 subgoal 2 is: p %| u * v -> exists2 x : nat * nat, x \in prime_decomp n & p = x.1 subgoal 3 is: (q, e) \in prime_decomp n -> p %| q ^ e -> exists2 x : nat * nat, x \in prime_decomp n & p = x.1. Note that the pattern matching algorithm instantiated all the variables occurring in the pattern. .. _interpreting_assumptions_ssr: Interpreting assumptions ~~~~~~~~~~~~~~~~~~~~~~~~ Interpreting an assumption in the context of a proof consists in applying to it a lemma before generalizing, and/or decomposing this assumption. For instance, with the extensive use of boolean reflection (see section :ref:`views_and_reflection_ssr`), it is quite frequent to need to decompose the logical interpretation of (the boolean expression of) a fact, rather than the fact itself. This can be achieved by a combination of ``move : _ => _`` switches, like in the following example, where ``||`` is a notation for the boolean disjunction. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. 
Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Variables P Q : bool -> Prop. Hypothesis P2Q : forall a b, P (a || b) -> Q a. Lemma test a : P (a || a) -> True. move=> HPa; move: {HPa}(P2Q HPa) => HQa. which transforms the hypothesis ``HPa : P (a || a)``, which has been introduced from the initial statement, into ``HQa : Q a``. This operation is so common that the tactic shell has specific syntax for it. The following scripts: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. Variables P Q : bool -> Prop. Hypothesis P2Q : forall a b, P (a || b) -> Q a. Lemma test a : P (a || a) -> True. .. coqtop:: all move=> HPa; move/P2Q: HPa => HQa. or more directly: .. coqtop:: all restart move/P2Q=> HQa. are equivalent to the former one. The former script shows how to interpret a fact (already in the context), thanks to the discharge tactical (see section :ref:`discharge_ssr`) and the latter, how to interpret the top assumption of a goal. Note that the number of wildcards to be inserted to find the correct application of the view lemma to the hypothesis has been automatically inferred. The view mechanism is compatible with the ``case`` tactic and with the equation name generation mechanism (see section :ref:`generation_of_equations_ssr`): .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Variables P Q: bool -> Prop. Hypothesis Q2P : forall a b, Q (a || b) -> P a \/ P b. Lemma test a b : Q (a || b) -> True. case/Q2P=> [HPa | HPb]. This view tactic performs: .. coqdoc:: move=> HQ; case: {HQ}(Q2P HQ) => [HPa | HPb]. The term on the right of the ``/`` view switch is called a *view lemma*. Any |SSR| term coercing to a product type can be used as a view lemma. The examples we have given so far explicitly provide the direction of the translation to be performed. In fact, view lemmas need not be oriented. The view mechanism is able to detect which application is relevant for the current goal. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Variables P Q: bool -> Prop. Hypothesis PQequiv : forall a b, P (a || b) <-> Q a. Lemma test a b : P (a || b) -> True. move/PQequiv=> HQab. has the same behavior as the first example above. The view mechanism can automatically insert a *view hint* to transform the double implication into the expected simple implication. The last script is in fact equivalent to: .. coqdoc:: Lemma test a b : P (a || b) -> True. move/(iffLR (PQequiv _ _)). where: .. coqdoc:: Lemma iffLR P Q : (P <-> Q) -> P -> Q. Specializing assumptions ```````````````````````` The special case when the *head symbol* of the view lemma is a wildcard is used to interpret an assumption by *specializing* it. The view mechanism hence offers the possibility to apply a higher-order assumption to some given arguments. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test z : (forall x y, x + y = z -> z = x) -> z = 0. move/(_ 0 z). Interpreting goals ~~~~~~~~~~~~~~~~~~ In a similar way, it is also often convenient to change a goal by turning it into an equivalent proposition.
The view mechanism of |SSR| has a special syntax ``apply/`` for combining in a single tactic simultaneous goal interpretation operations and bookkeeping steps. .. example:: The following example uses the ``~~`` prenex notation for boolean negation: .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Variables P Q: bool -> Prop. Hypothesis PQequiv : forall a b, P (a || b) <-> Q a. Lemma test a : P ((~~ a) || a). apply/PQequiv. Thus, in this case, the tactic ``apply/PQequiv`` is equivalent to ``apply: (iffRL (PQequiv _ _))``, where ``iffRL`` is the analogue of ``iffLR`` for the converse implication. Any |SSR| term whose type coerces to a double implication can be used as a view for goal interpretation. Note that the goal interpretation view mechanism supports both ``apply`` and ``exact`` tactics. As expected, a goal interpretation view command ``exact/term`` should solve the current goal or it will fail. .. warning:: Goal interpretation view tactics are *not* compatible with the bookkeeping tactical ``=>`` since this would be redundant with the ``apply: term => _`` construction. Boolean reflection ~~~~~~~~~~~~~~~~~~ In the Calculus of Inductive Constructions, there is an obvious distinction between logical propositions and boolean values. On the one hand, logical propositions are objects of *sort* ``Prop`` which is the carrier of intuitionistic reasoning. Logical connectives in ``Prop`` are *types*, which give precise information on the structure of their proofs; this information is automatically exploited by Coq tactics. For example, Coq knows that a proof of ``A \/ B`` is either a proof of ``A`` or a proof of ``B``. The tactics ``left`` and ``right`` change the goal ``A \/ B`` to ``A`` and ``B``, respectively; dually, the tactic ``case`` reduces the goal ``A \/ B => G`` to two subgoals ``A => G`` and ``B => G``. On the other hand, ``bool`` is an inductive *datatype* with two constructors ``true`` and ``false``. Logical connectives on ``bool`` are *computable functions*, defined by their truth tables, using case analysis: .. example:: .. coqtop:: reset none From Coq Require Import ssreflect. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Definition orb (b1 b2 : bool) := if b1 then true else b2. Properties of such connectives are also established using case analysis. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test b : b || ~~ b = true. by case: b. Once ``b`` is replaced by ``true`` in the first goal and by ``false`` in the second one, the goals reduce by computation to the trivial ``true = true``. Thus, ``Prop`` and ``bool`` are truly complementary: the former supports robust natural deduction, the latter allows brute-force evaluation. |SSR| supplies a generic mechanism to have the best of the two worlds and move freely from a propositional version of a decidable predicate to its boolean version. First, booleans are injected into propositions using the coercion mechanism: .. coqdoc:: Coercion is_true (b : bool) := b = true. This allows any boolean formula ``b`` to be used in a context where Coq would expect a proposition, e.g., after ``Lemma … :``. It is then interpreted as ``(is_true b)``, i.e., the proposition ``b = true``.
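For instance (a minimal sketch, assuming the ``ssrbool`` notations for ``||`` and ``~~``), a statement written with a bare boolean such as: .. coqdoc:: Lemma example (b : bool) : b || ~~ b. is elaborated by Coq as ``is_true (b || ~~ b)``, i.e., exactly the proposition ``b || ~~ b = true`` proved above by case analysis.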
Coercions are elided by the pretty-printer, so they are essentially transparent to the user. The reflect predicate ~~~~~~~~~~~~~~~~~~~~~ To get all the benefits of the boolean reflection, it is in fact convenient to introduce the following inductive predicate ``reflect`` to relate propositions and booleans: .. coqdoc:: Inductive reflect (P: Prop): bool -> Type := | Reflect_true : P -> reflect P true | Reflect_false : ~P -> reflect P false. The statement ``(reflect P b)`` asserts that ``(is_true b)`` and ``P`` are logically equivalent propositions. For instance, the following lemma: .. coqdoc:: Lemma andP: forall b1 b2, reflect (b1 /\ b2) (b1 && b2). relates the boolean conjunction to the logical one ``/\``. Note that in ``andP``, ``b1`` and ``b2`` are two boolean variables and the proposition ``b1 /\ b2`` hides two coercions. The conjunction of ``b1`` and ``b2`` can then be viewed as ``b1 /\ b2`` or as ``b1 && b2``. Expressing logical equivalences through this family of inductive types makes it possible to benefit from *rewritable equations* associated with the case analysis of Coq’s inductive types. Since the equivalence predicate is defined in Coq as: .. coqdoc:: Definition iff (A B:Prop) := (A -> B) /\ (B -> A). where ``/\`` is a notation for ``and``: .. coqdoc:: Inductive and (A B:Prop) : Prop := conj : A -> B -> and A B. This makes case analysis very different according to the way an equivalence property has been defined. .. coqdoc:: Lemma andE (b1 b2 : bool) : (b1 /\ b2) <-> (b1 && b2). Let us compare the respective behaviors of ``andE`` and ``andP``. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. Axiom andE : forall (b1 b2 : bool), (b1 /\ b2) <-> (b1 && b2). .. coqtop:: all Lemma test (b1 b2 : bool) : if (b1 && b2) then b1 else ~~(b1||b2). .. coqtop:: all case: (@andE b1 b2). .. coqtop:: none Restart. .. coqtop:: all case: (@andP b1 b2). Expressing the reflection relation through the ``reflect`` predicate is hence a very convenient way to deal with classical reasoning, by case analysis. Using the ``reflect`` predicate moreover allows one to program rich specifications inside its two constructors, which will be automatically taken into account during destruction. This formalisation style gives far more efficient specifications than quantified (double) implications. A naming convention in |SSR| is to postfix the name of view lemmas with ``P``. For example, ``orP`` relates ``||`` and ``\/``, ``negP`` relates ``~~`` and ``~``. The view mechanism is compatible with reflect predicates. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all abort Lemma test (a b : bool) (Ha : a) (Hb : b) : a /\ b. apply/andP. Conversely .. coqtop:: all Lemma test (a b : bool) : a /\ b -> a. move/andP. The same tactics can also be used to perform the converse operation, changing a boolean conjunction into a logical one. The view mechanism guesses the direction of the transformation to be used, i.e., the constructor of the ``reflect`` predicate that should be chosen. General mechanism for interpreting goals and assumptions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Specializing assumptions ```````````````````````` The |SSR| tactic: .. coqdoc:: move/(_ term1 … termn). is equivalent to the tactic: ..
coqdoc:: intro top; generalize (top term1 … termn); clear top. where ``top`` is a fresh name for introducing the top assumption of the current goal. Interpreting assumptions ```````````````````````` The general form of an assumption view tactic is: .. tacv:: {| move | case } / @term :undocumented: The term, called the *view lemma*, can be: + a (term coercible to a) function; + a (possibly quantified) implication; + a (possibly quantified) double implication; + a (possibly quantified) instance of the reflect predicate (see section :ref:`views_and_reflection_ssr`). Let ``top`` be the top assumption in the goal. There are three steps in the behavior of an assumption view tactic: + It first introduces ``top``. + If the type of :token:`term` is neither a double implication nor an instance of the reflect predicate, then the tactic automatically generalises a term of the form: ``term term1 … termn`` where the terms ``term1 … termn`` instantiate the possible quantified variables of ``term``, in order for ``(term term1 … termn top)`` to be well typed. + If the type of ``term`` is an equivalence, or an instance of the reflect predicate, it generalises a term of the form: ``(termvh (term term1 … termn ))`` where the term ``termvh`` inserted is called an *assumption interpretation view hint*. + It finally clears ``top``. For a ``case/term`` tactic, the generalisation step is replaced by a case analysis step. *View hints* are declared by the user (see section :ref:`views_and_reflection_ssr`) and are stored in the ``Hint View`` database. The proof engine automatically detects from the shape of the top assumption ``top`` and of the view lemma ``term`` provided to the tactic the appropriate view hint in the database to be inserted. If ``term`` is a double implication, then the view hint will be one of the defined view hints for implication. These hints are by default the ones present in the file ``ssreflect.v``: .. coqdoc:: Lemma iffLR : forall P Q, (P <-> Q) -> P -> Q. which transforms a double implication into the left-to-right one, or: .. coqdoc:: Lemma iffRL : forall P Q, (P <-> Q) -> Q -> P. which produces the converse implication. In both cases, the two first Prop arguments are implicit. If ``term`` is an instance of the ``reflect`` predicate, then the view hint ``termvh`` will be one of the defined view hints for the ``reflect`` predicate, which are by default the ones present in the file ``ssrbool.v``. These hints are not only used for choosing the appropriate direction of the translation, but they also allow complex transformations involving negations. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Check introN. .. coqtop:: all Lemma test (a b : bool) (Ha : a) (Hb : b) : ~~ (a && b). apply/andP. In fact this last script does not exactly use the hint ``introN``, but the more general hint: .. coqtop:: all Check introNTF. The lemma ``introN`` is an instantiation of ``introNTF`` using ``c := true``. Note that views, being part of :token:`i_pattern`, can be used to interpret assertions too. For example, the following script asserts ``a && b`` but actually uses its propositional interpretation. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test (a b : bool) (pab : b && a) : b. have /andP [pa ->] : (a && b) by rewrite andbC.
Interpreting goals ~~~~~~~~~~~~~~~~~~ A goal interpretation view tactic of the form: .. tacv:: apply/@term :undocumented: applied to a goal ``top`` is interpreted in the following way: + If the type of ``term`` is neither an instance of the ``reflect`` predicate nor an equivalence, then the term ``term`` is applied to the current goal ``top``, possibly inserting implicit arguments. + If the type of ``term`` is an instance of the reflect predicate or an equivalence, then a *goal interpretation view hint* can possibly be inserted, which corresponds to the application of a term ``(termvh (term _ … _))`` to the current goal, possibly inserting implicit arguments. Like assumption interpretation view hints, goal interpretation ones are user-defined lemmas stored (see section :ref:`views_and_reflection_ssr`) in the ``Hint View`` database, bridging the possible gap between the type of ``term`` and the type of the goal. Interpreting equivalences ~~~~~~~~~~~~~~~~~~~~~~~~~ Equivalent boolean propositions are simply *equal* boolean terms. A special construction helps the user to prove boolean equalities by considering them as logical double implications (between their coerced versions), while performing at the same time logical operations on both sides. The syntax of double views is: .. tacv:: apply/@term/@term :undocumented: The first term is the view lemma applied to the left-hand side of the equality, while the second term is the one applied to the right-hand side. In this context, the identity view can be used when no view has to be applied: .. coqdoc:: Lemma idP : reflect b1 b1. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test (b1 b2 b3 : bool) : ~~ (b1 || b2) = b3. apply/idP/idP. The same goal can be decomposed in several ways, and the user may choose the most convenient interpretation. .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. .. coqtop:: all Lemma test (b1 b2 b3 : bool) : ~~ (b1 || b2) = b3. apply/norP/idP. .. _declaring_new_hints_ssr: Declaring new Hint Views ~~~~~~~~~~~~~~~~~~~~~~~~ .. cmd:: Hint View for move / @ident {? | @natural } Hint View for apply / @ident {? | @natural } This command can be used to extend the database of hints for the view mechanism. As the library ``ssrbool.v`` already declares a corpus of hints, this feature is probably useful only for users who define their own logical connectives. The :token:`ident` is the name of the lemma to be declared as a hint. If ``move`` is used as the tactic, the hint is declared for assumption interpretation tactics; ``apply`` declares hints for goal interpretation. Goal interpretation view hints are declared for both simple views and left-hand side views. The optional natural number is the number of implicit arguments to be considered for the declared hint view lemma. .. cmdv:: Hint View for apply//@ident {? | @natural } This variant, with a double slash ``//``, declares hint views for the right-hand sides of double views. See the files ``ssreflect.v`` and ``ssrbool.v`` for examples. Multiple views ~~~~~~~~~~~~~~ The hypotheses and the goal can be interpreted by applying multiple views in sequence. Both ``move`` and ``apply`` can be followed by an arbitrary number of ``/term``. The main difference between the following two tactics .. coqdoc:: apply/v1/v2/v3. apply/v1; apply/v2; apply/v3.
is that the former applies all the views to the principal goal. Applying a view with hypotheses generates new goals, and the second line would apply the view ``v2`` to all the goals generated by ``apply/v1``. Note that the NO-OP intro pattern ``-`` can be used to separate two views, making the two following examples equivalent: .. coqdoc:: move=> /v1; move=> /v2. move=> /v1 - /v2. The tactic ``move`` can be used together with the ``in`` tactical to pass a given hypothesis to a lemma. .. example:: .. coqtop:: reset none From Coq Require Import ssreflect ssrbool. Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Section Test. Variables P Q R : Prop. .. coqtop:: all Variable P2Q : P -> Q. Variable Q2R : Q -> R. Lemma test (p : P) : True. move/P2Q/Q2R in p. If the list of views is of length two, ``Hint Views`` for interpreting equivalences are indeed taken into account, otherwise only single ``Hint Views`` are used. Synopsis and Index ------------------ Parameters ~~~~~~~~~~ |SSR| tactics .. prodn:: d_tactic ::= {| elim | case | congr | apply | exact | move } Notation scope .. prodn:: key ::= @ident Module name .. prodn:: modname ::= @qualid Natural number .. prodn:: nat_or_ident ::= {| @natural | @ident } where :token:`ident` is an Ltac variable denoting a standard Coq number (should not be the name of a tactic which can be followed by a bracket ``[``, like ``do``, ``have``,…) Items and switches ~~~~~~~~~~~~~~~~~~ .. prodn:: ssr_binder ::= {| @ident | ( @ident {? : @term } ) } binder see :ref:`abbreviations_ssr`. .. prodn:: clear_switch ::= { {+ @ident } } clear switch see :ref:`discharge_ssr` .. prodn:: c_pattern ::= {? {| @term in | @term as } } @ident in @term context pattern see :ref:`contextual_patterns_ssr` .. prodn:: d_item ::= {? {| @occ_switch | @clear_switch } } {? {| @term | ( @c_pattern ) } } discharge item see :ref:`discharge_ssr` .. prodn:: gen_item ::= {| {? @ } @ident | ( @ident ) | ( {? @ } @ident := @c_pattern ) } generalization item see :ref:`structure_ssr` .. prodn:: i_pattern ::= {| @ident | > | _ | ? | * | + | {? @occ_switch } {| -> | <- } | [ {?| @i_item } ] | - | [: {+ @ident } ] } intro pattern :ref:`introduction_ssr` .. prodn:: i_item ::= {| @clear_switch | @s_item | @i_pattern | @i_view | @i_block } view :ref:`introduction_ssr` .. prodn:: i_view ::= {? %{%} } {| /@term | /ltac:( @tactic ) } intro block :ref:`introduction_ssr` .. prodn:: i_block ::= {| [^ @ident ] | [^~ {| @ident | @natural } ] } intro item see :ref:`introduction_ssr` .. prodn:: int_mult ::= {? @natural } @mult_mark multiplier see :ref:`iteration_ssr` .. prodn:: occ_switch ::= { {? {| + | - } } {* @natural } } occur. switch see :ref:`occurrence_selection_ssr` .. prodn:: mult ::= {? @natural } @mult_mark multiplier see :ref:`iteration_ssr` .. prodn:: mult_mark ::= {| ? | ! } multiplier mark see :ref:`iteration_ssr` .. prodn:: r_item ::= {| {? / } @term | @s_item } rewrite item see :ref:`rewriting_ssr` .. prodn:: r_prefix ::= {? - } {? @int_mult } {? {| @occ_switch | @clear_switch } } {? [ @r_pattern ] } rewrite prefix see :ref:`rewriting_ssr` .. prodn:: r_pattern ::= {| @term | @c_pattern | in {? @ident in } @term } rewrite pattern see :ref:`rewriting_ssr` .. prodn:: r_step ::= {? @r_prefix } @r_item rewrite step see :ref:`rewriting_ssr` .. prodn:: s_item ::= {| /= | // | //= } simplify switch see :ref:`introduction_ssr` Tactics ~~~~~~~ *Note*: ``without loss`` and ``suffices`` are synonyms for ``wlog`` and ``suff`` respectively. .. 
tacn:: move :name: move (ssreflect) :tacn:`idtac` or :tacn:`hnf` (see :ref:`bookkeeping_ssr`) .. tacn:: apply exact :name: apply (ssreflect); exact (ssreflect) application (see :ref:`the_defective_tactics_ssr`) .. tacv:: abstract: {+ @d_item} see :ref:`abstract_ssr` and :ref:`generating_let_ssr` .. tacv:: elim induction (see :ref:`the_defective_tactics_ssr`) .. tacv:: case case analysis (see :ref:`the_defective_tactics_ssr`) .. tacv:: rewrite {+ @r_step } rewrite (see :ref:`rewriting_ssr`) .. tacn:: under {? @r_prefix } @term {? => {+ @i_item}} {? do {| @tactic | [ {*| @tactic } ] } } under (see :ref:`under_ssr`) .. tacn:: over over (see :ref:`over_ssr`) .. tacn:: have {* @i_item } {? @i_pattern } {? {| @s_item | {+ @ssr_binder } } } {? : @term } := @term have {* @i_item } {? @i_pattern } {? {| @s_item | {+ @ssr_binder } } } : @term {? by @tactic } have suff {? @clear_switch } {? @i_pattern } {? : @term } := @term have suff {? @clear_switch } {? @i_pattern } : @term {? by @tactic } gen have {? @ident , } {? @i_pattern } : {+ @gen_item } / @term {? by @tactic } generally have {? @ident , } {? @i_pattern } : {+ @gen_item } / @term {? by @tactic } :name: _; _; _; _; _; generally have forward chaining (see :ref:`structure_ssr`) .. tacn:: wlog {? suff } {? @i_item } : {* {| @gen_item | @clear_switch } } / @term specializing (see :ref:`structure_ssr`) .. tacn:: suff {* @i_item } {? @i_pattern } {+ @ssr_binder } : @term {? by @tactic } suffices {* @i_item } {? @i_pattern } {+ @ssr_binder } : @term {? by @tactic } suff {? have } {? @clear_switch } {? @i_pattern } : @term {? by @tactic } suffices {? have } {? @clear_switch } {? @i_pattern } : @term {? by @tactic } :name: suff; suffices; _; _ backchaining (see :ref:`structure_ssr`) .. tacv:: pose @ident := @term local definition (see :ref:`definitions_ssr`) .. tacv:: pose @ident {+ @ssr_binder } := @term local function definition .. tacv:: pose fix @fix_decl local fix definition .. tacv:: pose cofix @fix_decl local cofix definition .. tacn:: set @ident {? : @term } := {? @occ_switch } {| @term | ( @c_pattern) } :name: set (ssreflect) abbreviation (see :ref:`abbreviations_ssr`) .. tacn:: unlock {* {? @r_prefix } @ident } unlock (see :ref:`locking_ssr`) .. tacn:: congr {? @natural } @term congruence (see :ref:`congruence_ssr`) Tacticals ~~~~~~~~~ .. prodn:: tactic += @d_tactic {? @ident } : {+ @d_item } {? @clear_switch } discharge :ref:`discharge_ssr` .. prodn:: tactic += @tactic => {+ @i_item } introduction see :ref:`introduction_ssr` .. prodn:: tactic += @tactic in {+ {| @gen_item | @clear_switch } } {? * } localization see :ref:`localization_ssr` .. prodn:: tactic += do {? @mult } {| @tactic | [ {+| @tactic } ] } iteration see :ref:`iteration_ssr` .. prodn:: tactic += @tactic ; {| first | last } {? @natural } {| @tactic | [ {+| @tactic } ] } selector see :ref:`selectors_ssr` .. prodn:: tactic += @tactic ; {| first | last } {? @natural } rotation see :ref:`selectors_ssr` .. prodn:: tactic += by {| @tactic | [ {*| @tactic } ] } closing see :ref:`terminators_ssr` Commands ~~~~~~~~ .. cmd:: Hint View for {| move | apply } / @ident {? | @natural } view hint declaration (see :ref:`declaring_new_hints_ssr`) .. cmd:: Hint View for apply // @ident {? @natural } right hand side double , view hint declaration (see :ref:`declaring_new_hints_ssr`) .. cmd:: Prenex Implicits {+ @ident } prenex implicits declaration (see :ref:`parametric_polymorphism_ssr`) Settings ~~~~~~~~ .. flag:: Debug Ssreflect *Developer only.* Print debug information on reflect. .. 
flag:: Debug SsrMatching *Developer only.* Print debug information on SSR matching. .. rubric:: Footnotes .. [#1] Unfortunately, even after a call to the Set Printing All command, some occurrences are still not displayed to the user, essentially the ones possibly hidden in the predicate of a dependent match structure. .. [#2] Thus scripts that depend on bound variable names, e.g., via intros or with, are inherently fragile. .. [#3] The name ``subnK`` reads as “right cancellation rule for nat subtraction”. .. [#4] Also, a slightly different variant may be used for the first :token:`d_item` of case and elim; see section :ref:`type_families_ssr`. .. [#5] Except /= does not expand the local definitions created by the |SSR| in tactical. .. [#6] |SSR| reserves all identifiers of the form “_x_”, which is used for such generated names. .. [#7] More precisely, it should have a quantified inductive type with a assumptions and m − a constructors. .. [#8] This is an implementation feature: there is no such obstruction in the metatheory .. [#9] The current state of the proof shall be displayed by the Show Proof command of Coq proof mode. .. [#10] A simple proof context entry is a naked identifier (i.e. not between parentheses) designating a context entry that is not a section variable. coq-8.15.0/doc/sphinx/proof-engine/tactics.rst000066400000000000000000001746231417001151100212250ustar00rootroot00000000000000.. _tactics: Tactics ======== Tactics specify how to transform the :term:`proof state` of an incomplete proof to eventually generate a complete proof. Proofs can be developed in two basic ways: In :gdef:`forward reasoning`, the proof begins by proving simple statements that are then combined to prove the theorem statement as the last step of the proof. With forward reasoning, for example, the proof of `A /\\ B` would begin with proofs of `A` and `B`, which are then used to prove `A /\\ B`. Forward reasoning is probably the most common approach in human-generated proofs. In :gdef:`backward reasoning`, the proof begins with the theorem statement as the goal, which is then gradually transformed until every subgoal generated along the way has been proven. In this case, the proof of `A /\\ B` begins with that formula as the goal. This can be transformed into two subgoals, `A` and `B`, followed by the proofs of `A` and `B`. Coq and its tactics use backward reasoning. A tactic may fully prove a goal, in which case the goal is removed from the proof state. More commonly, a tactic replaces a goal with one or more :term:`subgoals `. (We say that a tactic reduces a goal to its subgoals.) Most tactics require specific elements or preconditions to reduce a goal; they display error messages if they can't be applied to the goal. A few tactics, such as :tacn:`auto`, don't fail even if the proof state is unchanged. Goals are identified by number. The current goal is number 1. Tactics are applied to the current goal by default. (The default can be changed with the :opt:`Default Goal Selector` option.) They can be applied to another goal or to multiple goals with a :ref:`goal selector ` such as :n:`2: @tactic`. This chapter describes many of the most common built-in tactics. Built-in tactics can be combined to form tactic expressions, which are described in the :ref:`Ltac` chapter. Since tactic expressions can be used anywhere that a built-in tactic can be used, "tactic" may refer to both built-in tactics and tactic expressions. 
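As an illustration of the goal selectors mentioned above (a minimal sketch with a deliberately trivial goal), the selector ``2:`` applies a tactic to the second goal only:

.. coqdoc::

   Goal True /\ True.
   Proof.
     split.
     2: exact I.  (* applied to the second subgoal only *)
     exact I.     (* closes the remaining (first) subgoal *)
   Qed.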
Common elements of tactics -------------------------- Reserved keywords ~~~~~~~~~~~~~~~~~ The tactics described in this chapter reserve the following keywords:: by using Thus, these keywords cannot be used as identifiers. It also declares the following character sequences as tokens:: ** [= |- .. _invocation-of-tactics: Invocation of tactics ~~~~~~~~~~~~~~~~~~~~~ A tactic is applied as an ordinary command. It may be preceded by a goal selector (see Section :ref:`goal-selectors`). If no selector is specified, the default selector is used. .. _tactic_invocation_grammar: .. prodn:: tactic_invocation ::= @toplevel_selector : @tactic. | @tactic. .. todo: fully describe selectors. At the moment, ltac has a fairly complete description .. todo: mention selectors can be applied to some commands, such as Check, Search, SearchPattern, SearchRewrite. .. opt:: Default Goal Selector "@toplevel_selector" :name: Default Goal Selector This :term:`option` controls the default selector, used when no selector is specified when applying a tactic. The initial value is 1, hence the tactics are, by default, applied to the first goal. Using value ``all`` will make it so that tactics are, by default, applied to every goal simultaneously. Then, to apply a tactic tac to the first goal only, you can write ``1:tac``. Using value ``!`` enforces that all tactics are used either on a single focused goal or with a local selector (’’strict focusing mode’’). Although other selectors are available, only ``all``, ``!`` or a single natural number are valid default goal selectors. .. _bindings: Bindings ~~~~~~~~ Tactics that take a term as an argument may also accept :token:`bindings` to instantiate some parameters of the term by name or position. The general form of a term with :token:`bindings` is :n:`@term__tac with @bindings` where :token:`bindings` can take two different forms: .. insertprodn bindings bindings .. prodn:: bindings ::= {+ ( {| @ident | @natural } := @term ) } | {+ @one_term } + In the first form, if an :token:`ident` is specified, it must be bound in the type of :n:`@term` and provides the tactic with an instance for the parameter of this name. If a :token:`natural` is specified, it refers to the ``n``-th non-dependent premise of :n:`@term__tac`. .. exn:: No such binder. :undocumented: + In the second form, the interpretation of the :token:`one_term`\s depend on which tactic they appear in. For :tacn:`induction`, :tacn:`destruct`, :tacn:`elim` and :tacn:`case`, the :token:`one_term`\s provide instances for all the dependent products in the type of :n:`@term__tac` while in the case of :tacn:`apply`, or of :tacn:`constructor` and its variants, only instances for the dependent products that are not bound in the conclusion of :n:`@term__tac` are required. .. exn:: Not the right number of missing arguments. :undocumented: .. _intropatterns: Intro patterns ~~~~~~~~~~~~~~ Intro patterns let you specify the name to assign to variables and hypotheses introduced by tactics. They also let you split an introduced hypothesis into multiple hypotheses or subgoals. Common tactics that accept intro patterns include :tacn:`assert`, :tacn:`intros` and :tacn:`destruct`. .. insertprodn intropattern equality_intropattern .. prodn:: intropattern ::= * | ** | @simple_intropattern simple_intropattern ::= @simple_intropattern_closed {* % @term0 } simple_intropattern_closed ::= @naming_intropattern | _ | @or_and_intropattern | @equality_intropattern naming_intropattern ::= @ident | ? 
| ?@ident or_and_intropattern ::= [ {*| {* @intropattern } } ] | ( {*, @simple_intropattern } ) | ( @simple_intropattern & {+& @simple_intropattern } ) equality_intropattern ::= -> | <- | [= {* @intropattern } ] Note that the intro pattern syntax varies between tactics. Most tactics use :n:`@simple_intropattern` in the grammar. :tacn:`destruct`, :tacn:`edestruct`, :tacn:`induction`, :tacn:`einduction`, :tacn:`case`, :tacn:`ecase` and the various :tacn:`inversion` tactics use :n:`@or_and_intropattern`, while :tacn:`intros` and :tacn:`eintros` use :n:`{* @intropattern }`. The :n:`eqn:` construct in various tactics uses :n:`@naming_intropattern`. **Naming patterns** Use these elementary patterns to specify a name: * :n:`@ident` — use the specified name * :n:`?` — let Coq generate a fresh name * :n:`?@ident` — generate a name that begins with :n:`@ident` * :n:`_` — discard the matched part (unless it is required for another hypothesis) * if a disjunction pattern omits a name, such as :g:`[|H2]`, Coq will choose a name **Splitting patterns** The most common splitting patterns are: * split a hypothesis in the form :n:`A /\ B` into two hypotheses :g:`H1: A` and :g:`H2: B` using the pattern :g:`(H1 & H2)` or :g:`(H1, H2)` or :g:`[H1 H2]`. :ref:`Example `. This also works on :n:`A <-> B`, which is just a notation representing :n:`(A -> B) /\ (B -> A)`. * split a hypothesis in the form :g:`A \/ B` into two subgoals using the pattern :g:`[H1|H2]`. The first subgoal will have the hypothesis :g:`H1: A` and the second subgoal will have the hypothesis :g:`H2: B`. :ref:`Example ` * split a hypothesis in either of the forms :g:`A /\ B` or :g:`A \/ B` using the pattern :g:`[]`. Patterns can be nested: :n:`[[Ha|Hb] H]` can be used to split :n:`(A \/ B) /\ C`. Note that there is no equivalent to intro patterns for goals. For a goal :g:`A /\ B`, use the :tacn:`split` tactic to replace the current goal with subgoals :g:`A` and :g:`B`. For a goal :g:`A \/ B`, use :tacn:`left` to replace the current goal with :g:`A`, or :tacn:`right` to replace the current goal with :g:`B`. * :n:`( {+, @simple_intropattern}` ) — matches a product over an inductive type with a :ref:`single constructor `. If the number of patterns equals the number of constructor arguments, then it applies the patterns only to the arguments, and :n:`( {+, @simple_intropattern} )` is equivalent to :n:`[{+ @simple_intropattern}]`. If the number of patterns equals the number of constructor arguments plus the number of :n:`let-ins`, the patterns are applied to the arguments and :n:`let-in` variables. * :n:`( {+& @simple_intropattern} )` — matches a right-hand nested term that consists of one or more nested binary inductive types such as :g:`a1 OP1 a2 OP2 ...` (where the :g:`OPn` are right-associative). (If the :g:`OPn` are left-associative, additional parentheses will be needed to make the term right-hand nested, such as :g:`a1 OP1 (a2 OP2 ...)`.) The splitting pattern can have more than 2 names, for example :g:`(H1 & H2 & H3)` matches :g:`A /\ B /\ C`. The inductive types must have a :ref:`single constructor with two parameters `. :ref:`Example ` * :n:`[ {+| {* @intropattern } } ]` — splits an inductive type that has :ref:`multiple constructors ` such as :n:`A \/ B` into multiple subgoals. The number of :token:`intropattern`\s must be the same as the number of constructors for the matched part. 
* :n:`[ {+ @intropattern} ]` — splits an inductive type that has a :ref:`single constructor with multiple parameters ` such as :n:`A /\ B` into multiple hypotheses. Use :n:`[H1 [H2 H3]]` to match :g:`A /\ B /\ C`. * :n:`[]` — splits an inductive type: If the inductive type has multiple constructors, such as :n:`A \/ B`, create one subgoal for each constructor. If the inductive type has a single constructor with multiple parameters, such as :n:`A /\ B`, split it into multiple hypotheses. **Equality patterns** These patterns can be used when the hypothesis is an equality: * :n:`->` — replaces the right-hand side of the hypothesis with the left-hand side of the hypothesis in the conclusion of the goal; the hypothesis is cleared; if the left-hand side of the hypothesis is a variable, it is substituted everywhere in the context and the variable is removed. :ref:`Example ` * :n:`<-` — similar to :n:`->`, but replaces the left-hand side of the hypothesis with the right-hand side of the hypothesis. * :n:`[= {*, @intropattern} ]` — If the product is over an equality type, applies either :tacn:`injection` or :tacn:`discriminate`. If :tacn:`injection` is applicable, the intropattern is used on the hypotheses generated by :tacn:`injection`. If the number of patterns is smaller than the number of hypotheses generated, the pattern :n:`?` is used to complete the list. :ref:`Example ` **Other patterns** * :n:`*` — introduces one or more quantified variables from the result until there are no more quantified variables. :ref:`Example ` * :n:`**` — introduces one or more quantified variables or hypotheses from the result until there are no more quantified variables or implications (:g:`->`). :g:`intros **` is equivalent to :g:`intros`. :ref:`Example ` * :n:`@simple_intropattern_closed {* % @term}` — first applies each of the terms with the :tacn:`apply … in` tactic on the hypothesis to be introduced, then it uses :n:`@simple_intropattern_closed`. :ref:`Example ` .. _intropattern_cons_note: .. note:: :n:`A \/ B` and :n:`A /\ B` use infix notation to refer to the inductive types :n:`or` and :n:`and`. :n:`or` has multiple constructors (:n:`or_introl` and :n:`or_intror`), while :n:`and` has a single constructor (:n:`conj`) with multiple parameters (:n:`A` and :n:`B`). These are defined in ``theories/Init/Logic.v``. The "where" clauses define the infix notation for "or" and "and". .. coqdoc:: Inductive or (A B:Prop) : Prop := | or_introl : A -> A \/ B | or_intror : B -> A \/ B where "A \/ B" := (or A B) : type_scope. Inductive and (A B:Prop) : Prop := conj : A -> B -> A /\ B where "A /\ B" := (and A B) : type_scope. .. note:: :n:`intros {+ p}` is not always equivalent to :n:`intros p; ... ; intros p` if some of the :n:`p` are :g:`_`. In the first form, all erasures are done at once, while they're done sequentially for each tactic in the second form. If the second matched term depends on the first matched term and the pattern for both is :g:`_` (i.e., both will be erased), the first :n:`intros` in the second form will fail because the second matched term still has the dependency on the first. Examples: .. _intropattern_conj_ex: .. example:: intro pattern for /\\ .. coqtop:: reset none Goal forall (A: Prop) (B: Prop), (A /\ B) -> True. .. coqtop:: out intros. .. coqtop:: all destruct H as (HA & HB). .. _intropattern_disj_ex: .. example:: intro pattern for \\/ .. coqtop:: reset none Goal forall (A: Prop) (B: Prop), (A \/ B) -> True. .. coqtop:: out intros. .. coqtop:: all destruct H as [HA|HB]. all: swap 1 2. .. 
_intropattern_rarrow_ex: .. example:: -> intro pattern .. coqtop:: reset none Goal forall (x:nat) (y:nat) (z:nat), (x = y) -> (y = z) -> (x = z). .. coqtop:: out intros * H. .. coqtop:: all intros ->. .. _intropattern_inj_discr_ex: .. example:: [=] intro pattern The first :n:`intros [=]` uses :tacn:`injection` to strip :n:`(S ...)` from both sides of the matched equality. The second uses :tacn:`discriminate` on the contradiction :n:`1 = 2` (internally represented as :n:`(S O) = (S (S O))`) to complete the goal. .. coqtop:: reset none Goal forall (n m:nat), (S n) = (S m) -> (S O)=(S (S O)) -> False. .. coqtop:: out intros *. .. coqtop:: all intros [= H]. .. coqtop:: all intros [=]. .. _intropattern_ampersand_ex: .. example:: (A & B & ...) intro pattern .. coqtop:: reset none Parameters (A : Prop) (B: nat -> Prop) (C: Prop). .. coqtop:: out Goal A /\ (exists x:nat, B x /\ C) -> True. .. coqtop:: all intros (a & x & b & c). .. _intropattern_star_ex: .. example:: * intro pattern .. coqtop:: reset out Goal forall (A: Prop) (B: Prop), A -> B. .. coqtop:: all intros *. .. _intropattern_2stars_ex: .. example:: ** pattern ("intros \**" is equivalent to "intros") .. coqtop:: reset out Goal forall (A: Prop) (B: Prop), A -> B. .. coqtop:: all intros **. .. example:: compound intro pattern .. coqtop:: reset out Goal forall A B C:Prop, A \/ B /\ C -> (A -> C) -> C. .. coqtop:: all intros * [a | (_,c)] f. all: swap 1 2. .. _intropattern_injection_ex: .. example:: combined intro pattern using [=] -> and % .. coqtop:: reset none Require Import Coq.Lists.List. Section IntroPatterns. Variables (A : Type) (xs ys : list A). .. coqtop:: out Example ThreeIntroPatternsCombined : S (length ys) = 1 -> xs ++ ys = xs. .. coqtop:: all intros [=->%length_zero_iff_nil]. * `intros` would add :g:`H : S (length ys) = 1` * `intros [=]` would additionally apply :tacn:`injection` to :g:`H` to yield :g:`H0 : length ys = 0` * `intros [=->%length_zero_iff_nil]` applies the theorem, making H the equality :g:`l=nil`, which is then applied as for :g:`->`. .. coqdoc:: Theorem length_zero_iff_nil (l : list A): length l = 0 <-> l=nil. The example is based on `Tej Chajed's coq-tricks `_ .. _occurrenceclauses: Occurrence clauses ~~~~~~~~~~~~~~~~~~ An :gdef:`occurrence` is a subterm of a goal or hypothesis that matches a pattern provided by a tactic. Occurrence clauses select a subset of the ocurrences in a goal and/or in one or more of its hypotheses. .. insertprodn occurrences concl_occs .. prodn:: occurrences ::= at @occs_nums | in @goal_occurrences simple_occurrences ::= @occurrences occs_nums ::= {? - } {+ @nat_or_var } nat_or_var ::= {| @natural | @ident } goal_occurrences ::= {+, @hyp_occs } {? %|- {? @concl_occs } } | * %|- {? @concl_occs } | %|- {? @concl_occs } | {? @concl_occs } hyp_occs ::= @hypident {? at @occs_nums } hypident ::= @ident | ( type of @ident ) | ( value of @ident ) concl_occs ::= * {? at @occs_nums } :n:`@occurrences` The first form of :token:`occurrences` selects occurrences in the conclusion of the goal. The second form can select occurrences in the goal conclusion and in one or more hypotheses. :n:`@simple_occurrences` A semantically restricted form of :n:`@occurrences` that doesn't allow the `at` clause anywhere within it. :n:`{? - } {+ @nat_or_var }` Selects the specified occurrences within a single goal or hypothesis. 
Occurrences are numbered starting with 1 following a depth-first traversal of the term's expression, including occurrences in :ref:`implicit arguments ` and :ref:`coercions ` that are not displayed by default. (Set the :flag:`Printing All` flag to show those in the printed term.) For example, when matching the pattern `_ + _` in the term `(a + b) + c`, occurrence 1 is `(...) + c` and occurrence 2 is `(a + b)`. When matching that pattern with term `a + (b + c)`, occurrence 1 is `a + (...)` and occurrence 2 is `b + c`. Specifying `-` includes all occurrences *except* the ones listed. :n:`{*, @hyp_occs } {? %|- {? @concl_occs } }` Selects occurrences in the specified hypotheses and the specified occurrences in the conclusion. :n:`* %|- {? @concl_occs }` Selects all occurrences in all hypotheses and the specified occurrences in the conclusion. :n:`%|- {? @concl_occs }` Selects the specified occurrences in the conclusion. :n:`@goal_occurrences ::= {? @concl_occs }` Selects all occurrences in all hypotheses and in the specified occurrences in the conclusion. :n:`@hypident {? at @occs_nums }` Omiting :token:`occs_nums` selects all occurrences within the hypothesis. :n:`@hypident ::= @ident` Selects the hypothesis named :token:`ident`. :n:`( type of @ident )` Selects the type part of the named hypothesis (e.g. `: nat`). :n:`( value of @ident )` Selects the value part of the named hypothesis (e.g. `:= 1`). :n:`@concl_occs ::= * {? at @occs_nums }` Selects occurrences in the conclusion. '*' by itself selects all occurrences. :n:`@occs_nums` selects the specified occurrences. Use `in *` to select all occurrences in all hypotheses and the conclusion, which is equivalent to `in * |- *`. Use `* |-` to select all occurrences in all hypotheses. Tactics that select a specific hypothesis H to apply to other hypotheses, such as :tacn:`rewrite` `H in * |-`, won't apply H to itself. If multiple occurrences are given, such as in :tacn:`rewrite` `H at 1 2 3`, the tactic must match at least one occurrence in order to succeed. The tactic will fail if no occurrences match. Occurrence numbers that are out of range (e.g. `at 1 3` when there are only 2 occurrences in the hypothesis or conclusion) are ignored. .. todo: remove last sentence above and add "Invalid occurrence number @natural" exn for 8.14 per #13568. Tactics that use occurrence clauses include :tacn:`set`, :tacn:`remember`, :tacn:`induction` and :tacn:`destruct`. .. seealso:: :ref:`Managingthelocalcontext`, :ref:`caseanalysisandinduction`, :ref:`printing_constructions_full`. .. _applyingtheorems: Applying theorems --------------------- .. tacn:: exact @term :name: exact This tactic applies to any goal. It gives directly the exact proof term of the goal. Let ``T`` be our goal, let ``p`` be a term of type ``U`` then ``exact p`` succeeds iff ``T`` and ``U`` are convertible (see :ref:`Conversion-rules`). .. exn:: Not an exact proof. :undocumented: .. tacv:: eexact @term. :name: eexact This tactic behaves like :tacn:`exact` but is able to handle terms and goals with existential variables. .. tacn:: assumption :name: assumption This tactic looks in the local context for a hypothesis whose type is convertible to the goal. If it is the case, the subgoal is proved. Otherwise, it fails. .. exn:: No such assumption. :undocumented: .. tacv:: eassumption :name: eassumption This tactic behaves like :tacn:`assumption` but is able to handle goals with existential variables. .. tacn:: refine @term :name: refine This tactic applies to any goal. 
It behaves like :tacn:`exact` with a big difference: the user can leave some holes (denoted by ``_`` or :n:`(_ : @type)`) in the term. :tacn:`refine` will generate as many subgoals as there are remaining holes in the elaborated term. The type of holes must be either synthesized by the system or declared by an explicit cast like ``(_ : nat -> Prop)``. Any subgoal that occurs in other subgoals is automatically shelved, as if calling :tacn:`shelve_unifiable`. The produced subgoals (shelved or not) are *not* candidates for typeclass resolution, even if they have a type-class type as conclusion, letting the user control when and how typeclass resolution is launched on them. This low-level tactic can be useful to advanced users. .. example:: .. coqtop:: reset all Inductive Option : Set := | Fail : Option | Ok : bool -> Option. Definition get : forall x:Option, x <> Fail -> bool. refine (fun x:Option => match x return x <> Fail -> bool with | Fail => _ | Ok b => fun _ => b end). intros; absurd (Fail = Fail); trivial. Defined. .. exn:: Invalid argument. The tactic :tacn:`refine` does not know what to do with the term you gave. .. exn:: Refine passed ill-formed term. The term you gave is not a valid proof (not easy to debug in general). This message may also occur in higher-level tactics that call :tacn:`refine` internally. .. exn:: Cannot infer a term for this placeholder. :name: Cannot infer a term for this placeholder. (refine) There is a hole in the term you gave whose type cannot be inferred. Put a cast around it. .. tacv:: simple refine @term :name: simple refine This tactic behaves like refine, but it does not shelve any subgoal. It does not perform any beta-reduction either. .. tacv:: notypeclasses refine @term :name: notypeclasses refine This tactic behaves like :tacn:`refine` except it performs type checking without resolution of typeclasses. .. tacv:: simple notypeclasses refine @term :name: simple notypeclasses refine This tactic behaves like the combination of :tacn:`simple refine` and :tacn:`notypeclasses refine`: it performs type checking without resolution of typeclasses, does not perform beta reductions or shelve the subgoals. :opt:`Debug` ``"unification"`` enables printing traces of unification steps used during elaboration/typechecking and the :tacn:`refine` tactic. ``"ho-unification"`` prints information about higher order heuristics. .. tacn:: apply @term :name: apply This tactic applies to any goal. The argument term is a term well-formed in the local context. The tactic :tacn:`apply` tries to match the current goal against the conclusion of the type of :token:`term`. If it succeeds, then the tactic returns as many subgoals as the number of non-dependent premises of the type of term. If the conclusion of the type of :token:`term` does not match the goal *and* the conclusion is an inductive type isomorphic to a tuple type, then each component of the tuple is recursively matched to the goal in the left-to-right order. The tactic :tacn:`apply` relies on first-order unification with dependent types unless the conclusion of the type of :token:`term` is of the form :n:`P (t__1 ... t__n)` with ``P`` to be instantiated. In the latter case, the behavior depends on the form of the goal. If the goal is of the form :n:`(fun x => Q) u__1 ... u__n` and the :n:`t__i` and :n:`u__i` unify, then :g:`P` is taken to be :g:`(fun x => Q)`. Otherwise, :tacn:`apply` tries to define :g:`P` by abstracting over :g:`t_1 ... t__n` in the goal. 
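For instance (a minimal sketch; ``nat_ind`` is the standard induction principle for ``nat``, whose conclusion has the form :g:`forall n, P n`), applying it to a concrete goal lets :g:`P` be found by abstraction:

.. coqdoc::

   Goal forall n : nat, n + 0 = n.
   Proof.
     apply nat_ind.  (* P is instantiated to (fun n => n + 0 = n) *)
     - reflexivity.
     - intros n IH; simpl; rewrite IH; reflexivity.
   Qed.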
See :tacn:`pattern` to transform the goal so that it gets the form :n:`(fun x => Q) u__1 ... u__n`. .. exn:: Unable to unify @term with @term. The :tacn:`apply` tactic failed to match the conclusion of :token:`term` and the current goal. You can help the :tacn:`apply` tactic by transforming your goal with the :tacn:`change` or :tacn:`pattern` tactics. .. exn:: Unable to find an instance for the variables {+ @ident}. This occurs when some instantiations of the premises of :token:`term` are not deducible from the unification. This is the case, for instance, when you want to apply a transitivity property. In this case, you have to use one of the variants below: .. tacv:: apply @term with {+ @term} Provides apply with explicit instantiations for all dependent premises of the type of term that do not occur in the conclusion and consequently cannot be found by unification. Notice that the collection :n:`{+ @term}` must be given according to the order of these dependent premises of the type of term. .. exn:: Not the right number of missing arguments. :undocumented: .. tacv:: apply @term with @bindings This also provides apply with values for instantiating premises. Here, variables are referred by names and non-dependent products by increasing numbers (see :ref:`bindings`). .. flag:: Apply With Renaming When on, this flag causes the names in the :n:`@term`'s type to be renamed for uniqueness. By default no renaming is done. .. deprecated:: 8.15 This flag is provided for compatibility with versions prior to 8.15. .. tacv:: apply {+, @term} This is a shortcut for :n:`apply @term__1; [.. | ... ; [ .. | apply @term__n] ... ]`, i.e. for the successive applications of :n:`@term`:sub:`i+1` on the last subgoal generated by :n:`apply @term__i` , starting from the application of :n:`@term__1`. .. tacv:: eapply @term :name: eapply The tactic :tacn:`eapply` behaves like :tacn:`apply` but it does not fail when no instantiations are deducible for some variables in the premises. Rather, it turns these variables into existential variables which are variables still to instantiate (see :ref:`Existential-Variables`). The instantiation is intended to be found later in the proof. .. tacv:: rapply @term :name: rapply The tactic :tacn:`rapply` behaves like :tacn:`eapply` but it uses the proof engine of :tacn:`refine` for dealing with existential variables, holes, and conversion problems. This may result in slightly different behavior regarding which conversion problems are solvable. However, like :tacn:`apply` but unlike :tacn:`eapply`, :tacn:`rapply` will fail if there are any holes which remain in :n:`@term` itself after typechecking and typeclass resolution but before unification with the goal. More technically, :n:`@term` is first parsed as a :production:`constr` rather than as a :production:`uconstr` or :production:`open_constr` before being applied to the goal. Note that :tacn:`rapply` prefers to instantiate as many hypotheses of :n:`@term` as possible. As a result, if it is possible to apply :n:`@term` to arbitrarily many arguments without getting a type error, :tacn:`rapply` will loop. Note that you need to :n:`Require Import Coq.Program.Tactics` to make use of :tacn:`rapply`. .. tacv:: simple apply @term. This behaves like :tacn:`apply` but it reasons modulo conversion only on subterms that contain no variables to instantiate. For instance, the following example does not succeed because it would require the conversion of ``id ?foo`` and :g:`O`. .. _simple_apply_ex: .. example:: .. 
coqtop:: all Definition id (x : nat) := x. Parameter H : forall x y, id x = y. Goal O = O. Fail simple apply H. Because it reasons modulo a limited amount of conversion, :tacn:`simple apply` fails quicker than :tacn:`apply` and it is then well-suited for uses in user-defined tactics that backtrack often. Moreover, it does not traverse tuples as :tacn:`apply` does. .. tacv:: {? simple} apply {+, @term {? with @bindings}} {? simple} eapply {+, @term {? with @bindings}} :name: simple apply; simple eapply This summarizes the different syntaxes for :tacn:`apply` and :tacn:`eapply`. .. tacv:: lapply @term :name: lapply This tactic applies to any goal, say :g:`G`. The argument term has to be well-formed in the current context, its type being reducible to a non-dependent product :g:`A -> B` with :g:`B` possibly containing products. Then it generates two subgoals :g:`B->G` and :g:`A`. Applying ``lapply H`` (where :g:`H` has type :g:`A->B` and :g:`B` does not start with a product) does the same as giving the sequence ``cut B. 2:apply H.`` where ``cut`` is described below. .. example:: Assume we have a transitive relation ``R`` on ``nat``: .. coqtop:: reset in Parameter R : nat -> nat -> Prop. Axiom Rtrans : forall x y z:nat, R x y -> R y z -> R x z. Parameters n m p : nat. Axiom Rnm : R n m. Axiom Rmp : R m p. Consider the goal ``(R n p)`` provable using the transitivity of ``R``: .. coqtop:: in Goal R n p. The direct application of ``Rtrans`` with ``apply`` fails because no value for ``y`` in ``Rtrans`` is found by ``apply``: .. coqtop:: all fail apply Rtrans. A solution is to ``apply (Rtrans n m p)`` or ``(Rtrans n m)``. .. coqtop:: all apply (Rtrans n m p). Note that ``n`` can be inferred from the goal, so the following would work too. .. coqtop:: in restart apply (Rtrans _ m). More elegantly, ``apply Rtrans with (y:=m)`` allows only mentioning the unknown m: .. coqtop:: in restart apply Rtrans with (y := m). Another solution is to mention the proof of ``(R x y)`` in ``Rtrans`` .. coqtop:: all restart apply Rtrans with (1 := Rnm). ... or the proof of ``(R y z)``. .. coqtop:: all restart apply Rtrans with (2 := Rmp). On the opposite, one can use ``eapply`` which postpones the problem of finding ``m``. Then one can apply the hypotheses ``Rnm`` and ``Rmp``. This instantiates the existential variable and completes the proof. .. coqtop:: all restart abort eapply Rtrans. apply Rnm. apply Rmp. .. note:: When the conclusion of the type of the term to ``apply`` is an inductive type isomorphic to a tuple type and ``apply`` looks recursively whether a component of the tuple matches the goal, it excludes components whose statement would result in applying an universal lemma of the form ``forall A, ... -> A``. Excluding this kind of lemma can be avoided by setting the following flag: .. flag:: Universal Lemma Under Conjunction This :term:`flag`, which preserves compatibility with versions of Coq prior to 8.4 is also available for :n:`apply @term in @ident` (see :tacn:`apply … in`). .. deprecated:: 8.15 .. tacn:: apply @term in @ident :name: apply … in This tactic applies to any goal. The argument :token:`term` is a term well-formed in the local context and the argument :token:`ident` is an hypothesis of the context. The tactic :n:`apply @term in @ident` tries to match the conclusion of the type of :token:`ident` against a non-dependent premise of the type of :token:`term`, trying them from right to left. 
If it succeeds, the statement of hypothesis :token:`ident` is replaced by the conclusion of the type of :token:`term`. The tactic also returns as many subgoals as the number of other non-dependent premises in the type of :token:`term` and of the non-dependent premises of the type of :token:`ident`. If the conclusion of the type of :token:`term` does not match the goal *and* the conclusion is an inductive type isomorphic to a tuple type, then the tuple is (recursively) decomposed and the first component that has a non-dependent premise matching the conclusion of the type of :token:`ident` is used. Tuples are decomposed in a width-first left-to-right order (for instance if the type of :g:`H1` is :g:`A <-> B` and the type of :g:`H2` is :g:`A` then :g:`apply H1 in H2` transforms the type of :g:`H2` into :g:`B`). The tactic :tacn:`apply` relies on first-order pattern matching with dependent types. .. exn:: Statement without assumptions. This happens if the type of :token:`term` has no non-dependent premise. .. exn:: Unable to apply. This happens if the conclusion of :token:`ident` does not match any of the non-dependent premises of the type of :token:`term`. .. tacv:: apply {+, @term} in {+, @ident} This applies each :token:`term` in sequence in each hypothesis :token:`ident`. .. tacv:: apply {+, @term with @bindings} in {+, @ident} This does the same but uses the bindings to instantiate parameters of :token:`term` (see :ref:`bindings`). .. tacv:: eapply {+, @term {? with @bindings } } in {+, @ident} This works as :tacn:`apply … in` but turns unresolved bindings, if any, into existential variables instead of failing. .. tacv:: apply {+, @term {? with @bindings } } in {+, @ident {? as @simple_intropattern}} :name: apply … in … as This works as :tacn:`apply … in` but applies an associated :token:`simple_intropattern` to each hypothesis :token:`ident` that comes with such a clause. .. tacv:: simple apply @term in {+, @ident} This behaves like :tacn:`apply … in` but it reasons modulo conversion only on subterms that contain no variables to instantiate and does not traverse tuples. See :ref:`the corresponding example <simple_apply_ex>`. .. tacv:: {? simple} apply {+, @term {? with @bindings}} in {+, @ident {? as @simple_intropattern}} {? simple} eapply {+, @term {? with @bindings}} in {+, @ident {? as @simple_intropattern}} This summarizes the different syntactic variants of :n:`apply @term in {+, @ident}` and :n:`eapply @term in {+, @ident}`. :opt:`Debug` ``"tactic-unification"`` enables printing traces of unification steps in tactic unification. Tactic unification is used in tactics such as :tacn:`apply` and :tacn:`rewrite`. .. _managingthelocalcontext: Managing the local context ------------------------------ .. tacn:: intro :name: intro This tactic applies to a goal that is either a product or starts with a let-binder. If the goal is a product, the tactic implements the "Lam" rule given in :ref:`Typing-rules` [1]_. If the goal starts with a let-binder, then the tactic implements a mix of the "Let" and "Conv" rules. If the current goal is a dependent product :g:`forall x:T, U` (resp. :g:`let x:=t in U`) then :tacn:`intro` puts :g:`x:T` (resp. :g:`x:=t`) in the local context. The new subgoal is :g:`U`. If the goal is a non-dependent product :math:`T \rightarrow U`, then it puts in the local context either :g:`Hn:T` (if :g:`T` is of type :g:`Set` or :g:`Prop`) or :g:`Xn:T` (if the type of :g:`T` is :g:`Type`). The optional index ``n`` is such that ``Hn`` or ``Xn`` is a fresh identifier.
In both cases, the new subgoal is :g:`U`. If the goal is an existential variable, :tacn:`intro` forces the resolution of the existential variable into a dependent product :math:`\forall`\ :g:`x:?X, ?Y`, puts :g:`x:?X` in the local context and leaves :g:`?Y` as a new subgoal allowed to depend on :g:`x`. The tactic :tacn:`intro` applies the tactic :tacn:`hnf` until :tacn:`intro` can be applied or the goal is not head-reducible. .. exn:: No product even after head-reduction. :undocumented: .. tacv:: intro @ident This applies :tacn:`intro` but forces :token:`ident` to be the name of the introduced hypothesis. .. exn:: @ident is already used. :undocumented: .. note:: If a name used by intro hides the base name of a global constant then the latter can still be referred to by a qualified name (see :ref:`Qualified-names`). .. tacv:: intros :name: intros This repeats :tacn:`intro` until it meets the head-constant. It never reduces head-constants and it never fails. .. tacv:: intros {+ @ident}. This is equivalent to the composed tactic :n:`intro @ident; ... ; intro @ident`. .. tacv:: intros until @ident This repeats intro until it meets a premise of the goal having the form :n:`(@ident : @type)` and discharges the variable named :token:`ident` of the current goal. .. exn:: No such hypothesis in current goal. :undocumented: .. tacv:: intros until @natural This repeats :tacn:`intro` until the :token:`natural`\-th non-dependent product. .. example:: On the subgoal :g:`forall x y : nat, x = y -> y = x` the tactic :n:`intros until 1` is equivalent to :n:`intros x y H`, as :g:`x = y -> y = x` is the first non-dependent product. On the subgoal :g:`forall x y z : nat, x = y -> y = x` the tactic :n:`intros until 1` is equivalent to :n:`intros x y z` as the product on :g:`z` can be rewritten as a non-dependent product: :g:`forall x y : nat, nat -> x = y -> y = x`. .. exn:: No such hypothesis in current goal. This happens when :token:`natural` is 0 or is greater than the number of non-dependent products of the goal. .. tacv:: intro {? @ident__1 } after @ident__2 intro {? @ident__1 } before @ident__2 intro {? @ident__1 } at top intro {? @ident__1 } at bottom These tactics apply :n:`intro {? @ident__1}` and move the freshly introduced hypothesis respectively after the hypothesis :n:`@ident__2`, before the hypothesis :n:`@ident__2`, at the top of the local context, or at the bottom of the local context. All hypotheses on which the new hypothesis depends are moved too so as to respect the order of dependencies between hypotheses. It is equivalent to :n:`intro {? @ident__1 }` followed by the appropriate call to :tacn:`move`, :tacn:`move … before …`, :tacn:`move … at top`, or :tacn:`move … at bottom`. .. note:: :n:`intro at bottom` is a synonym for :n:`intro` with no argument. .. exn:: No such hypothesis: @ident. :undocumented: .. tacn:: intros {* @intropattern } :name: intros … Introduces one or more variables or hypotheses from the goal by matching the intro patterns. See the description in :ref:`intropatterns`. .. tacn:: eintros {* @intropattern } :name: eintros Works just like :tacn:`intros …` except that it creates existential variables for any unresolved variables rather than failing. .. tacn:: clear @ident :name: clear This tactic erases the hypothesis named :n:`@ident` in the local context of the current goal. As a consequence, :n:`@ident` is no more displayed and no more usable in the proof development. .. exn:: No such hypothesis. :undocumented: .. exn:: @ident is used in the conclusion. :undocumented: .. 
exn:: @ident is used in the hypothesis @ident. :undocumented: .. tacv:: clear {+ @ident} This is equivalent to :n:`clear @ident. ... clear @ident.` .. tacv:: clear - {+ @ident} This variant clears all the hypotheses except the ones depending in the hypotheses named :n:`{+ @ident}` and in the goal. .. tacv:: clear This variants clears all the hypotheses except the ones the goal depends on. .. tacv:: clear dependent @ident This clears the hypothesis :token:`ident` and all the hypotheses that depend on it. .. tacv:: clearbody {+ @ident} :name: clearbody This tactic expects :n:`{+ @ident}` to be local definitions and clears their respective bodies. In other words, it turns the given definitions into assumptions. .. exn:: @ident is not a local definition. :undocumented: .. tacn:: revert {+ @ident} :name: revert This applies to any goal with variables :n:`{+ @ident}`. It moves the hypotheses (possibly defined) to the goal, if this respects dependencies. This tactic is the inverse of :tacn:`intro`. .. exn:: No such hypothesis. :undocumented: .. exn:: @ident__1 is used in the hypothesis @ident__2. :undocumented: .. tacv:: revert dependent @ident :name: revert dependent This moves to the goal the hypothesis :token:`ident` and all the hypotheses that depend on it. .. tacn:: move @ident__1 after @ident__2 This moves the hypothesis named :n:`@ident__1` in the local context after the hypothesis named :n:`@ident__2`, where “after” is in reference to the direction of the move. The proof term is not changed. If :n:`@ident__1` comes before :n:`@ident__2` in the order of dependencies, then all the hypotheses between :n:`@ident__1` and :n:`@ident__2` that (possibly indirectly) depend on :n:`@ident__1` are moved too, and all of them are thus moved after :n:`@ident__2` in the order of dependencies. If :n:`@ident__1` comes after :n:`@ident__2` in the order of dependencies, then all the hypotheses between :n:`@ident__1` and :n:`@ident__2` that (possibly indirectly) occur in the type of :n:`@ident__1` are moved too, and all of them are thus moved before :n:`@ident__2` in the order of dependencies. .. tacv:: move @ident__1 before @ident__2 :name: move … before … This moves :n:`@ident__1` towards and just before the hypothesis named :n:`@ident__2`. As for :tacn:`move`, dependencies over :n:`@ident__1` (when :n:`@ident__1` comes before :n:`@ident__2` in the order of dependencies) or in the type of :n:`@ident__1` (when :n:`@ident__1` comes after :n:`@ident__2` in the order of dependencies) are moved too. .. tacv:: move @ident at top :name: move … at top This moves :token:`ident` at the top of the local context (at the beginning of the context). .. tacv:: move @ident at bottom :name: move … at bottom This moves :token:`ident` at the bottom of the local context (at the end of the context). .. exn:: No such hypothesis. :undocumented: .. exn:: Cannot move @ident__1 after @ident__2: it occurs in the type of @ident__2. :undocumented: .. exn:: Cannot move @ident__1 after @ident__2: it depends on @ident__2. :undocumented: .. example:: .. coqtop:: reset all Goal forall x :nat, x = 0 -> forall z y:nat, y=y-> 0=x. intros x H z y H0. move x after H0. Undo. move x before H0. Undo. move H0 after H. Undo. move H0 before H. .. tacn:: rename @ident__1 into @ident__2 :name: rename This renames hypothesis :n:`@ident__1` into :n:`@ident__2` in the current context. The name of the hypothesis in the proof-term, however, is left unchanged. .. 
tacv:: rename {+, @ident__i into @ident__j} This renames the variables :n:`@ident__i` into :n:`@ident__j` in parallel. In particular, the target identifiers may contain identifiers that exist in the source context, as long as the latter are also renamed by the same tactic. .. exn:: No such hypothesis. :undocumented: .. exn:: @ident is already used. :undocumented: .. tacn:: set (@ident := @term) :name: set This replaces :token:`term` by :token:`ident` in the conclusion of the current goal and adds the new definition :n:`@ident := @term` to the local context. If :token:`term` has holes (i.e. subexpressions of the form “`_`”), the tactic first checks that all subterms matching the pattern are compatible before doing the replacement using the leftmost subterm matching the pattern. .. exn:: The variable @ident is already defined. :undocumented: .. tacv:: set (@ident := @term) in @goal_occurrences This notation allows specifying which occurrences of :token:`term` have to be substituted in the context. The :n:`in @goal_occurrences` clause is an occurrence clause whose syntax and behavior are described in :ref:`goal occurrences `. .. tacv:: set (@ident {* @binder } := @term) {? in @goal_occurrences } This is equivalent to :n:`set (@ident := fun {* @binder } => @term) {? in @goal_occurrences }`. .. tacv:: set @term {? in @goal_occurrences } This behaves as :n:`set (@ident := @term) {? in @goal_occurrences }` but :token:`ident` is generated by Coq. .. tacv:: eset (@ident {* @binder } := @term) {? in @goal_occurrences } eset @term {? in @goal_occurrences } :name: eset; _ While the different variants of :tacn:`set` expect that no existential variables are generated by the tactic, :tacn:`eset` removes this constraint. In practice, this is relevant only when :tacn:`eset` is used as a synonym of :tacn:`epose`, i.e. when the :token:`term` does not occur in the goal. .. tacn:: remember @term as @ident__1 {? eqn:@naming_intropattern } :name: remember This behaves as :n:`set (@ident := @term) in *`, using a logical (Leibniz’s) equality instead of a local definition. Use :n:`@naming_intropattern` to name or split up the new equation. .. tacv:: remember @term as @ident__1 {? eqn:@naming_intropattern } in @goal_occurrences This is a more general form of :tacn:`remember` that remembers the occurrences of :token:`term` specified by an occurrence set. .. tacv:: eremember @term as @ident__1 {? eqn:@naming_intropattern } {? in @goal_occurrences } :name: eremember While the different variants of :tacn:`remember` expect that no existential variables are generated by the tactic, :tacn:`eremember` removes this constraint. .. tacn:: pose (@ident := @term) :name: pose This adds the local definition :n:`@ident := @term` to the current context without performing any replacement in the goal or in the hypotheses. It is equivalent to :n:`set (@ident := @term) in |-`. .. tacv:: pose (@ident {* @binder } := @term) This is equivalent to :n:`pose (@ident := fun {* @binder } => @term)`. .. tacv:: pose @term This behaves as :n:`pose (@ident := @term)` but :token:`ident` is generated by Coq. .. tacv:: epose (@ident {* @binder } := @term) epose @term :name: epose; _ While the different variants of :tacn:`pose` expect that no existential variables are generated by the tactic, :tacn:`epose` removes this constraint. .. tacn:: decompose [{+ @qualid}] @term :name: decompose This tactic recursively decomposes a complex proposition in order to obtain atomic ones. .. example:: .. 
coqtop:: reset all Goal forall A B C:Prop, A /\ B /\ C \/ B /\ C \/ C /\ A -> C. intros A B C H; decompose [and or] H. all: assumption. Qed. .. note:: :tacn:`decompose` does not work on right-hand sides of implications or products. .. tacv:: decompose sum @term This decomposes sum types (like :g:`or`). .. tacv:: decompose record @term This decomposes record types (inductive types with one constructor, like :g:`and` and :g:`exists` and those defined with the :cmd:`Record` command. .. _controllingtheproofflow: Controlling the proof flow ------------------------------ .. tacn:: assert (@ident : @type) :name: assert This tactic applies to any goal. :n:`assert (H : U)` adds a new hypothesis of name :n:`H` asserting :g:`U` to the current goal and opens a new subgoal :g:`U` [2]_. The subgoal :g:`U` comes first in the list of subgoals remaining to prove. .. exn:: Not a proposition or a type. Arises when the argument :token:`type` is neither of type :g:`Prop`, :g:`Set` nor :g:`Type`. .. tacv:: assert @type This behaves as :n:`assert (@ident : @type)` but :n:`@ident` is generated by Coq. .. tacv:: assert @type by @tactic This tactic behaves like :tacn:`assert` but applies tactic to solve the subgoals generated by assert. .. exn:: Proof is not complete. :name: Proof is not complete. (assert) :undocumented: .. tacv:: assert @type as @simple_intropattern If :n:`simple_intropattern` is an intro pattern (see :ref:`intropatterns`), the hypothesis is named after this introduction pattern (in particular, if :n:`simple_intropattern` is :n:`@ident`, the tactic behaves like :n:`assert (@ident : @type)`). If :n:`simple_intropattern` is an action introduction pattern, the tactic behaves like :n:`assert @type` followed by the action done by this introduction pattern. .. tacv:: assert @type as @simple_intropattern by @tactic This combines the two previous variants of :tacn:`assert`. .. tacv:: assert (@ident := @term) This behaves as :n:`assert (@ident : @type) by exact @term` where :token:`type` is the type of :token:`term`. This is equivalent to using :tacn:`pose proof`. If the head of term is :token:`ident`, the tactic behaves as :tacn:`specialize`. .. exn:: Variable @ident is already declared. :undocumented: .. tacv:: eassert @type as @simple_intropattern by @tactic :name: eassert While the different variants of :tacn:`assert` expect that no existential variables are generated by the tactic, :tacn:`eassert` removes this constraint. This lets you avoid specifying the asserted statement completely before starting to prove it. .. tacv:: pose proof @term {? as @simple_intropattern} :name: pose proof This tactic behaves like :n:`assert @type {? as @simple_intropattern} by exact @term` where :token:`type` is the type of :token:`term`. In particular, :n:`pose proof @term as @ident` behaves as :n:`assert (@ident := @term)` and :n:`pose proof @term as @simple_intropattern` is the same as applying the :token:`simple_intropattern` to :token:`term`. .. tacv:: epose proof @term {? as @simple_intropattern} :name: epose proof While :tacn:`pose proof` expects that no existential variables are generated by the tactic, :tacn:`epose proof` removes this constraint. .. tacv:: pose proof (@ident := @term) This is an alternative syntax for :n:`assert (@ident := @term)` and :n:`pose proof @term as @ident`, following the model of :n:`pose (@ident := @term)` but dropping the value of :token:`ident`. .. 
tacv:: epose proof (@ident := @term) This is an alternative syntax for :n:`eassert (@ident := @term)` and :n:`epose proof @term as @ident`, following the model of :n:`epose (@ident := @term)` but dropping the value of :token:`ident`. .. tacv:: enough (@ident : @type) :name: enough This adds a new hypothesis of name :token:`ident` asserting :token:`type` to the goal the tactic :tacn:`enough` is applied to. A new subgoal stating :token:`type` is inserted after the initial goal rather than before it as :tacn:`assert` would do. .. tacv:: enough @type This behaves like :n:`enough (@ident : @type)` with the name :token:`ident` of the hypothesis generated by Coq. .. tacv:: enough @type as @simple_intropattern This behaves like :n:`enough @type` using :token:`simple_intropattern` to name or destruct the new hypothesis. .. tacv:: enough (@ident : @type) by @tactic enough @type {? as @simple_intropattern } by @tactic This behaves as above but with :token:`tactic` expected to solve the initial goal after the extra assumption :token:`type` is added and possibly destructed. If the :n:`as @simple_intropattern` clause generates more than one subgoal, :token:`tactic` is applied to all of them. .. tacv:: eenough @type {? as @simple_intropattern } {? by @tactic } eenough (@ident : @type) {? by @tactic } :name: eenough; _ While the different variants of :tacn:`enough` expect that no existential variables are generated by the tactic, :tacn:`eenough` removes this constraint. .. tacv:: cut @type :name: cut This tactic applies to any goal. It implements the non-dependent case of the “App” rule given in :ref:`typing-rules`. (This is Modus Ponens inference rule.) :n:`cut U` transforms the current goal :g:`T` into the two following subgoals: :g:`U -> T` and :g:`U`. The subgoal :g:`U -> T` comes first in the list of remaining subgoal to prove. .. tacv:: specialize (@ident {* @term}) {? as @simple_intropattern} specialize @ident with @bindings {? as @simple_intropattern} :name: specialize; _ This tactic works on local hypothesis :n:`@ident`. The premises of this hypothesis (either universal quantifications or non-dependent implications) are instantiated by concrete terms coming either from arguments :n:`{* @term}` or from :ref:`bindings`. In the first form the application to :n:`{* @term}` can be partial. The first form is equivalent to :n:`assert (@ident := @ident {* @term})`. In the second form, instantiation elements can also be partial. In this case the uninstantiated arguments are inferred by unification if possible or left quantified in the hypothesis otherwise. With the :n:`as` clause, the local hypothesis :n:`@ident` is left unchanged and instead, the modified hypothesis is introduced as specified by the :token:`simple_intropattern`. The name :n:`@ident` can also refer to a global lemma or hypothesis. In this case, for compatibility reasons, the behavior of :tacn:`specialize` is close to that of :tacn:`generalize`: the instantiated statement becomes an additional premise of the goal. The ``as`` clause is especially useful in this case to immediately introduce the instantiated statement as a local hypothesis. .. exn:: @ident is used in hypothesis @ident. :undocumented: .. exn:: @ident is used in conclusion. :undocumented: .. tacn:: generalize @term :name: generalize This tactic applies to any goal. It generalizes the conclusion with respect to some term. .. example:: .. coqtop:: reset none Goal forall x y:nat, 0 <= x + y + y. Proof. intros *. .. coqtop:: all abort Show. generalize (x + y + y). 
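(* A sketch of the effect (the exact printing may differ): the conclusion
   0 <= x + y + y  becomes  forall n : nat, 0 <= n,
   so the generalized subterm no longer occurs in the conclusion. *)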
If the goal is :g:`G` and :g:`t` is a subterm of type :g:`T` in the goal, then :n:`generalize t` replaces the goal by :g:`forall (x:T), G′` where :g:`G′` is obtained from :g:`G` by replacing all occurrences of :g:`t` by :g:`x`. The name of the variable (here :g:`n`) is chosen based on :g:`T`. .. tacv:: generalize {+ @term} This is equivalent to :n:`generalize @term; ... ; generalize @term`. Note that the sequence of term :sub:`i` 's are processed from n to 1. .. tacv:: generalize @term at {+ @natural} This is equivalent to :n:`generalize @term` but it generalizes only over the specified occurrences of :n:`@term` (counting from left to right on the expression printed using the :flag:`Printing All` flag). .. tacv:: generalize @term as @ident This is equivalent to :n:`generalize @term` but it uses :n:`@ident` to name the generalized hypothesis. .. tacv:: generalize {+, @term at {+ @natural} as @ident} This is the most general form of :n:`generalize` that combines the previous behaviors. .. tacv:: generalize dependent @term This generalizes term but also *all* hypotheses that depend on :n:`@term`. It clears the generalized hypotheses. .. tacn:: evar (@ident : @term) :name: evar The :n:`evar` tactic creates a new local definition named :n:`@ident` with type :n:`@term` in the context. The body of this binding is a fresh existential variable. .. tacn:: instantiate (@ident := @term ) :name: instantiate The instantiate tactic refines (see :tacn:`refine`) an existential variable :n:`@ident` with the term :n:`@term`. It is equivalent to :n:`only [ident]: refine @term` (preferred alternative). .. note:: To be able to refer to an existential variable by name, the user must have given the name explicitly (see :ref:`Existential-Variables`). .. note:: When you are referring to hypotheses which you did not name explicitly, be aware that Coq may make a different decision on how to name the variable in the current goal and in the context of the existential variable. This can lead to surprising behaviors. .. tacv:: instantiate (@natural := @term) This variant selects an existential variable by its position. The :n:`@natural` argument is the position of the existential variable *from right to left* in the conclusion of the goal. (Use one of the variants below to select an existential variable in a hypothesis.) Counting starts at 1 and multiple occurrences of the same existential variable are counted multiple times. Because this variant is not robust to slight changes in the goal, its use is strongly discouraged. .. tacv:: instantiate ( @natural := @term ) in @ident instantiate ( @natural := @term ) in ( value of @ident ) instantiate ( @natural := @term ) in ( type of @ident ) These allow to refer respectively to existential variables occurring in a hypothesis or in the body or the type of a local definition (named :n:`@ident`). .. tacv:: instantiate Without argument, the instantiate tactic tries to solve as many existential variables as possible, using information gathered from other tactics in the same tactical. This is automatically done after each complete tactic (i.e. after a dot in proof mode), but not, for example, between each tactic when they are sequenced by semicolons. .. tacn:: admit :name: admit This tactic allows temporarily skipping a subgoal so as to progress further in the rest of the proof. A proof containing admitted goals cannot be closed with :cmd:`Qed` but only with :cmd:`Admitted`. .. tacv:: give_up Synonym of :tacn:`admit`. .. 
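For instance, here is a minimal sketch of the positional form of :tacn:`instantiate`, using :tacn:`eexists` to create the existential variable (the statement is arbitrary)::

   Goal exists n : nat, n + 1 = 2.
   Proof.
     eexists.              (* the conclusion now contains an existential variable *)
     instantiate (1 := 1). (* positions are counted from right to left in the conclusion *)
     reflexivity.
   Qed.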
tacn:: absurd @term :name: absurd This tactic applies to any goal. The argument term is any proposition :g:`P` of type :g:`Prop`. This tactic applies False elimination, that is, it deduces the current goal from False, and generates as subgoals :g:`∼P` and :g:`P`. It is very useful in proofs by cases, where some cases are impossible. In most cases, :g:`P` or :g:`∼P` is one of the hypotheses of the local context. .. tacn:: contradiction :name: contradiction This tactic applies to any goal. The contradiction tactic attempts to find in the current context (after all intros) a hypothesis that is equivalent to an empty inductive type (e.g. :g:`False`), to the negation of a singleton inductive type (e.g. :g:`True` or :g:`x=x`), or two contradictory hypotheses. .. exn:: No such assumption. :undocumented: .. tacv:: contradiction @ident The proof of False is searched in the hypothesis named :n:`@ident`. .. tacn:: contradict @ident :name: contradict This tactic allows manipulating negated hypotheses and goals. The name :n:`@ident` should correspond to a hypothesis. With :n:`contradict H`, the current goal and context are transformed in the following way: + H:¬A ⊢ B becomes ⊢ A + H:¬A ⊢ ¬B becomes H: B ⊢ A + H: A ⊢ B becomes ⊢ ¬A + H: A ⊢ ¬B becomes H: B ⊢ ¬A .. tacn:: exfalso :name: exfalso This tactic implements the “ex falso quodlibet” logical principle: an elimination of False is performed on the current goal, and the user is then required to prove that False is indeed provable in the current context. This tactic is a macro for :n:`elimtype False`. Classical tactics ----------------- In order to ease the proving process, when the ``Classical`` module is loaded, a few more tactics are available. Make sure to load the module using the ``Require Import`` command. .. tacn:: classical_left classical_right :name: classical_left; classical_right These tactics are the analog of :tacn:`left` and :tacn:`right` but using classical logic. They can only be used for disjunctions. Use :tacn:`classical_left` to prove the left part of the disjunction with the assumption that the negation of the right part holds. Use :tacn:`classical_right` to prove the right part of the disjunction with the assumption that the negation of the left part holds. Performance-oriented tactic variants ------------------------------------ .. todo: move the following adjacent to the `exact` tactic in the rewriting chapter? .. tacn:: exact_no_check @term :name: exact_no_check For advanced usage. Similar to :tacn:`exact` :n:`@term`, but as an optimization, it skips checking that :n:`@term` has the goal's type, relying on the kernel check instead. See :tacn:`change_no_check` for more explanation. .. example:: .. coqtop:: all abort Goal False. exact_no_check I. Fail Qed. .. tacv:: vm_cast_no_check @term :name: vm_cast_no_check For advanced usage. Similar to :tacn:`exact_no_check` :n:`@term`, but additionally instructs the kernel to use :tacn:`vm_compute` to compare the goal's type with the :n:`@term`'s type. .. example:: .. coqtop:: all abort Goal False. vm_cast_no_check I. Fail Qed. .. tacv:: native_cast_no_check @term :name: native_cast_no_check For advanced usage. Similar to :tacn:`exact_no_check` :n:`@term`, but additionally instructs the kernel to use :tacn:`native_compute` to compare the goal's type with the :n:`@term`'s type. .. example:: .. coqtop:: all abort Goal False. native_cast_no_check I. Fail Qed. .. [1] Actually, only the second subgoal will be generated since the other one can be automatically checked. ..
[2] This corresponds to the cut rule of sequent calculus. coq-8.15.0/doc/sphinx/proof-engine/vernacular-commands.rst000066400000000000000000001167341417001151100235330ustar00rootroot00000000000000.. _vernacularcommands: Commands ======== .. _displaying: Displaying ---------- .. _Print: .. cmd:: Print {? Term } @reference {? @univ_name_list } .. insertprodn univ_name_list univ_name_list .. prodn:: univ_name_list ::= @%{ {* @name } %} Displays definitions of terms, including opaque terms, for the object :n:`@reference`. * :n:`Term` - a syntactic marker to allow printing a term that is the same as one of the various :n:`Print` commands. For example, :cmd:`Print All` is a different command, while :n:`Print Term All` shows information on the object whose name is ":n:`All`". * :n:`@univ_name_list` - locally renames the polymorphic universes of :n:`@reference`. The name `_` means the usual name is printed. .. exn:: @qualid not a defined object. :undocumented: .. exn:: Universe instance length is @natural but should be @natural. :undocumented: .. exn:: This object does not support universe names. :undocumented: .. cmd:: Print All This command displays information about the current state of the environment, including sections and modules. .. cmd:: Inspect @natural This command displays the :n:`@natural` last objects of the current environment, including sections and modules. .. cmd:: Print Section @qualid Displays the objects defined since the beginning of the section named :n:`@qualid`. .. todo: "A.B" is permitted but unnecessary for modules/sections. should the command just take an @ident? Query commands -------------- Unlike other commands, :production:`query_command`\s may be prefixed with a goal selector (:n:`@natural:`) to specify which goals it applies to. If no selector is provided, the command applies to the current goal. If no proof is open, then the command only applies to accessible objects. (see Section :ref:`invocation-of-tactics`). :cmd:`Eval` and :cmd:`Compute` are also :token:`query_command`\s, which are described elsewhere .. cmd:: About @reference {? @univ_name_list } Displays information about the :n:`@reference` object, which, if a proof is open, may be a hypothesis of the selected goal, or an accessible theorem, axiom, etc.: its kind (module, constant, assumption, inductive, constructor, abbreviation, …), long name, type, implicit arguments and argument scopes (as set in the definition of :token:`reference` or subsequently with the :cmd:`Arguments` command). It does not print the body of definitions or proofs. .. cmd:: Check @term Displays the type of :n:`@term`. When called in proof mode, the term is checked in the local context of the selected goal. .. cmd:: Search {+ @search_query } {? {| inside | in | outside } {+ @qualid } } This command can be used to filter the goal and the global context to retrieve objects whose name or type satisfies a number of conditions. Library files that were not loaded with :cmd:`Require` are not considered. The :table:`Search Blacklist` table can also be used to exclude some things from all calls to :cmd:`Search`. The output of the command is a list of qualified identifiers and their types. If the :flag:`Search Output Name Only` flag is on, the types are omitted. .. insertprodn search_query search_query .. 
prodn:: search_query ::= @search_item | - @search_query | [ {+| {+ @search_query } } ] Multiple :n:`@search_item`\s can be combined into a complex :n:`@search_query`: :n:`- @search_query` Excludes the objects that would be filtered by :n:`@search_query`. See :ref:`this example `. :n:`[ {+ @search_query } | ... | {+ @search_query } ]` This is a disjunction of conjunctions of queries. A simple conjunction can be expressed by having a single disjunctive branch. For a conjunction at top-level, the surrounding brackets are not required. .. insertprodn search_item search_item .. prodn:: search_item ::= {? {| head | hyp | concl | headhyp | headconcl } : } @string {? % @scope_key } | {? {| head | hyp | concl | headhyp | headconcl } : } @one_pattern | is : @logical_kind Searched objects can be filtered by patterns, by the constants they contain (identified by their name or a notation) and by their names. The location of the pattern or constant within a term :n:`@one_pattern` Search for objects whose type contains a subterm matching the pattern :n:`@one_pattern`. Holes of the pattern are indicated by `_` or :n:`?@ident`. If the same :n:`?@ident` occurs more than once in the pattern, all occurrences in the subterm must be identical. See :ref:`this example `. :n:`@string {? % @scope_key }` - If :n:`@string` is a substring of a valid identifier and no :n:`% @scope_key` is provided, search for objects whose name contains :n:`@string`. See :ref:`this example `. - Otherwise, search for objects whose type contains the reference that this string, interpreted as a notation, is attached to (as described in :n:`@reference`). See :ref:`this example `. .. note:: To refer to a string used in a notation that is a substring of a valid identifier, put it between single quotes or explicitly provide a scope. See :ref:`this example `. :n:`hyp:` The provided pattern or reference is matched against any subterm of an hypothesis of the type of the objects. See :ref:`this example `. :n:`headhyp:` The provided pattern or reference is matched against the subterms in head position (any partial applicative subterm) of the hypotheses of the type of the objects. See :ref:`the previous example `. :n:`concl:` The provided pattern or reference is matched against any subterm of the conclusion of the type of the objects. See :ref:`this example `. :n:`headconcl:` The provided pattern or reference is matched against the subterms in head position (any partial applicative subterm) of the conclusion of the type of the objects. See :ref:`the previous example `. :n:`head:` This is simply the union between `headconcl:` and `headhyp:`. :n:`is: @logical_kind` .. insertprodn logical_kind logical_kind .. prodn:: logical_kind ::= {| @thm_token | @assumption_token } | {| Definition | Example | Context | Primitive } | {| Coercion | Instance | Scheme | Canonical | SubClass } | {| Field | Method } Filters objects by the keyword that was used to define them (`Theorem`, `Lemma`, `Axiom`, `Variable`, `Context`, `Primitive`...) or its status (`Coercion`, `Instance`, `Scheme`, `Canonical`, `SubClass`, Field` for record fields, `Method` for class fields). Note that `Coercion`\s, `Canonical Structure`\s, Instance`\s and `Scheme`\s can be defined without using those keywords. See :ref:`this example `. Additional clauses: * :n:`{| inside | in } {+ @qualid }` - limit the search to the specified modules * :n:`outside {+ @qualid }` - exclude the specified modules from the search .. exn:: Module/section @qualid not found. 
There is no constant in the environment named :n:`@qualid`, where :n:`@qualid` is in an `inside` or `outside` clause. .. _search-pattern: .. example:: Searching for a pattern .. coqtop:: none reset Require Import PeanoNat. We can repeat meta-variables to narrow down the search. Here, we are looking for commutativity lemmas. .. coqtop:: all Search (_ ?n ?m = _ ?m ?n). .. _search-part-ident: .. example:: Searching for part of an identifier .. coqtop:: all reset Search "_assoc". .. _search-by-notation: .. example:: Searching for a reference by notation .. coqtop:: all reset Search "+". .. _search-disambiguate-notation: .. example:: Disambiguating between part of identifier and notation .. coqtop:: none reset Require Import PeanoNat. In this example, we show two ways of searching for all the objects whose type contains `Nat.modulo` but which do not contain the substring "mod". .. coqtop:: all Search "'mod'" -"mod". Search "mod"%nat -"mod". .. _search-hyp: .. example:: Search in hypotheses The following search shows the objects whose type contains `bool` in an hypothesis as a strict subterm only: .. coqtop:: none reset Add Search Blacklist "internal_". .. coqtop:: all Search hyp:bool -headhyp:bool. .. _search-concl: .. example:: Search in conclusion The following search shows the objects whose type contains `bool` in the conclusion as a strict subterm only: .. coqtop:: all Search concl:bool -headconcl:bool. .. _search-by-keyword: .. example:: Search by keyword or status The following search shows the definitions whose type is a `nat` or a function which returns a `nat` and the lemmas about `+`: .. coqtop:: all reset Search [ is:Definition headconcl:nat | is:Lemma (_ + _) ]. The following search shows the instances whose type includes the classes `Reflexive` or `Symmetric`: .. coqtop:: none reset Require Import Morphisms. .. coqtop:: all Search is:Instance [ Reflexive | Symmetric ]. .. cmd:: SearchPattern @one_pattern {? {| inside | in | outside } {+ @qualid } } Displays the name and type of all hypotheses of the selected goal (if any) and theorems of the current context ending with :n:`{? forall {* @binder }, } {* P__i -> } C` that match the pattern :n:`@one_pattern`. See :cmd:`Search` for an explanation of the `inside`/`in`/`outside` clauses. .. example:: :cmd:`SearchPattern` examples .. coqtop:: in Require Import Arith. .. coqtop:: all SearchPattern (_ + _ = _ + _). SearchPattern (nat -> bool). SearchPattern (forall l : list _, _ l l). .. coqtop:: all SearchPattern (?X1 + _ = _ + ?X1). .. cmd:: SearchRewrite @one_pattern {? {| inside | in | outside } {+ @qualid } } Displays the name and type of all hypotheses of the selected goal (if any) and theorems of the current context that have the form :n:`{? forall {* @binder }, } {* P__i -> } LHS = RHS` where :n:`@one_pattern` matches either `LHS` or `RHS`. See :cmd:`Search` for an explanation of the `inside`/`in`/`outside` clauses. .. example:: :cmd:`SearchRewrite` examples .. coqtop:: in Require Import Arith. .. coqtop:: all SearchRewrite (_ + _ + _). .. table:: Search Blacklist @string This :term:`table` specifies a set of strings used to exclude lemmas from the results of :cmd:`Search`, :cmd:`SearchPattern` and :cmd:`SearchRewrite` queries. A lemma whose fully-qualified name contains any of the strings will be excluded from the search results. The default blacklisted substrings are ``_subterm``, ``_subproof`` and ``Private_``. Use the :cmd:`Add` and :cmd:`Remove` commands to update the set of blacklisted strings. .. 
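For instance (a sketch; the blacklisted substring here is arbitrary)::

   Add Search Blacklist "_example_aux".
   Remove Search Blacklist "_example_aux".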
flag:: Search Output Name Only This :term:`flag` restricts the output of search commands to identifier names; turning it on causes invocations of :cmd:`Search`, :cmd:`SearchPattern`, :cmd:`SearchRewrite` etc. to omit types from their output, printing only identifiers. .. _requests-to-the-environment: Requests to the environment ------------------------------- .. cmd:: Print Assumptions @reference Displays all the assumptions (axioms, parameters and variables) a theorem or definition depends on. The message "Closed under the global context" indicates that the theorem or definition has no dependencies. .. cmd:: Print Opaque Dependencies @reference Displays the assumptions and opaque constants that :n:`@reference` depends on. .. cmd:: Print Transparent Dependencies @reference Displays the assumptions and transparent constants that :n:`@reference` depends on. .. cmd:: Print All Dependencies @reference Displays all the assumptions and constants :n:`@reference` depends on. .. cmd:: Locate @reference .. insertprodn reference reference .. prodn:: reference ::= @qualid | @string {? % @scope_key } Displays the full name of objects from Coq's various qualified namespaces such as terms, modules and Ltac, thereby showing the module they are defined in. It also displays notation definitions. Note that objects are reported only when the module containing them has been loaded, such as through a :cmd:`Require` command. Notation definitions are reported only when the containing module has been imported (e.g. with :cmd:`Require Import` or :cmd:`Import`). :n:`@qualid` refers to object names that end with :n:`@qualid`. :n:`@string {? % @scope_key }` refers to definitions of notations. :n:`@string` can be a single token in the notation such as "`->`" or a pattern that matches the notation. See :ref:`locating-notations`. :n:`% @scope_key`, if present, limits the reference to the scope bound to the delimiting key :n:`@scope_key`, such as, for example, :n:`%nat`. (see Section :ref:`LocalInterpretationRulesForNotations`) .. todo somewhere we should list all the qualified namespaces .. cmd:: Locate Term @reference Like :cmd:`Locate`, but limits the search to terms .. cmd:: Locate Module @qualid Like :cmd:`Locate`, but limits the search to modules .. cmd:: Locate Ltac @qualid Like :cmd:`Locate`, but limits the search to tactics .. cmd:: Locate Library @qualid Displays the full name, status and file system path of the module :n:`@qualid`, whether loaded or not. .. cmd:: Locate File @string Displays the file system path of the file ending with :n:`@string`. Typically, :n:`@string` has a suffix such as ``.cmo`` or ``.vo`` or ``.v`` file, such as :n:`Nat.v`. .. todo: also works for directory names such as "Data" (parent of Nat.v) also "Data/Nat.v" works, but not a substring match .. example:: Locate examples .. coqtop:: all Locate nat. Locate Datatypes.O. Locate Init.Datatypes.O. Locate Coq.Init.Datatypes.O. Locate I.Dont.Exist. .. _printing-flags: Printing flags ------------------------------- .. flag:: Fast Name Printing When this :term:`flag` is turned on, Coq uses an asymptotically faster algorithm for the generation of unambiguous names of bound variables while printing terms. While faster, it is also less clever and results in a typically less elegant display, e.g. it will generate more names rather than reusing certain names across subterms. This flag is not enabled by default, because as Ltac observes bound names, turning it on can break existing proof scripts. .. 
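Returning to :cmd:`Print Assumptions`, here is a minimal sketch (the axiom and lemma names are illustrative)::

   Axiom extra_axiom : forall P : Prop, P \/ ~ P.
   Lemma uses_it : 0 = 0 \/ 0 <> 0.
   Proof. apply extra_axiom. Qed.
   Print Assumptions uses_it.
   (* Axioms: extra_axiom : forall P : Prop, P \/ ~ P *)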
_loading-files: Loading files ----------------- Coq offers the possibility of loading different parts of a whole development stored in separate files. Their contents will be loaded as if they were entered from the keyboard. This means that the loaded files are text files containing sequences of commands for Coq’s toplevel. This kind of file is called a *script* for Coq. The standard (and default) extension of Coq’s script files is .v. .. cmd:: Load {? Verbose } {| @string | @ident } Loads a file. If :n:`@ident` is specified, the command loads a file named :n:`@ident.v`, searching successively in each of the directories specified in the *loadpath*. (see Section :ref:`libraries-and-filesystem`) If :n:`@string` is specified, it must specify a complete filename. `~` and .. abbreviations are allowed as well as shell variables. If no extension is specified, Coq will use the default extension ``.v``. Files loaded this way can't leave proofs open, nor can :cmd:`Load` be used inside a proof. We discourage the use of :cmd:`Load`; use :cmd:`Require` instead. :cmd:`Require` loads `.vo` files that were previously compiled from `.v` files. :n:`Verbose` displays the Coq output for each command and tactic in the loaded file, as if the commands and tactics were entered interactively. .. exn:: Can’t find file @ident on loadpath. :undocumented: .. exn:: Load is not supported inside proofs. :undocumented: .. exn:: Files processed by Load cannot leave open proofs. :undocumented: .. _compiled-files: Compiled files ------------------ This section describes the commands used to load compiled files (see Chapter :ref:`thecoqcommands` for documentation on how to compile a file). A compiled file is a particular case of a module called a *library file*. .. cmd:: {? From @dirpath } Require {? {| Import | Export } } {+ @qualid } :name: From … Require; Require; Require Import; Require Export Loads compiled files into the Coq environment. For each :n:`@qualid`, the command looks in the loadpath for a compiled file :n:`@ident.vo` in the file system whose logical name has the form :n:`@dirpath.{* @ident. }@qualid` (if :n:`From @dirpath` is given) or :n:`{* @ident. }@qualid` (if the optional `From` clause is absent). See Section :ref:`libraries-and-filesystem` for more on loadpaths. If a file is found, its logical name must be the same as the one used to compile the file. Then the file is loaded as well as all the files it depends on (recursively). All the files must have been compiled with the same version of Coq. * :n:`Import` - additionally does an :cmd:`Import` on the loaded module, making components defined in the module available by their short names * :n:`Export` - additionally does an :cmd:`Export` on the loaded module, making components defined in the module available by their short names *and* marking them to be exported by the current module If the required file has already been loaded, it is not reloaded. If :n:`Import` or :n:`Export` are present, the command also does the equivalent of the :cmd:`Import` or :cmd:`Export` commands. When looking for a file whose logical name has the form :n:`@dirpath.{* @ident. }@qualid` or :n:`{* @ident. }@qualid`, exact matches are preferred (that is, matches such that the implicit segment :n:`{* @ident. }` is empty). If the name exactly matches in multiple `-R` or `-Q` options, the file corresponding to the most recent `-R` or `-Q` is used. If there is no exact match, the matches from the most recent `-R` or `-Q` are selected. 
If this results in a unique match, the corresponding file is selected. If this results in several matches, it is an error. The difference between the `-R` and the `-Q` option is that non-exact matches are allowed for `-Q` only if `From` is present, that is if a prefix is given. For instance, ``-Q path Lib`` associates the file ``path/Foo/File.vo`` with the logical name ``Lib.Foo.File`` but allows this file to be accessed through the name ``Lib.Foo.File`` when no `From` is given, with the names ``Foo.File`` and ``File`` when :n:`From Lib` is given, and with the name ``File`` when :n:`From Lib.Foo` is given. Additionally, ``-R path Lib`` allows the same file to be accessed through the names ``Foo.File`` and ``File`` when no `From` is given. In particular, `From` is useful to ensure that the file comes from a particular package or subpackage. .. exn:: Cannot load @qualid: no physical path bound to @dirpath. :undocumented: .. exn:: Cannot find library foo in loadpath. The command did not find the file foo.vo. Either foo.v exists but is not compiled or foo.vo is in a directory which is not in your loadpath (see Section :ref:`libraries-and-filesystem`). .. exn:: Required library @qualid matches several files in path (found file__1.vo, file__2.vo, ...). The file to load must be required with a more discriminating suffix, or, at worst, with its full logical name. .. exn:: Compiled library @ident.vo makes inconsistent assumptions over library @qualid. The command tried to load library file :n:`@ident`.vo that depends on some specific version of library :n:`@qualid` which is not the one already loaded in the current Coq session. Probably :n:`@ident.v` was not properly recompiled with the last version of the file containing module :token:`qualid`. .. exn:: Bad magic number. The file :n:`@ident.vo` was found but either it is not a Coq compiled module, or it was compiled with an incompatible version of Coq. .. exn:: The file @ident.vo contains library @qualid__1 and not library @qualid__2. The library :n:`@qualid__2` is indirectly required by a :cmd:`Require`. The loadpath maps :n:`@qualid__2` to :n:`@ident.vo`, which was compiled using a loadpath that bound it to :n:`@qualid__1`. Usually the appropriate solution is to recompile :n:`@ident.v` using the correct loadpath. See :ref:`libraries-and-filesystem`. .. warn:: Require inside a module is deprecated and strongly discouraged. You can Require a module at toplevel and optionally Import it inside another one. Note that the :cmd:`Import` and :cmd:`Export` commands can be used inside modules. .. seealso:: Chapter :ref:`thecoqcommands` .. cmd:: Print Libraries This command displays the list of library files loaded in the current Coq session. .. cmd:: Declare ML Module {+ @string } This commands dynamically loads OCaml compiled code from a :n:`.mllib` file. It is used to load plugins dynamically. The files must be accessible in the current OCaml loadpath (see :ref:`command line option ` :n:`-I` and command :cmd:`Add ML Path`). The :n:`.mllib` suffix may be omitted. This command is reserved for plugin developers, who should provide a .v file containing the command. Users of the plugins will then generally load the .v file. This command supports the :attr:`local` attribute. If present, the listed files are not exported, even if they're outside a section. .. exn:: File not found on loadpath: @string. :undocumented: .. cmd:: Print ML Modules This prints the name of all OCaml modules loaded with :cmd:`Declare ML Module`. 
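For instance, the support `.v` file of a plugin typically contains a declaration such as the following, and :cmd:`Print ML Modules` then lists the plugin among the loaded modules (shown here with the standard Ltac plugin; a sketch)::

   Declare ML Module "ltac_plugin".
   Print ML Modules.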
To know from where these module were loaded, the user should use the command :cmd:`Locate File`. .. _loadpath: Loadpath ------------ Loadpaths are preferably managed using Coq command line options (see Section :ref:`libraries-and-filesystem`), but there are also commands to manage them within Coq. These commands are only meant to be issued in the toplevel, and using them in source files is discouraged. .. cmd:: Pwd This command displays the current working directory. .. cmd:: Cd {? @string } If :n:`@string` is specified, changes the current directory according to :token:`string` which can be any valid path. Otherwise, it displays the current directory. .. cmd:: Add LoadPath @string as @dirpath .. insertprodn dirpath dirpath .. prodn:: dirpath ::= {* @ident . } @ident This command is equivalent to the command line option :n:`-Q @string @dirpath`. It adds a mapping to the loadpath from the logical name :n:`@dirpath` to the file system directory :n:`@string`. * :n:`@dirpath` is a prefix of a module name. The module name hierarchy follows the file system hierarchy. On Linux, for example, the prefix `A.B.C` maps to the directory :n:`@string/B/C`. Avoid using spaces after a `.` in the path because the parser will interpret that as the end of a command or tactic. .. cmd:: Add Rec LoadPath @string as @dirpath This command is equivalent to the command line option :n:`-R @string @dirpath`. It adds the physical directory string and all its subdirectories to the current Coq loadpath. .. cmd:: Remove LoadPath @string This command removes the path :n:`@string` from the current Coq loadpath. .. cmd:: Print LoadPath {? @dirpath } This command displays the current Coq loadpath. If :n:`@dirpath` is specified, displays only the paths that extend that prefix. .. cmd:: Add ML Path @string Equivalent to the :ref:`command line option ` :n:`-I @string`. Adds the path :n:`@string` to the current OCaml loadpath (cf. :cmd:`Declare ML Module`). It is for convenience, such as for use in an interactive session, and it is not exported to compiled files. For separation of concerns with respect to the relocability of files, we recommend using :n:`-I @string`. .. cmd:: Print ML Path Displays the current OCaml loadpath, as provided by the :ref:`command line option ` :n:`-I @string` or by the command :cmd:`Add ML Path` `@string` (cf. :cmd:`Declare ML Module`). .. _backtracking_subsection: Backtracking ------------ The backtracking commands described in this section can only be used interactively, they cannot be part of a Coq file loaded via ``Load`` or compiled by ``coqc``. .. cmd:: Reset @ident This command removes all the objects in the environment since :n:`@ident` was introduced, including :n:`@ident`. :n:`@ident` may be the name of a defined or declared object as well as the name of a section. One cannot reset over the name of a module or of an object inside a module. .. exn:: @ident: no such entry. :undocumented: .. cmd:: Reset Initial Goes back to the initial state, just after the start of the interactive session. .. cmd:: Back {? @natural } Undoes all the effects of the last :n:`@natural @sentence`\s. If :n:`@natural` is not specified, the command undoes one sentence. Sentences read from a `.v` file via a :cmd:`Load` are considered a single sentence. While :cmd:`Back` can undo tactics and commands executed within proof mode, once you exit proof mode, such as with :cmd:`Qed`, all the statements executed within are thereafter considered a single sentence. 
:cmd:`Back` immediately following :cmd:`Qed` gets you back to the state just after the statement of the proof. .. exn:: Invalid backtrack. The user wants to undo more commands than available in the history. .. cmd:: BackTo @natural This command brings back the system to the state labeled :n:`@natural`, forgetting the effect of all commands executed after this state. The state label is an integer which grows after each successful command. It is displayed in the prompt when in -emacs mode. Just as :cmd:`Back` (see above), the :cmd:`BackTo` command now handles proof states. For that, it may have to undo some extra commands and end on a state :n:`@natural′ ≤ @natural` if necessary. .. _quitting-and-debugging: Quitting and debugging -------------------------- .. cmd:: Quit Causes Coq to exit. Valid only in coqtop. .. cmd:: Drop This command temporarily enters the OCaml toplevel. It is a debug facility used by Coq’s implementers. Valid only in the bytecode version of coqtop. The OCaml command: :: #use "include";; adds the right loadpaths and loads some toplevel printers for all abstract types of Coq- section_path, identifiers, terms, judgments, …. You can also use the file base_include instead, that loads only the pretty-printers for section_paths and identifiers. You can return back to Coq with the command: :: go();; .. warning:: #. It only works with the bytecode version of Coq (i.e. `coqtop.byte`, see Section `interactive-use`). #. You must have compiled Coq from the source package and set the environment variable COQTOP to the root of your copy of the sources (see Section `customization-by-environment-variables`). .. cmd:: Time @sentence Executes :n:`@sentence` and displays the time needed to execute it. .. cmd:: Redirect @string @sentence Executes :n:`@sentence`, redirecting its output to the file ":n:`@string`.out". .. cmd:: Timeout @natural @sentence Executes :n:`@sentence`. If the operation has not terminated after :n:`@natural` seconds, then it is interrupted and an error message is displayed. .. opt:: Default Timeout @natural When this :term:`option` is set, each :n:`@sentence` is treated as if it was prefixed with :cmd:`Timeout` :n:`@natural`, except for :cmd:`Timeout` commands themselves. If unset, no timeout is applied. .. cmd:: Fail @sentence For debugging scripts, sometimes it is desirable to know whether a command or a tactic fails. If :n:`@sentence` fails, then :n:`Fail @sentence` succeeds (except for critical errors, such as "stack overflow"), without changing the proof state. In interactive mode, the system prints a message confirming the failure. .. exn:: The command has not failed! If the given :n:`@command` succeeds, then :n:`Fail @sentence` fails with this error message. .. cmd:: Succeed @sentence If :n:`@sentence` succeeds, then :n:`Succeed @sentence` succeeds without changing the proof state. If :n:`@sentence` fails, then :n:`Succeed @sentence` fails showing the error message for :n:`@sentence`. In interactive mode, the system prints the message :n:`The command has succeeded and its effects have been reverted.` confirming the success. This command can be useful for writing tests. .. note:: :cmd:`Time`, :cmd:`Redirect`, :cmd:`Timeout`, :cmd:`Fail` and :cmd:`Succeed` are :production:`control_command`\s. For these commands, attributes and goal selectors, when specified, are part of the :n:`@sentence` argument, and thus come after the control command prefix and before the inner command or tactic. 
For example: `Time #[ local ] Definition foo := 0.` or `Fail Timeout 10 all: auto.` .. _controlling-display: Controlling display ----------------------- .. flag:: Silent This :term:`flag` controls the normal displaying. .. opt:: Warnings "{+, {? {| - | + } } @ident }" This :term:`option` configures the display of warnings. It is experimental, and expects, between quotes, a comma-separated list of warning names or categories. Adding - in front of a warning or category disables it, adding + makes it an error. It is possible to use the special categories all and default, the latter containing the warnings enabled by default. The flags are interpreted from left to right, so in case of an overlap, the flags on the right have higher priority, meaning that `A,-A` is equivalent to `-A`. .. opt:: Debug "{+, {? - } @ident }" This :term:`option` configures the display of debug messages. Each :n:`@ident` enables debug messages for that component, while :n:`-@ident` disables messages for the component. ``all`` activates or deactivates all other components. ``backtrace`` controls printing of error backtraces. :cmd:`Test` `Debug` displays the list of components and their enabled/disabled state. .. opt:: Printing Width @natural This :term:`option` sets which left-aligned part of the width of the screen is used for display. At the time of writing this documentation, the default value is 78. .. opt:: Printing Depth @natural This :term:`option` controls the nesting depth of the formatter used for pretty- printing. Beyond this depth, display of subterms is replaced by dots. At the time of writing this documentation, the default value is 50. .. flag:: Printing Compact Contexts This :term:`flag` controls the compact display mode for goals contexts. When on, the printer tries to reduce the vertical size of goals contexts by putting several variables (even if of different types) on the same line provided it does not exceed the printing width (see :opt:`Printing Width`). At the time of writing this documentation, it is off by default. .. flag:: Printing Unfocused This :term:`flag` controls whether unfocused goals are displayed. Such goals are created by focusing other goals with bullets (see :ref:`bullets` or :ref:`curly braces `). It is off by default. .. flag:: Printing Dependent Evars Line This :term:`flag` controls the printing of the “(dependent evars: …)” information after each tactic. The information is used by the Prooftree tool in Proof General. (https://askra.de/software/prooftree) .. extracted from Gallina extensions chapter .. _printing_constructions_full: Printing constructions in full ------------------------------ .. flag:: Printing All Coercions, implicit arguments, the type of pattern matching, but also notations (see :ref:`syntax-extensions-and-notation-scopes`) can obfuscate the behavior of some tactics (typically the tactics applying to occurrences of subterms are sensitive to the implicit arguments). Turning this :term:`flag` on deactivates all high-level printing features such as coercions, implicit arguments, returned type of pattern matching, notations and various syntactic sugar for pattern matching or record projections. Otherwise said, :flag:`Printing All` includes the effects of the flags :flag:`Printing Implicit`, :flag:`Printing Coercions`, :flag:`Printing Synth`, :flag:`Printing Projections`, and :flag:`Printing Notations`. To reactivate the high-level printing features, use the command ``Unset Printing All``. .. 
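For instance (a sketch; the exact printed form may vary between versions)::

   Check 1 + 1.
   (* 1 + 1 : nat *)
   Set Printing All.
   Check 1 + 1.
   (* Nat.add (S O) (S O) : nat *)
   Unset Printing All.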
note:: In some cases, setting :flag:`Printing All` may display terms that are so big they become very hard to read. One technique to work around this is use :cmd:`Undelimit Scope` and/or :cmd:`Close Scope` to turn off the printing of notations bound to particular scope(s). This can be useful when notations in a given scope are getting in the way of understanding a goal, but turning off all notations with :flag:`Printing All` would make the goal unreadable. .. see a contrived example here: https://github.com/coq/coq/pull/11718#discussion_r415481854 .. _controlling-typing-flags: Controlling Typing Flags ---------------------------- .. flag:: Guard Checking This :term:`flag` can be used to enable/disable the guard checking of fixpoints. Warning: this can break the consistency of the system, use at your own risk. Decreasing argument can still be specified: the decrease is not checked anymore but it still affects the reduction of the term. Unchecked fixpoints are printed by :cmd:`Print Assumptions`. .. attr:: bypass_check(guard{? = {| yes | no } }) :name: bypass_check(guard) This :term:`boolean attribute` is similar to the :flag:`Guard Checking` flag, but on a per-declaration basis. Disable guard checking locally with ``bypass_check(guard)``. .. flag:: Positivity Checking This :term:`flag` can be used to enable/disable the positivity checking of inductive types and the productivity checking of coinductive types. Warning: this can break the consistency of the system, use at your own risk. Unchecked (co)inductive types are printed by :cmd:`Print Assumptions`. .. attr:: bypass_check(positivity{? = {| yes | no } }) :name: bypass_check(positivity) This :term:`boolean attribute` is similar to the :flag:`Positivity Checking` flag, but on a per-declaration basis. Disable positivity checking locally with ``bypass_check(positivity)``. .. flag:: Universe Checking This :term:`flag` can be used to enable/disable the checking of universes, providing a form of "type in type". Warning: this breaks the consistency of the system, use at your own risk. Constants relying on "type in type" are printed by :cmd:`Print Assumptions`. It has the same effect as `-type-in-type` command line argument (see :ref:`command-line-options`). .. attr:: bypass_check(universes{? = {| yes | no } }) :name: bypass_check(universes) This :term:`boolean attribute` is similar to the :flag:`Universe Checking` flag, but on a per-declaration basis. Disable universe checking locally with ``bypass_check(universes)``. .. cmd:: Print Typing Flags Print the status of the three typing flags: guard checking, positivity checking and universe checking. See also :flag:`Cumulative StrictProp` in the |SProp| chapter. .. example:: .. coqtop:: all reset Unset Guard Checking. Print Typing Flags. Fixpoint f (n : nat) : False := f n. Fixpoint ackermann (m n : nat) {struct m} : nat := match m with | 0 => S n | S m => match n with | 0 => ackermann m 1 | S n => ackermann m (ackermann (S m) n) end end. Print Assumptions ackermann. Note that the proper way to define the Ackermann function is to use an inner fixpoint: .. coqtop:: all reset Fixpoint ack m := fix ackm n := match m with | 0 => S n | S m' => match n with | 0 => ack m' 1 | S n' => ack m' (ackm n') end end. Typing flags may not be changed while inside sections. .. _internal-registration-commands: Internal registration commands -------------------------------- Due to their internal nature, the commands that are presented in this section are not for general use. 
They are meant to appear only in standard libraries and in support libraries of plug-ins. .. _exposing-constants-to-ocaml-libraries: Exposing constants to OCaml libraries ``````````````````````````````````````` .. cmd:: Register @qualid__1 as @qualid__2 Makes the constant :n:`@qualid__1` accessible to OCaml libraries under the name :n:`@qualid__2`. The constant can then be dynamically located in OCaml code by calling :n:`Coqlib.lib_ref "@qualid__2"`. The OCaml code doesn't need to know where the constant is defined (what file, module, library, etc.). As a special case, when the first segment of :n:`@qualid__2` is :g:`kernel`, the constant is exposed to the kernel. For instance, the `PrimInt63` module features the following declaration: .. coqdoc:: Register bool as kernel.ind_bool. This makes the kernel aware of the `bool` type, which is used, for example, to define the return type of the :g:`#int63_eq` primitive. .. seealso:: :ref:`primitive-integers` Inlining hints for the fast reduction machines `````````````````````````````````````````````` .. cmd:: Register Inline @qualid Gives a hint to the reduction machines (VM and native) that the body of the constant :n:`@qualid` should be inlined in the generated code. Registering primitive operations ```````````````````````````````` .. cmd:: Primitive @ident_decl {? : @term } := #@ident Makes the primitive type or primitive operator :n:`#@ident` defined in OCaml accessible in Coq commands and tactics. For internal use by implementors of Coq's standard library or standard library replacements. No space is allowed after the `#`. Invalid values give a syntax error. For example, the standard library files `PrimInt63.v` and `PrimFloat.v` use :cmd:`Primitive` to support, respectively, the features described in :ref:`primitive-integers` and :ref:`primitive-floats`. The types associated with an operator must be declared to the kernel before declaring operations that use the type. Do this with :cmd:`Primitive` for primitive types and :cmd:`Register` with the :g:`kernel` prefix for other types. For example, in `PrimInt63.v`, `#int63_type` must be declared before the associated operations. .. exn:: The type @ident must be registered before this construction can be typechecked. :undocumented: The type must be defined with :cmd:`Primitive` command before this :cmd:`Primitive` command (declaring an operation using the type) will succeed. coq-8.15.0/doc/sphinx/proofs/000077500000000000000000000000001417001151100157445ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/proofs/automatic-tactics/000077500000000000000000000000001417001151100213625ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/proofs/automatic-tactics/auto.rst000066400000000000000000000713671417001151100231020ustar00rootroot00000000000000.. _automation: ========================= Programmable proof search ========================= .. tacn:: auto {? @nat_or_var } {? @auto_using } {? @hintbases } .. insertprodn auto_using hintbases .. prodn:: auto_using ::= using {+, @one_term } hintbases ::= with * | with {+ @ident } Implements a Prolog-like resolution procedure to solve the current goal. It first tries to solve the goal using the :tacn:`assumption` tactic, then it reduces the goal to an atomic one using :tacn:`intros` and introduces the newly generated hypotheses as hints. Then it looks at the list of tactics associated with the head symbol of the goal and tries to apply one of them. Lower cost tactics are tried before higher-cost tactics. 
This process is recursively applied to the generated subgoals. :n:`@nat_or_var` Specifies the maximum search depth. The default is 5. :n:`using {+, @one_term }` Uses lemmas :n:`{+, @one_term }` in addition to hints. If :n:`@one_term` is an inductive type, the collection of its constructors are added as hints. Note that hints passed through the `using` clause are used in the same way as if they were passed through a hint database. Consequently, they use a weaker version of :tacn:`apply` and :n:`auto using @one_term` may fail where :n:`apply @one_term` succeeds. .. todo Given that this can be seen as counter-intuitive, it could be useful to have an option to use full-blown :tacn:`apply` for lemmas passed through the `using` clause. Contributions welcome! :n:`with *` Use all existing hint databases. Using this variant is highly discouraged in finished scripts since it is both slower and less robust than explicitly selecting the required databases. :n:`with {+ @ident }` Use the hint databases :n:`{+ @ident}` in addition to the database ``core``. Use the fake database `nocore` to omit `core`. If no `with` clause is given, :tacn:`auto` only uses the hypotheses of the current goal and the hints of the database named ``core``. :tacn:`auto` generally either completely solves the goal or leaves it unchanged. Use :tacn:`solve` `[ auto ]` if you want a failure when they don't solve the goal. :tacn:`auto` will fail if :tacn:`fail` or :tacn:`gfail` are invoked directly or indirectly, in which case setting the :flag:`Ltac Debug` may help you debug the failure. .. warning:: :tacn:`auto` uses a weaker version of :tacn:`apply` that is closer to :tacn:`simple apply` so it is expected that sometimes :tacn:`auto` will fail even if applying manually one of the hints would succeed. .. seealso:: :ref:`hintdatabases` for the list of pre-defined databases and the way to create or extend a database. .. tacn:: info_auto {? @nat_or_var } {? @auto_using } {? @hintbases } Behaves like :tacn:`auto` but shows the tactics it uses to solve the goal. This variant is very useful for getting a better understanding of automation, or to know what lemmas/assumptions were used. .. tacn:: debug auto {? @nat_or_var } {? @auto_using } {? @hintbases } Behaves like :tacn:`auto` but shows the tactics it tries to solve the goal, including failing paths. .. tacn:: trivial {? @auto_using } {? @hintbases } debug trivial {? @auto_using } {? @hintbases } info_trivial {? @auto_using } {? @hintbases } Like :tacn:`auto`, but is not recursive and only tries hints with zero cost. Typically used to solve goals for which a lemma is already available in the specified :n:`hintbases`. .. flag:: Info Auto Debug Auto Info Trivial Debug Trivial These :term:`flags ` enable printing of informative or debug information for the :tacn:`auto` and :tacn:`trivial` tactics. .. tacn:: eauto {? @nat_or_var } {? @auto_using } {? @hintbases } Generalizes :tacn:`auto`. While :tacn:`auto` does not try resolution hints which would leave existential variables in the goal, :tacn:`eauto` does try them (informally speaking, it internally uses a tactic close to :tacn:`simple eapply` instead of a tactic close to :tacn:`simple apply` in the case of :tacn:`auto`). As a consequence, :tacn:`eauto` can solve such a goal: .. example:: .. coqtop:: none Set Warnings "-deprecated-hint-without-locality". .. coqtop:: all Hint Resolve ex_intro : core. Goal forall P:nat -> Prop, P 0 -> exists n, P n. eauto. `ex_intro` is declared as a hint so the proof succeeds. .. 
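Continuing this example (with `ex_intro` declared as a hint), :tacn:`auto` leaves the goal untouched because using the hint would leave an existential variable, while :tacn:`eauto` closes it; a sketch::

   Goal forall P:nat -> Prop, P 0 -> exists n, P n.
   Proof.
     Fail solve [ auto ].  (* auto does not use hints that would leave an evar *)
     eauto.                (* eauto does *)
   Qed.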
seealso:: :ref:`hintdatabases` .. tacn:: info_eauto {? @nat_or_var } {? @auto_using } {? @hintbases } The various options for :tacn:`info_eauto` are the same as for :tacn:`info_auto`. :tacn:`eauto` also obeys the following flags: .. flag:: Info Eauto Debug Eauto :undocumented: .. tacn:: debug eauto {? @nat_or_var } {? @auto_using } {? @hintbases } Behaves like :tacn:`eauto` but shows the tactics it tries to solve the goal, including failing paths. .. tacn:: bfs eauto {? @nat_or_var } {? @auto_using } {? @hintbases } .. deprecated:: 8.15 This tactic was meant to provide a `breadth-first search `_ version of :tacn:`eauto` but it actually behaved like :tacn:`eauto`. Use :tacn:`typeclasses eauto` with the `bfs` flag instead. .. tacn:: autounfold {? @hintbases } {? @simple_occurrences } Unfolds constants that were declared through a :cmd:`Hint Unfold` in the given databases. :n:`@simple_occurrences` Performs the unfolding in the specified occurrences. .. tacn:: autorewrite {? * } with {+ @ident } {? @occurrences } {? using @ltac_expr } `*` If present, rewrite all occurrences whose side conditions are solved. .. todo: This may not always work as described, see #4976 #7672 and https://github.com/coq/coq/issues/1933#issuecomment-337497938 as mentioned here: https://github.com/coq/coq/pull/13343#discussion_r527801604 :n:`with {+ @ident }` Specifies the rewriting rule bases to use. :n:`@occurrences` Performs rewriting in the specified occurrences. Note: the `at` clause is currently not supported. .. exn:: The "at" syntax isn't available yet for the autorewrite tactic. Appears when there is an `at` clause on the conclusion. :n:`using @ltac_expr` :token:`ltac_expr` is applied to the main subgoal after each rewriting step. Applies rewritings according to the rewriting rule bases :n:`{+ @ident }`. For each rule base, applies each rewriting to the main subgoal until it fails. Once all the rules have been processed, if the main subgoal has changed then the rules of this base are processed again. If the main subgoal has not changed then the next base is processed. For the bases, the behavior is very similar to the processing of the rewriting rules. The rewriting rule bases are built with the :cmd:`Hint Rewrite` command. .. warning:: This tactic may loop if you build non-terminating rewriting systems. .. seealso:: :cmd:`Hint Rewrite` for feeding the database of lemmas used by :tacn:`autorewrite` and :tacn:`autorewrite` for examples showing the use of this tactic. Also see :ref:`strategies4rewriting`. Here are two examples of ``autorewrite`` use. The first one ( *Ackermann function*) shows actually a quite basic use where there is no conditional rewriting. The second one ( *Mac Carthy function*) involves conditional rewritings and shows how to deal with them using the optional tactic of the ``Hint Rewrite`` command. .. example:: Ackermann function .. coqtop:: in reset Require Import Arith. .. coqtop:: in Parameter Ack : nat -> nat -> nat. .. coqtop:: in Axiom Ack0 : forall m:nat, Ack 0 m = S m. Axiom Ack1 : forall n:nat, Ack (S n) 0 = Ack n 1. Axiom Ack2 : forall n m:nat, Ack (S n) (S m) = Ack n (Ack (S n) m). .. coqtop:: in Global Hint Rewrite Ack0 Ack1 Ack2 : base0. .. coqtop:: all Lemma ResAck0 : Ack 3 2 = 29. .. coqtop:: all autorewrite with base0 using try reflexivity. .. example:: MacCarthy function .. coqtop:: in reset Require Import Lia. .. coqtop:: in Parameter g : nat -> nat -> nat. .. coqtop:: in Axiom g0 : forall m:nat, g 0 m = m. 
Axiom g1 : forall n m:nat, (n > 0) -> (m > 100) -> g n m = g (pred n) (m - 10). Axiom g2 : forall n m:nat, (n > 0) -> (m <= 100) -> g n m = g (S n) (m + 11). .. coqtop:: in Global Hint Rewrite g0 g1 g2 using lia : base1. .. coqtop:: in Lemma Resg0 : g 1 110 = 100. .. coqtop:: out Show. .. coqtop:: all autorewrite with base1 using reflexivity || simpl. .. coqtop:: none Qed. .. coqtop:: all Lemma Resg1 : g 1 95 = 91. .. coqtop:: all autorewrite with base1 using reflexivity || simpl. .. coqtop:: none Qed. .. tacn:: easy This tactic tries to solve the current goal by a number of standard closing steps. In particular, it tries to close the current goal using the closing tactics :tacn:`trivial`, :tacn:`reflexivity`, :tacn:`symmetry`, :tacn:`contradiction` and :tacn:`inversion` of hypotheses. If this fails, it tries introducing variables and splitting and-hypotheses, using the closing tactics afterwards, and splitting the goal using :tacn:`split` and recursing. This tactic solves goals that belong to many common classes; in particular, many cases of unsatisfiable hypotheses, and simple equality goals are usually solved by this tactic. .. tacn:: now @ltac_expr Run :n:`@ltac_expr` followed by :tacn:`easy`. This is a notation for :n:`@ltac_expr; easy`. .. _hintdatabases: Hint databases -------------- Hints used by :tacn:`auto`, :tacn:`eauto` and other tactics are stored in hint databases. Each database maps head symbols to a list of hints. Use the :cmd:`Print Hint` command to view a database. Each hint has a cost that is a nonnegative integer and an optional pattern. Hints with lower costs are tried first. :tacn:`auto` tries a hint when the conclusion of the current goal matches its pattern or when the hint has no pattern. Creating Hint databases ----------------------- Hint databases can be created with the :cmd:`Create HintDb` command or implicitly by adding a hint to an unknown database. We recommend you always use :cmd:`Create HintDb` and then immediately use :cmd:`Hint Constants` and :cmd:`Hint Variables` to make those settings explicit. Note that the default transparency settings differ between these two methods of creation. Databases created with :cmd:`Create HintDb` have the default setting `Transparent` for both `Variables` and `Constants`, while implicitly created databases have the `Opaque` setting. .. cmd:: Create HintDb @ident {? discriminated } Creates a new hint database named :n:`@ident`. The database is implemented by a Discrimination Tree (DT) that serves as a filter to select the lemmas that will be applied. When discriminated, the DT uses transparency information to decide if a constant should be considered rigid for filtering, making the retrieval more efficient. By contrast, undiscriminated databases treat all constants as transparent, resulting in a larger number of selected lemmas to be applied, and thus putting more pressure on unification. By default, hint databases are undiscriminated. .. _creating_hints: Creating Hints -------------- The various `Hint` commands share these elements: :n:`{? : {+ @ident } }` specifies the hint database(s) to add to. *(Deprecated since version 8.10:* If no :token:`ident`\s are given, the hint is added to the `core` database.) Outside of sections, these commands support the :attr:`local`, :attr:`export` and :attr:`global` attributes. :attr:`global` is the default. Inside sections, some commands only support the :attr:`local` attribute.
These are :cmd:`Hint Immediate`, :cmd:`Hint Resolve`, :cmd:`Hint Constructors`, :cmd:`Hint Unfold`, :cmd:`Hint Extern` and :cmd:`Hint Rewrite`. :attr:`local` is the default for all hint commands inside sections. + :attr:`local` hints are never visible from other modules, even if they :cmd:`Import` or :cmd:`Require` the current module. + :attr:`export` hints are visible from other modules when they :cmd:`Import` the current module, but not when they only :cmd:`Require` it. + :attr:`global` hints are visible from other modules when they :cmd:`Import` or :cmd:`Require` the current module. .. versionadded:: 8.14 The :cmd:`Hint Rewrite` command now supports locality attributes like other `Hint` commands. .. deprecated:: 8.13 The default value for hint locality will change in a future release. Hints added outside of sections without an explicit locality are now deprecated. We recommend using :attr:`export` where possible. The `Hint` commands are: .. cmd:: Hint Resolve {+ {| @qualid | @one_term } } {? @hint_info } {? : {+ @ident } } Hint Resolve {| -> | <- } {+ @qualid } {? @natural } {? : {+ @ident } } :name: Hint Resolve; _ .. insertprodn hint_info one_pattern .. prodn:: hint_info ::= %| {? @natural } {? @one_pattern } one_pattern ::= @one_term The first form adds each :n:`@qualid` as a hint with the head symbol of the type of :n:`@qualid` to the specified hint databases (:n:`@ident`\s). The cost of the hint is the number of subgoals generated by :tacn:`simple apply` :n:`@qualid` or, if specified, :n:`@natural`. The associated pattern is inferred from the conclusion of the type of :n:`@qualid` or, if specified, the given :n:`@one_pattern`. If the inferred type of :n:`@qualid` does not start with a product, :tacn:`exact` :n:`@qualid` is added to the hint list. If the type can be reduced to a type starting with a product, :tacn:`simple apply` :n:`@qualid` is also added to the hints list. If the inferred type of :n:`@qualid` contains a dependent quantification on a variable which occurs only in the premises of the type and not in its conclusion, no instance could be inferred for the variable by unification with the goal. In this case, the hint is only used by :tacn:`eauto` / :tacn:`typeclasses eauto`, but not by :tacn:`auto`. A typical hint that would only be used by :tacn:`eauto` is a transitivity lemma. :n:`{| -> | <- }` The second form adds the left-to-right (`->`) or right-to-left implication (`<-`) of an equivalence as a hint (informally the hint will be used as, respectively, :tacn:`apply` :n:`-> @qualid` or :tacn:`apply` :n:`<- @qualid`, although as mentioned before, the tactic actually used is a restricted version of :tacn:`apply`). :n:`@one_term` Permits declaring a hint without declaring a new constant first, but this is not recommended. .. warn:: Declaring arbitrary terms as hints is fragile; it is recommended to declare a toplevel constant instead :undocumented: .. exn:: @qualid cannot be used as a hint The head symbol of the type of :n:`@qualid` is a bound variable such that this tactic cannot be associated with a constant. .. cmd:: Hint Immediate {+ {| @qualid | @one_term } } {? : {+ @ident } } For each specified :n:`@qualid`, adds the tactic :tacn:`simple apply` :n:`@qualid;` :tacn:`solve` :n:`[` :tacn:`trivial` :n:`]` to the hint list associated with the head symbol of the type of :n:`@qualid`. This tactic fails unless all the subgoals generated by :tacn:`simple apply` :n:`@qualid` are solved immediately by the :tacn:`trivial` tactic (which only tries tactics with cost 0).
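.. example:: Registering a symmetry-like lemma with :cmd:`Hint Immediate` (illustrative sketch)

   The relation ``R``, the axiom ``R_sym`` and the database ``sym_db`` below are
   hypothetical names chosen only for illustration; the point is that the hint is
   applied once and its single subgoal must then be closed by :tacn:`trivial`:

   .. coqdoc::

      Parameter R : nat -> nat -> Prop.
      Axiom R_sym : forall x y, R x y -> R y x.

      Create HintDb sym_db.
      #[local] Hint Immediate R_sym : sym_db.

      Goal forall x y, R x y -> R y x.
      Proof. auto with sym_db. Qed.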
This command is useful for theorems such as the symmetry of equality or :g:`n+1=m+1 -> n=m` that we may want to introduce with limited use in order to avoid useless proof search. The cost of this tactic (which never generates subgoals) is always 1, so that it is not used by :tacn:`trivial` itself. .. cmd:: Hint Constructors {+ @qualid } {? : {+ @ident } } For each :n:`@qualid` that is an inductive type, adds all its constructors as hints of type ``Resolve``. Then, when the conclusion of current goal has the form :n:`(@qualid ...)`, :tacn:`auto` will try to apply each constructor. .. exn:: @qualid is not an inductive type :undocumented: .. cmd:: Hint Unfold {+ @qualid } {? : {+ @ident } } For each :n:`@qualid`, adds the tactic :tacn:`unfold` :n:`@qualid` to the hint list that will only be used when the head constant of the goal is :token:`qualid`. Its cost is 4. .. cmd:: Hint {| Transparent | Opaque } {+ @qualid } {? : {+ @ident } } :name: Hint Transparent; Hint Opaque Adds transparency hints to the database, making each :n:`@qualid` a transparent or opaque constant during resolution. This information is used during unification of the goal with any lemma in the database and inside the discrimination network to relax or constrain it in the case of discriminated databases. .. exn:: Cannot coerce @qualid to an evaluable reference. :undocumented: .. cmd:: Hint {| Constants | Variables } {| Transparent | Opaque } {? : {+ @ident } } :name: Hint Constants; Hint Variables Sets the transparency flag for constants or variables for the specified hint databases. These flags affect the unification of hints in the database. We advise using this just after a :cmd:`Create HintDb` command. .. cmd:: Hint Extern @natural {? @one_pattern } => @ltac_expr {? : {+ @ident } } Extends :tacn:`auto` with tactics other than :tacn:`apply` and :tacn:`unfold`. :n:`@natural` is the cost, :n:`@one_term` is the pattern to match and :n:`@ltac_expr` is the action to apply. .. note:: Use a :cmd:`Hint Extern` with no pattern to do pattern matching on hypotheses using ``match goal with`` inside the tactic. .. example:: .. coqtop:: none Set Warnings "-deprecated-hint-without-locality". .. coqtop:: in Hint Extern 4 (~(_ = _)) => discriminate : core. Now, when the head of the goal is a disequality, ``auto`` will try discriminate if it does not manage to solve the goal with hints with a cost less than 4. One can even use some sub-patterns of the pattern in the tactic script. A sub-pattern is a question mark followed by an identifier, like ``?X1`` or ``?X2``. Here is an example: .. example:: .. coqtop:: reset none Set Warnings "-deprecated-hint-without-locality". .. coqtop:: all Require Import List. Hint Extern 5 ({?X1 = ?X2} + {?X1 <> ?X2}) => generalize X1, X2; decide equality : eqdec. Goal forall a b:list (nat * nat), {a = b} + {a <> b}. info_auto. .. cmd:: Hint Cut [ @hints_regexp ] {? : {+ @ident } } .. DISABLED insertprodn hints_regexp hints_regexp .. prodn:: hints_regexp ::= {+ @qualid } (hint or instance identifier) | _ (any hint) | @hints_regexp | @hints_regexp (disjunction) | @hints_regexp @hints_regexp (sequence) | @hints_regexp * (Kleene star) | emp (empty) | eps (epsilon) | ( @hints_regexp ) Used to cut the proof search tree according to a regular expression that matches the paths to be cut. During proof search, the path of successive successful hints on a search branch is recorded as a list of identifiers for the hints (note that :cmd:`Hint Extern`\s do not have an associated identifier). 
For each hint :n:`@qualid` in the hint database, the current path `p` extended with :n:`@qualid` is matched against the current cut expression `c` associated with the hint database. If the match succeeds the hint is *not* applied. :n:`Hint Cut @hints_regexp` sets the cut expression to :n:`c | @hints_regexp`. The initial cut expression is `emp`. The output of :cmd:`Print HintDb` shows the cut expression. .. warning:: The regexp matches the entire path. Most hints will start with a leading `( _* )` to match the tail of the path. (Note that `(_*)` misparses since `*)` would end a comment.) .. warning:: There is no operator precedence during parsing, one can check with :cmd:`Print HintDb` to verify the current cut expression. .. warning:: These hints currently only apply to typeclass proof search and the :tacn:`typeclasses eauto` tactic. .. cmd:: Hint Mode @qualid {+ {| + | ! | - } } {? : {+ @ident } } Sets an optional mode of use for the identifier :n:`@qualid`. When proof search has a goal that ends in an application of :n:`@qualid` to arguments :n:`@arg ... @arg`, the mode tells if the hints associated with :n:`@qualid` can be applied or not. A mode specification is a list of ``+``, ``!`` or ``-`` items that specify if an argument of the identifier is to be treated as an input (``+``), if its head only is an input (``!``) or an output (``-``) of the identifier. Mode ``-`` matches any term, mode ``+`` matches a term if and only if it does not contain existential variables, while mode ``!`` matches a term if and only if the *head* of the term is not an existential variable. The head of a term is understood here as the applicative head, recursively, ignoring casts. :cmd:`Hint Mode` is especially useful for typeclasses, when one does not want to support default instances and wants to avoid ambiguity in general. Setting a parameter of a class as an input forces proof search to be driven by that index of the class, with ``!`` allowing existentials to appear in the index but not at its head. .. note:: + Multiple modes can be declared for a single identifier. In that case only one mode needs to match the arguments for the hints to be applied. + If you want to add hints such as :cmd:`Hint Transparent`, :cmd:`Hint Cut`, or :cmd:`Hint Mode`, for typeclass resolution, do not forget to put them in the ``typeclass_instances`` hint database. .. warn:: This hint is not local but depends on a section variable. It will disappear when the section is closed. A hint with a non-local attribute was added inside a section, but it refers to a local variable that will go out of scope when closing the section. As a result the hint will not survive either. .. cmd:: Hint Rewrite {? {| -> | <- } } {+ @one_term } {? using @ltac_expr } {? : {* @ident } } :n:`{? using @ltac_expr }` If specified, :n:`@ltac_expr` is applied to the generated subgoals, except for the main subgoal. :n:`{| -> | <- }` Arrows specify the orientation; left to right (:n:`->`) or right to left (:n:`<-`). If no arrow is given, the default orientation is left to right (:n:`->`). Adds the terms :n:`{+ @one_term }` (their types must be equalities) to the rewriting bases :n:`{* @ident }`. Note that the rewriting bases are distinct from the :tacn:`auto` hint bases and that :tacn:`auto` does not take them into account. .. cmd:: Print Rewrite HintDb @ident This command displays all rewrite hints contained in :n:`@ident`. .. cmd:: Remove Hints {+ @qualid } {? : {+ @ident } } Removes the hints associated with the :n:`{+ @qualid }` in databases :n:`{+ @ident}`. 
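.. example:: Removing a previously added hint (illustrative sketch)

   The database name ``le_db`` below is a hypothetical name used only for
   illustration; ``le_n`` and ``le_S`` are the constructors of ``le`` from the
   standard library. :cmd:`Print HintDb` can be used to check the effect of the
   removal:

   .. coqdoc::

      Create HintDb le_db.
      #[local] Hint Resolve le_n le_S : le_db.
      Remove Hints le_S : le_db.
      Print HintDb le_db.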
Note: hints created with :cmd:`Hint Extern` currently can't be removed. The best workaround for this is to make the hints non-global and carefully select which modules you import. .. cmd:: Print Hint {? {| * | @reference } } :n:`*` Display all declared hints. :n:`@reference` Display all hints associated with the head symbol :n:`@reference`. Displays tactics from the hints list. The default is to show hints that apply to the conclusion of the current goal. The other forms with :n:`*` and :n:`@reference` can be used even if no proof is open. Each hint has a cost that is a nonnegative integer and an optional pattern. The hints with lower cost are tried first. .. cmd:: Print HintDb @ident This command displays all hints from database :n:`@ident`. Hint databases defined in the Coq standard library -------------------------------------------------- Several hint databases are defined in the Coq standard library. The actual content of a database is the collection of hints declared to belong to this database in each of the various modules currently loaded. Especially, requiring new modules may extend the database. At Coq startup, only the core database is nonempty and can be used. :core: This special database is automatically used by ``auto``, except when pseudo-database ``nocore`` is given to ``auto``. The core database contains only basic lemmas about negation, conjunction, and so on. Most of the hints in this database come from the Init and Logic directories. :arith: This database contains all lemmas about Peano’s arithmetic proved in the directories Init and Arith. :zarith: contains lemmas about binary signed integers from the directories theories/ZArith. The database also contains high-cost hints that call :tacn:`lia` on equations and inequalities in ``nat`` or ``Z``. :bool: contains lemmas about booleans, mostly from directory theories/Bool. :datatypes: is for lemmas about lists, streams and so on that are mainly proved in the Lists subdirectory. :sets: contains lemmas about sets and relations from the directories Sets and Relations. :typeclass_instances: contains all the typeclass instances declared in the environment, including those used for ``setoid_rewrite``, from the Classes directory. :fset: internal database for the implementation of the ``FSets`` library. :ordered_type: lemmas about ordered types (as defined in the legacy ``OrderedType`` module), mainly used in the ``FSets`` and ``FMaps`` libraries. You are advised not to put your own hints in the core database, but use one or several databases specific to your development. Hint locality ------------- Hints provided by the ``Hint`` commands are erased when closing a section. Conversely, all hints of a module ``A`` that are not defined inside a section (and not defined with option ``Local``) become available when the module ``A`` is required (using e.g. ``Require A.``). As of today, hints only have a binary behavior regarding locality, as described above: either they disappear at the end of a section scope, or they remain global forever. This causes a scalability issue, because hints coming from an unrelated part of the code may badly influence another development. It can be mitigated to some extent thanks to the :cmd:`Remove Hints` command, but this is a mere workaround and has some limitations (for instance, external hints cannot be removed). A proper way to fix this issue is to bind the hints to their module scope, as for most of the other objects Coq uses. 
Hints should only be made available when the module they are defined in is imported, not just required. It is very difficult to change the historical behavior, as it would break a lot of scripts. We propose a smooth transitional path by providing the :opt:`Loose Hint Behavior` option which accepts three flags allowing for a fine-grained handling of non-imported hints. .. opt:: Loose Hint Behavior {| "Lax" | "Warn" | "Strict" } This :term:`option` accepts three values, which control the behavior of hints w.r.t. :cmd:`Import`: - "Lax": this is the default, and corresponds to the historical behavior, that is, hints defined outside of a section have a global scope. - "Warn": outputs a warning when a non-imported hint is used. Note that this is an over-approximation, because a hint may be triggered by a run that will eventually fail and backtrack, resulting in the hint not being actually useful for the proof. - "Strict": changes the behavior of an unloaded hint to a immediate fail tactic, allowing to emulate an import-scoped hint mechanism. .. _tactics-implicit-automation: Setting implicit automation tactics ----------------------------------- .. cmd:: Proof with @ltac_expr {? using @section_var_expr } Starts a proof in which :token:`ltac_expr` is applied to the active goals after each tactic that ends with `...` instead of the usual single period. ":n:`@tactic...`" is equivalent to ":n:`@tactic; @ltac_expr.`". .. seealso:: :cmd:`Proof` in :ref:`proof-editing-mode`. coq-8.15.0/doc/sphinx/proofs/automatic-tactics/index.rst000066400000000000000000000011271417001151100232240ustar00rootroot00000000000000.. _automatic-tactics: ===================================================== Automatic solvers and programmable tactics ===================================================== Some tactics are largely automated and are able to solve complex goals. This chapter presents both built-in solvers that can be used on specific categories of goals and programmable tactics that the user can instrument to handle complex goals in new domains. .. toctree:: :maxdepth: 1 logic ../../addendum/micromega ../../addendum/ring ../../addendum/nsatz auto ../../addendum/generalized-rewriting coq-8.15.0/doc/sphinx/proofs/automatic-tactics/logic.rst000066400000000000000000000220771417001151100232210ustar00rootroot00000000000000.. _decisionprocedures: ============================== Solvers for logic and equality ============================== .. tacn:: tauto This tactic implements a decision procedure for intuitionistic propositional calculus based on the contraction-free sequent calculi LJT* of Roy Dyckhoff :cite:`Dyc92`. Note that :tacn:`tauto` succeeds on any instance of an intuitionistic tautological proposition. :tacn:`tauto` unfolds negations and logical equivalence but does not unfold any other definition. .. example:: The following goal can be proved by :tacn:`tauto` whereas :tacn:`auto` would fail: .. coqtop:: reset all Goal forall (x:nat) (P:nat -> Prop), x = 0 \/ P x -> x <> 0 -> P x. intros. tauto. Moreover, if it has nothing else to do, :tacn:`tauto` performs introductions. Therefore, the use of :tacn:`intros` in the previous proof is unnecessary. :tacn:`tauto` can for instance for: .. example:: .. coqtop:: reset all Goal forall (A:Prop) (P:nat -> Prop), A \/ (forall x:nat, ~ A -> P x) -> forall x:nat, ~ A -> P x. tauto. .. 
note:: In contrast, :tacn:`tauto` cannot solve the following goal :g:`Goal forall (A:Prop) (P:nat -> Prop), A \/ (forall x:nat, ~ A -> P x) ->` :g:`forall x:nat, ~ ~ (A \/ P x).` because :g:`(forall x:nat, ~ A -> P x)` cannot be treated as atomic and an instantiation of `x` is necessary. .. tacn:: dtauto While :tacn:`tauto` recognizes inductively defined connectives isomorphic to the standard connectives ``and``, ``prod``, ``or``, ``sum``, ``False``, ``Empty_set``, ``unit`` and ``True``, :tacn:`dtauto` also recognizes all inductive types with one constructor and no indices, i.e. record-style connectives. .. todo would be nice to explain/discuss the various types of flags that define the differences between these tactics. See Tauto.v/tauto.ml. .. tacn:: intuition {? @ltac_expr } Uses the search tree built by the decision procedure for :tacn:`tauto` to generate a set of subgoals equivalent to the original one (but simpler than it) and applies :n:`@ltac_expr` to them :cite:`Mun94`. If :n:`@ltac_expr` is not specified, it defaults to :n:`auto with *` If :n:`@ltac_expr` fails on some goals then :tacn:`intuition` fails. In fact, :tacn:`tauto` is simply :g:`intuition fail`. :tacn:`intuition` recognizes inductively defined connectives isomorphic to the standard connectives ``and``, ``prod``, ``or``, ``sum``, ``False``, ``Empty_set``, ``unit`` and ``True``. .. example:: For instance, the tactic :g:`intuition auto` applied to the goal:: (forall (x:nat), P x) /\ B -> (forall (y:nat), P y) /\ P O \/ B /\ P O internally replaces it by the equivalent one:: (forall (x:nat), P x), B |- P O and then uses :tacn:`auto` which completes the proof. .. tacn:: dintuition {? @ltac_expr } In addition to the inductively defined connectives recognized by :tacn:`intuition`, :tacn:`dintuition` also recognizes all inductive types with one constructor and no indices, i.e. record-style connectives. .. flag:: Intuition Negation Unfolding This :term:`flag` controls whether :tacn:`intuition` unfolds inner negations which do not need to be unfolded. It is on by default. .. tacn:: rtauto Solves propositional tautologies similarly to :tacn:`tauto`, but the proof term is built using a reflection scheme applied to a sequent calculus proof of the goal. The search procedure is also implemented using a different technique. Users should be aware that this difference may result in faster proof search but slower proof checking, and :tacn:`rtauto` might not solve goals that :tacn:`tauto` would be able to solve (e.g. goals involving universal quantifiers). Note that this tactic is only available after a ``Require Import Rtauto``. .. tacn:: firstorder {? @ltac_expr } {? using {+, @qualid } } {? with {+ @ident } } An experimental extension of :tacn:`tauto` to first-order reasoning. It is not restricted to usual logical connectives but instead can reason about any first-order class inductive definition. :token:`ltac_expr` Tries to solve the goal with :token:`ltac_expr` when no logical rule applies. If unspecified, the tactic uses the default from the :opt:`Firstorder Solver` option. :n:`using {+, @qualid }` Adds the lemmas :n:`{+, @qualid }` to the proof search environment. If :n:`@qualid` refers to an inductive type, its constructors are added to the proof search environment. :n:`with {+ @ident }` Adds lemmas from :tacn:`auto` hint bases :n:`{+ @ident }` to the proof search environment. .. opt:: Firstorder Solver @ltac_expr The default tactic used by :tacn:`firstorder` when no rule applies in :g:`auto with core`. 
It can be set locally or globally using this :term:`option`. .. cmd:: Print Firstorder Solver Prints the default tactic used by :tacn:`firstorder` when no rule applies. .. opt:: Firstorder Depth @natural This :term:`option` controls the proof search depth bound. .. tacn:: congruence {? @natural } {? with {+ @one_term } } :token:`natural` Specifies the maximum number of hypotheses stating quantified equalities that may be added to the problem in order to solve it. The default is 1000. :n:`{? with {+ @one_term } }` Adds :n:`{+ @one_term }` to the pool of terms used by :tacn:`congruence`. This helps in case you have partially applied constructors in your goal. Implements the standard Nelson and Oppen congruence closure algorithm, which is a decision procedure for ground equalities with uninterpreted symbols. It also includes constructor theory (see :tacn:`injection` and :tacn:`discriminate`). If the goal is a non-quantified equality, congruence tries to prove it with non-quantified equalities in the context. Otherwise it tries to infer a discriminable equality from those in the context. Alternatively, congruence tries to prove that a hypothesis is equal to the goal or to the negation of another hypothesis. :tacn:`congruence` is also able to take advantage of hypotheses stating quantified equalities, but you have to provide a bound for the number of extra equalities generated that way. Please note that one of the sides of the equality must contain all the quantified variables in order for congruence to match against it. Increasing the maximum number of hypotheses may solve problems that would have failed with a smaller value. It will make failures slower but it won't make successes found with the smaller value any slower. You may want to use :tacn:`assert` to add some lemmas as hypotheses so that :tacn:`congruence` can use them. .. tacn:: simple congruence {? @natural } {? with {+ @one_term } } Behaves like :tacn:`congruence`, but does not unfold definitions. .. example:: .. coqtop:: reset all Theorem T (A:Type) (f:A -> A) (g: A -> A -> A) a b: a=(f a) -> (g b (f a))=(f (f a)) -> (g a b)=(f (g b a)) -> (g a b)=a. intros. congruence. Qed. Theorem inj (A:Type) (f:A -> A * A) (a c d: A) : f = pair a -> Some (f c) = Some (f d) -> c=d. intros. congruence. Qed. .. exn:: I don’t know how to handle dependent equality. The decision procedure managed to find a proof of the goal or of a discriminable equality but this proof could not be built in Coq because of dependently-typed functions. .. exn:: Goal is solvable by congruence but some arguments are missing. Try congruence with {+ @term}, replacing metavariables by arbitrary terms. The decision procedure could solve the goal with the provision that additional arguments are supplied for some partially applied constructors. Any term of an appropriate type will allow the tactic to successfully solve the goal. Those additional arguments can be given to congruence by filling in the holes in the terms given in the error message, using the `with` clause. :opt:`Debug` ``"congruence"`` makes :tacn:`congruence` print debug information. .. tacn:: btauto The tactic :tacn:`btauto` implements a reflexive solver for boolean tautologies. It solves goals of the form :g:`t = u` where `t` and `u` are constructed over the following grammar: .. 
prodn:: btauto_term ::= @ident | true | false | orb @btauto_term @btauto_term | andb @btauto_term @btauto_term | xorb @btauto_term @btauto_term | negb @btauto_term | if @btauto_term then @btauto_term else @btauto_term Whenever the formula supplied is not a tautology, it also provides a counter-example. Internally, it uses a system very similar to the one of the ring tactic. Note that this tactic is only available after a ``Require Import Btauto``. .. exn:: Cannot recognize a boolean equality. The goal is not of the form :g:`t = u`. Especially note that :tacn:`btauto` doesn't introduce variables into the context on its own. coq-8.15.0/doc/sphinx/proofs/creating-tactics/000077500000000000000000000000001417001151100211705ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/proofs/creating-tactics/index.rst000066400000000000000000000025001417001151100230260ustar00rootroot00000000000000.. _writing-tactics: ==================== Creating new tactics ==================== The languages presented in this chapter allow one to build complex tactics by combining existing ones with constructs such as conditionals and looping. While :ref:`Ltac ` was initially thought of as a language for doing some basic combinations, it has been used successfully to build highly complex tactics as well, but this has also highlighted its limits and fragility. The experimental language :ref:`Ltac2 ` is a typed and more principled variant which is more adapted to building complex tactics. There are other solutions beyond these two tactic languages to write new tactics: - `Mtac2 `_ is an external plugin which provides another typed tactic language. While Ltac2 belongs to the ML language family, Mtac2 reuses the language of Coq itself as the language to build Coq tactics. - The most traditional way of building new complex tactics is to write a Coq plugin in OCaml. Beware that this also requires much more effort and commitment. A tutorial for writing Coq plugins is available in the Coq repository in `doc/plugin_tutorial `_. .. toctree:: :maxdepth: 1 ../../proof-engine/ltac ../../proof-engine/ltac2 coq-8.15.0/doc/sphinx/proofs/writing-proofs/000077500000000000000000000000001417001151100207355ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/proofs/writing-proofs/equality.rst000066400000000000000000001342761417001151100233410ustar00rootroot00000000000000========================= Reasoning with equalities ========================= There are multiple notions of :gdef:`equality` in Coq: - :gdef:`Leibniz equality` is the standard way to define equality in Coq and the Calculus of Inductive Constructions, which is in terms of a binary relation, i.e. a binary function that returns a `Prop`. The standard library defines `eq` similar to this: .. coqdoc:: Inductive eq {A : Type} (x : A) : A -> Prop := eq_refl : eq x x. The notation `x = y` represents the term `eq x y`. The notation `x = y :> A` gives the type of x and y explicitly. - :gdef:`Setoid equality ` defines equality in terms of an equivalence relation. A :gdef:`setoid` is a set that is equipped with an equivalence relation (see https://en.wikipedia.org/wiki/Setoid). These are needed to form a :gdef:`quotient set` or :gdef:`quotient` (see https://en.wikipedia.org/wiki/Equivalence_Class). In Coq, users generally work with setoids rather than constructing quotients, for which there is no specific support. - :gdef:`Definitional equality ` is equality based on the :ref:`conversion rules `, which Coq can determine automatically. 
When two terms are definitionally equal, Coq knows it can replace one with the other, such as with :tacn:`change` `X with Y`, among many other advantages. ":term:`Convertible `" is another way of saying that two terms are definitionally equal. Tactics for dealing with equality of inductive types such as :tacn:`injection` and :tacn:`inversion` are described :ref:`here `. Tactics for simple equalities ----------------------------- .. tacn:: reflexivity For a goal with the form :n:`{? forall @open_binders , } t = u`, verifies that `t` and `u` are :term:`definitionally equal `, and if so, solves the goal (by applying `eq_refl`). If not, it fails. The tactic may also be applied to goals with the form :n:`{? forall @open_binders , } R @term__1 @term__2` where `R` is a reflexive relation registered with the `Equivalence` or `Reflexive` typeclasses. See :cmd:`Class` and :cmd:`Instance`. .. exn:: The relation @ident is not a declared reflexive relation. Maybe you need to require the Coq.Classes.RelationClasses library :undocumented: .. tacn:: symmetry {? @simple_occurrences } Changes a goal that has the form :n:`{? forall @open_binders , } t = u` into :n:`u = t`. :n:`@simple_occurrences` may be used to apply the change in the selected hypotheses and/or the conclusion. The tactic may also be applied to goals with the form :n:`{? forall @open_binders , } R @term__1 @term__2` where `R` is a symmetric relation registered with the `Equivalence` or `Symmetric` typeclasses. See :cmd:`Class` and :cmd:`Instance`. .. exn:: The relation @ident is not a declared symmetric relation. Maybe you need to require the Coq.Classes.RelationClasses library :undocumented: .. tacn:: transitivity @one_term Changes a goal that has the form :n:`{? forall @open_binders , } t = u` into the two subgoals :n:`t = @one_term` and :n:`@one_term = u`. The tactic may also be applied to goals with the form :n:`{? forall @open_binders , } R @term__1 @term__2` where `R` is a transitive relation registered with the `Equivalence` or `Transitivity` typeclasses. See :cmd:`Class` and :cmd:`Instance`. .. tacn:: etransitivity This tactic behaves like :tacn:`transitivity`, using a fresh evar instead of a concrete :token:`one_term`. .. exn:: The relation @ident is not a declared transitive relation. Maybe you need to require the Coq.Classes.RelationClasses library :undocumented: .. tacn:: f_equal For a goal with the form :n:`f a__1 ... a__n = g b__1 ... b__n`, creates subgoals :n:`f = g` and :n:`a__i = b__i` for the `n` arguments. Subgoals that can be proven by :tacn:`reflexivity` or :tacn:`congruence` are solved automatically. .. _rewritingexpressions: Rewriting with Leibniz and setoid equality ------------------------------------------ .. tacn:: rewrite {+, @oriented_rewriter } {? @occurrences } {? by @ltac_expr3 } .. insertprodn oriented_rewriter one_term_with_bindings .. prodn:: oriented_rewriter ::= {? {| -> | <- } } {? @natural } {? {| ? | ! } } @one_term_with_bindings one_term_with_bindings ::= {? > } @one_term {? with @bindings } Replaces subterms with other subterms that have been proven to be equal. The type of :n:`@one_term` must have the form: :n:`{? forall @open_binders , } EQ @term__1 @term__2` where :g:`EQ` is the :term:`Leibniz equality` `eq` or a registered :term:`setoid equality`. Note that :n:`eq @term__1 @term__2` is typically written with the infix notation :n:`@term__1 = @term__2`. You must `Require Setoid` to use the tactic with a setoid equality or with :ref:`setoid rewriting `. 
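.. example:: A typical use of :tacn:`rewrite` (illustrative sketch)

   This minimal sketch shows the most common use: rewriting with a Leibniz
   equality hypothesis from the local context (``f``, ``x`` and ``y`` are just
   local names for the example):

   .. coqdoc::

      Goal forall (f : nat -> nat) (x y : nat), x = y -> f x = f y.
      Proof.
        intros f x y H.
        rewrite H.       (* the goal becomes f y = f y *)
        reflexivity.
      Qed.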
:n:`rewrite @one_term` finds subterms matching :n:`@term__1` in the goal, and replaces them with :n:`@term__2` (or the reverse if `<-` is given). Some of the variables :g:`x`\ :sub:`i` are solved by unification, and some of the types :n:`A__1, …, A__n` may become new subgoals. :tacn:`rewrite` won't find occurrences inside `forall` that refer to variables bound by the `forall`; use the more advanced :tacn:`setoid_rewrite` if you want to find such occurrences. :n:`{+, @oriented_rewriter }` The :n:`@oriented_rewriter`\s are applied sequentially to the first goal generated by the previous :n:`@oriented_rewriter`. If any of them fail, the tactic fails. :n:`{? {| -> | <- } }` For `->` (the default), :n:`@term__1` is rewritten into :n:`@term__2`. For `<-`, :n:`@term__2` is rewritten into :n:`@term__1`. :n:`{? @natural } {? {| ? | ! } }` :n:`@natural` is the number of rewrites to perform. If `?` is given, :n:`@natural` is the maximum number of rewrites to perform; otherwise :n:`@natural` is the exact number of rewrites to perform. `?` (without :n:`@natural`) performs the rewrite as many times as possible (possibly zero times). This form never fails. `!` (without :n:`@natural`) performs the rewrite as many times as possible and at least once. The tactic fails if the requested number of rewrites can't be performed. :n:`@natural !` is equivalent to :n:`@natural`. :n:`@occurrences` If :n:`@occurrences` specifies multiple occurrences, the tactic succeeds if any of them can be rewritten. If not specified, only the first occurrence in the conclusion is replaced. .. note:: If :n:`at @occs_nums` is specified, rewriting is always done with :ref:`setoid rewriting `, even for Leibniz equality, which means that you must `Require Setoid` to use that form. However, note that :tacn:`rewrite` (even when using setoid rewriting) and :tacn:`setoid_rewrite` don't behave identically (as is noted above and below). :n:`by @ltac_expr3` If specified, is used to resolve all side conditions generated by the tactic. .. note:: For each selected hypothesis and/or the conclusion, :tacn:`rewrite` finds the first matching subterm in depth-first search order. Only subterms identical to that first matched subterm are rewritten. If the `at` clause is specified, only these subterms are considered when counting occurrences. To select a different set of matching subterms, you can specify how some or all of the free variables are bound by using a `with` clause (see :n:`@one_term_with_bindings`). For instance, if we want to rewrite the right-hand side in the following goal, this will not work: .. coqtop:: none Require Import Arith. .. coqtop:: out Lemma example x y : x + y = y + x. .. coqtop:: all fail rewrite Nat.add_comm at 2. One can explicitly specify how some variables are bound to match a different subterm: .. coqtop:: all abort rewrite Nat.add_comm with (m := x). Note that the more advanced :tacn:`setoid_rewrite` tactic behaves differently, and thus the number of occurrences available to rewrite may differ between the two tactics. .. exn:: Tactic failure: Setoid library not loaded. :undocumented: .. todo You can use Typeclasses Debug to tell whether rewrite used setoid rewriting. Example here: https://github.com/coq/coq/pull/13470#discussion_r539230973 .. exn:: Cannot find a relation to rewrite. :undocumented: .. exn:: Tactic generated a subgoal identical to the original goal. :undocumented: .. exn:: Found no subterm matching @term in @ident. Found no subterm matching @term in the current goal. 
This happens if :n:`@term` does not occur in, respectively, the named hypothesis or the goal. .. tacn:: erewrite {+, @oriented_rewriter } {? @occurrences } {? by @ltac_expr3 } Works like :tacn:`rewrite`, but turns unresolved bindings, if any, into existential variables instead of failing. It has the same parameters as :tacn:`rewrite`. .. flag:: Keyed Unification This :term:`flag` makes higher-order unification used by :tacn:`rewrite` rely on a set of keys to drive unification. The subterms, considered as rewriting candidates, must start with the same key as the left- or right-hand side of the lemma given to rewrite, and the arguments are then unified up to full reduction. .. tacn:: rewrite * {? {| -> | <- } } @one_term {? in @ident } {? at @rewrite_occs } {? by @ltac_expr3 } rewrite * {? {| -> | <- } } @one_term at @rewrite_occs in @ident {? by @ltac_expr3 } :name: rewrite *; _ :undocumented: .. tacn:: rewrite_db @ident {? in @ident } :undocumented: .. tacn:: replace @one_term__from with @one_term__to {? @occurrences } {? by @ltac_expr3 } replace {? {| -> | <- } } @one_term__from {? @occurrences } :name: replace; _ The first form replaces all free occurrences of :n:`@one_term__from` in the current goal with :n:`@one_term__to` and generates an equality :n:`@one_term__to = @one_term__from` as a subgoal. (Note the generated equality is reversed with respect to the order of the two terms in the tactic syntax; see issue `#13480 `_.) This equality is automatically solved if it occurs among the hypotheses, or if its symmetric form occurs. The second form, with `->` or no arrow, replaces :n:`@one_term__from` with :n:`@term__to` using the first hypothesis whose type has the form :n:`@one_term__from = @term__to`. If `<-` is given, the tactic uses the first hypothesis with the reverse form, i.e. :n:`@term__to = @one_term__from`. :n:`@occurrences` The `type of` and `value of` forms are not supported. Note you must `Require Setoid` to use the `at` clause in :n:`@occurrences`. :n:`by @ltac_expr3` Applies the :n:`@ltac_expr3` to solve the generated equality. .. exn:: Terms do not have convertible types. :undocumented: .. tacn:: cutrewrite {? {| -> | <- } } @one_term {? in @ident } Where :n:`@one_term` is an equality. .. deprecated:: 8.5 Use :tacn:`replace` instead. .. tacn:: substitute {? {| -> | <- } } @one_term {? with @bindings } :undocumented: .. tacn:: subst {* @ident } For each :n:`@ident`, in order, for which there is a hypothesis in the form :n:`@ident = @term` or :n:`@term = @ident`, replaces :n:`@ident` with :n:`@term` everywhere in the hypotheses and the conclusion and clears :n:`@ident` and the hypothesis from the context. If there are multiple hypotheses that match the :n:`@ident`, the first one is used. If no :n:`@ident` is given, replacement is done for all hypotheses in the appropriate form in top to bottom order. If :n:`@ident` is a local definition of the form :n:`@ident := @term`, it is also unfolded and cleared. If :n:`@ident` is a section variable it must have no indirect occurrences in the goal, i.e. no global declarations implicitly depending on the section variable may be present in the goal. .. note:: If the hypothesis is itself dependent in the goal, it is replaced by the proof of reflexivity of equality. .. flag:: Regular Subst Tactic This :term:`flag` controls the behavior of :tacn:`subst`. 
When it is activated (it is by default), :tacn:`subst` also deals with the following corner cases: + A context with ordered hypotheses :n:`@ident__1 = @ident__2` and :n:`@ident__1 = t`, or :n:`t′ = @ident__1` with `t′` not a variable, and no other hypotheses of the form :n:`@ident__2 = u` or :n:`u = @ident__2`; without the flag, a second call to subst would be necessary to replace :n:`@ident__2` by `t` or `t′` respectively. + The presence of a recursive equation which without the flag would be a cause of failure of :tacn:`subst`. + A context with cyclic dependencies as with hypotheses :n:`@ident__1 = f @ident__2` and :n:`@ident__2 = g @ident__1` which without the flag would be a cause of failure of :tacn:`subst`. Additionally, it prevents a local definition such as :n:`@ident := t` from being unfolded which otherwise it would exceptionally unfold in configurations containing hypotheses of the form :n:`@ident = u`, or :n:`u′ = @ident` with `u′` not a variable. Finally, it preserves the initial order of hypotheses, which without the flag it may break. .. exn:: Cannot find any non-recursive equality over @ident. :undocumented: .. exn:: Section variable @ident occurs implicitly in global declaration @qualid present in hypothesis @ident. Section variable @ident occurs implicitly in global declaration @qualid present in the conclusion. Raised when the variable is a section variable with indirect dependencies in the goal. If :n:`@ident` is a section variable, it must not have any indirect occurrences in the goal, i.e. no global declarations implicitly depending on the section variable may be present in the goal. .. tacn:: simple subst :undocumented: .. tacn:: stepl @one_term {? by @ltac_expr } For chaining rewriting steps. It assumes a goal in the form :n:`R @term__1 @term__2` where ``R`` is a binary relation and relies on a database of lemmas of the form :g:`forall x y z, R x y -> eq x z -> R z y` where `eq` is typically a setoid equality. The application of :n:`stepl @one_term` then replaces the goal by :n:`R @one_term @term__2` and adds a new goal stating :n:`eq @one_term @term__1`. If :n:`@ltac_expr` is specified, it is applied to the side condition. .. cmd:: Declare Left Step @one_term Adds :n:`@one_term` to the database used by :tacn:`stepl`. This tactic is especially useful for parametric setoids which are not accepted as regular setoids for :tacn:`rewrite` and :tacn:`setoid_replace` (see :ref:`Generalizedrewriting`). .. tacn:: stepr @one_term {? by @ltac_expr } This behaves like :tacn:`stepl` but on the right hand side of the binary relation. Lemmas are expected to be in the form :g:`forall x y z, R x y -> eq y z -> R x z`. .. cmd:: Declare Right Step @one_term Adds :n:`@term` to the database used by :tacn:`stepr`. Rewriting with definitional equality ------------------------------------ .. tacn:: change {? @one_term__from {? at @occs_nums } with } @one_term__to {? @occurrences } Replaces terms with other :term:`convertible` terms. If :n:`@one_term__from` is not specified, then :n:`@one_term__to` replaces the conclusion and/or the specified hypotheses. If :n:`@one_term__from` is specified, the tactic replaces occurrences of :n:`@one_term__to` within the conclusion and/or the specified hypotheses. :n:`{? @one_term__from {? at @occs_nums } with }` Replaces the occurrences of :n:`@one_term__from` specified by :n:`@occs_nums` with :n:`@one_term__to`, provided that the two :n:`@one_term`\s are convertible. 
:n:`@one_term__from` may contain pattern variables such as `?x`, whose value which will substituted for `x` in :n:`@one_term__to`, such as in `change (f ?x ?y) with (g (x, y))` or `change (fun x => ?f x) with f`. The `at … with …` form is deprecated in 8.14; use `with … at …` instead. For `at … with … in H |-`, use `with … in H at … |-`. :n:`@occurrences` If `with` is not specified, :n:`@occurrences` must only specify entire hypotheses and/or the goal; it must not include any :n:`at @occs_nums` clauses. .. exn:: Not convertible. :undocumented: .. exn:: Found an "at" clause without "with" clause :undocumented: .. tacn:: now_show @one_term A synonym for :n:`change @one_term`. It can be used to make some proof steps explicit when refactoring a proof script to make it readable. .. seealso:: :ref:`applyingconversionrules` .. tacn:: change_no_check {? @one_term__from {? at @occs_nums } with } @one_term__to {? @occurrences } For advanced usage. Similar to :tacn:`change`, but as an optimization, it skips checking that :n:`@one_term__to` is convertible with the goal or :n:`@one_term__from`. Recall that the Coq kernel typechecks proofs again when they are concluded to ensure correctness. Hence, using :tacn:`change` checks convertibility twice overall, while :tacn:`change_no_check` can produce ill-typed terms, but checks convertibility only once. Hence, :tacn:`change_no_check` can be useful to speed up certain proof scripts, especially if one knows by construction that the argument is indeed convertible to the goal. In the following example, :tacn:`change_no_check` replaces :g:`False` with :g:`True`, but :cmd:`Qed` then rejects the proof, ensuring consistency. .. example:: .. coqtop:: all abort fail Goal False. change_no_check True. exact I. Qed. .. example:: .. coqtop:: all abort fail Goal True -> False. intro H. change_no_check False in H. exact H. Qed. .. _applyingconversionrules: Applying conversion rules ------------------------- These tactics apply reductions and expansions, replacing :term:`convertible` subterms with others that are equal by definition in |CiC|. They implement different specialized uses of the :tacn:`change` tactic. Other ways to apply these reductions are through the :cmd:`Eval` command, the `Eval` clause in the :cmd:`Definition`/:cmd:`Example` command and the :tacn:`eval` tactic. Tactics described in this section include: - :tacn:`lazy` and :tacn:`cbv`, which allow precise selection of which reduction rules to apply - :tacn:`simpl` and :tacn:`cbn`, which are "clever" tactics meant to give the most readable result - :tacn:`hnf` and :tacn:`red`, which apply reduction rules only to the head of the term - :tacn:`vm_compute` and :tacn:`native_compute`, which are performance-oriented. Conversion tactics, with two exceptions, only change the types and contexts of existential variables and leave the proof term unchanged. (The :tacn:`vm_compute` and :tacn:`native_compute` tactics change existential variables in a way similar to other conversions while also adding a single explicit cast to the proof term to tell the kernel which reduction engine to use. See :ref:`type-cast`.) For example: .. coqtop:: all Goal 3 + 4 = 7. Show Proof. Show Existentials. cbv. Show Proof. Show Existentials. .. coqtop:: none Abort. .. tacn:: lazy {? @reductions } @simple_occurrences cbv {? @reductions } @simple_occurrences .. insertprodn reductions delta_reductions .. prodn:: reductions ::= {+ @reduction } | @delta_reductions reduction ::= beta | delta {? 
@delta_reductions } | match | fix | cofix | iota | zeta delta_reductions ::= {? - } [ {+ @reference } ] Normalize the goal as specified by :n:`@reductions`. If no reductions are specified by name, all reductions are applied. If any reductions are specified by name, then only the named reductions are applied. The reductions include: `beta` :term:`beta-reduction` of functional application :n:`delta {? @delta_reductions }` :term:`delta-reduction`: unfolding of transparent constants, see :ref:`controlling-the-reduction-strategies`. The form in :n:`@reductions` without the keyword `delta` includes `beta`, `iota` and `zeta` reductions in addition to `delta` using the given :n:`@delta_reductions`. :n:`{? - } [ {+ @reference } ]` without the `-`, limits delta unfolding to the listed constants. If the `-` is present, unfolding is applied to all constants that are not listed. Notice that the ``delta`` doesn't apply to variables bound by a let-in construction inside the term itself (use ``zeta`` to inline these). Opaque constants are never unfolded except by :tacn:`vm_compute` and :tacn:`native_compute` (see `#4476 `_ and :ref:`controlling-the-reduction-strategies`). `iota` :term:`iota-reduction` of pattern matching (`match`) over a constructed term and reduction of :g:`fix` and :g:`cofix` expressions. Shorthand for `match fix cofix`. `zeta` :term:`zeta-reduction`: reduction of :ref:`let-in definitions ` Normalization is done by first evaluating the head of the expression into :gdef:`weak-head normal form`, i.e. until the evaluation is blocked by a variable, an opaque constant, an axiom, such as in :n:`x u__1 … u__n`, :g:`match x with … end`, :g:`(fix f x {struct x} := …) x`, a constructed form (a :math:`\lambda`-expression, constructor, cofixpoint, inductive type, product type or sort) or a redex for which flags prevent reduction of the redex. Once a weak-head normal form is obtained, subterms are recursively reduced using the same strategy. There are two strategies for reduction to weak-head normal form: *lazy* (the :tacn:`lazy` tactic), or *call-by-value* (the :tacn:`cbv` tactic). The lazy strategy is a `call by need `_ strategy, with sharing of reductions: the arguments of a function call are weakly evaluated only when necessary, and if an argument is used several times then it is weakly computed only once. This reduction is efficient for reducing expressions with dead code. For instance, the proofs of a proposition :g:`exists x. P(x)` reduce to a pair of a witness :g:`t` and a proof that :g:`t` satisfies the predicate :g:`P`. Most of the time, :g:`t` may be computed without computing the proof of :g:`P(t)`, thanks to the lazy strategy. .. flag:: Kernel Term Sharing Turning this flag off disables the sharing of computations in :tacn:`lazy`, making it a call-by-name reduction. This also affects the reduction procedure used by the kernel when typechecking. By default sharing is activated. The call-by-value strategy is the one used in ML languages: the arguments of a function call are systematically weakly evaluated first. The lazy strategy is similar to how Haskell reduces terms. Although the lazy strategy always does fewer reductions than the call-by-value strategy, the latter is generally more efficient for evaluating purely computational expressions (i.e. with little dead code). .. tacn:: compute {? @delta_reductions } @simple_occurrences A variant form of :tacn:`cbv`. 
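.. example:: Restricting reduction with named flags (illustrative sketch)

   The following sketch illustrates how naming reduction rules restricts what
   :tacn:`cbv` (and hence :tacn:`compute`) may reduce; the comments describe the
   goals one would expect to observe at each step:

   .. coqdoc::

      Goal (fun x => x + 0) 3 = 3.
      Proof.
        cbv beta.             (* beta only: the goal becomes 3 + 0 = 3 *)
        cbv delta [Nat.add].  (* delta only: unfolds the constant Nat.add *)
        cbv.                  (* all reductions: the goal becomes 3 = 3 *)
        reflexivity.
      Qed.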
:opt:`Debug` ``"Cbv"`` makes :tacn:`cbv` (and its derivative :tacn:`compute`) print information about the constants it encounters and the unfolding decisions it makes. .. tacn:: simpl {? @delta_reductions } {? {| @reference_occs | @pattern_occs } } @simple_occurrences .. insertprodn reference_occs pattern_occs .. prodn:: reference_occs ::= @reference {? at @occs_nums } pattern_occs ::= @one_term {? at @occs_nums } Reduces a term to something still readable instead of fully normalizing it. It performs a sort of strong normalization with two key differences: + It unfolds constants only if they lead to an ι-reduction, i.e. reducing a match or unfolding a fixpoint. + When reducing a constant unfolding to (co)fixpoints, the tactic uses the name of the constant the (co)fixpoint comes from instead of the (co)fixpoint definition in recursive calls. :n:`@simple_occurrences` Permits selecting whether to reduce the conclusion and/or one or more hypotheses. While the `at` option of :n:`@occurrences` is not allowed here, :n:`@reference_occs` and :n:`@pattern_occs` have a somewhat less flexible `at` option for selecting specific occurrences. :tacn:`simpl` can unfold transparent constants whose name can be reused in recursive calls as well as those designated by :cmd:`Arguments` :n:`@reference … /` commands. For instance, a constant :g:`plus' := plus` may be unfolded and reused in recursive calls, but a constant such as :g:`succ := plus (S O)` is not unfolded unless it was specifically designated in an :cmd:`Arguments` command such as :n:`Arguments succ /.`. :n:`{| @reference_occs | @pattern_occs }` can limit the application of :tacn:`simpl` to: - applicative subterms whose :term:`head` is the constant :n:`@qualid` or is the constant used in the notation :n:`@string` (see :n:`@reference`) - subterms matching a pattern :n:`@one_term` .. tacn:: cbn {? @reductions } @simple_occurrences :tacn:`cbn` was intended to be a more principled, faster and more predictable replacement for :tacn:`simpl`. The main difference between :tacn:`cbn` and :tacn:`simpl` is that :tacn:`cbn` may unfold constants even when they cannot be reused in recursive calls: in the previous example, :g:`succ t` is reduced to :g:`S t`. :opt:`Debug` ``"RAKAM"`` makes :tacn:`cbn` print various debugging information. ``RAKAM`` is the Refolding Algebraic Krivine Abstract Machine. .. tacn:: hnf @simple_occurrences Replaces the current goal with its weak-head normal form according to the βδιζ-reduction rules, i.e. it reduces the :term:`head` of the goal until it becomes a product or an irreducible term. All inner βι-redexes are also reduced. While :tacn:`hnf` behaves similarly to :tacn:`simpl` and :tacn:`cbn`, unlike them, it does not recurse into subterms. The behavior of :tacn:`hnf` can be tuned using the :cmd:`Arguments` command. Example: The term :g:`fun n : nat => S n + S n` is not reduced by :n:`hnf`. .. note:: The δ rule only applies to transparent constants (see :ref:`controlling-the-reduction-strategies` on transparency and opacity). .. tacn:: red @simple_occurrences βιζ-reduces the constant at the head of `T` (which may be called the :gdef:`head constant`; :gdef:`head` means the beginning of the term), if possible, in the selected hypotheses and/or the goal, which must have the form: :n:`{? forall @open_binders,} T` (where `T` does not begin with a `forall`) to :n:`c t__1 … t__n` where :g:`c` is a constant. If :g:`c` is transparent then it replaces :g:`c` with its definition and reduces again until no further reduction is possible. .. 
exn:: No head constant to reduce. :undocumented: .. tacn:: unfold {+, @reference_occs } {? @occurrences } Applies :term:`delta-reduction` to the constants specified by each :n:`@reference_occs`. The selected hypotheses and/or goals are then reduced to βιζ-normal form. Use the general reduction tactics if you want to only apply the δ rule, for example :tacn:`cbv` :n:`delta [ @reference ]`. :n:`@reference_occs` If :n:`@reference` is a :n:`@qualid`, it must be a defined transparent constant or local definition (see :ref:`gallina-definitions` and :ref:`controlling-the-reduction-strategies`). If :n:`@reference` is a :n:`@string {? @scope_key}` where the :n:`@string` is the discriminating symbol of a notation (e.g. "+") or an expression defining a notation (e.g. `"_ + _"`), and the notation is an application whose head symbol is an unfoldable constant, then the tactic unfolds it. :n:`@occurrences` If :n:`@occurrences` is specified, the specified occurrences will be replaced in the selected hypotheses and/or goal. Otherwise every occurrence of the constants in the goal is replaced. If multiple :n:`@reference_occs` are given, any `at` clauses must be in the :n:`@reference_occs` rather than in :n:`@occurrences`. .. exn:: Cannot turn {| inductive | constructor } into an evaluable reference. Occurs when trying to unfold something that is defined as an inductive type (or constructor) and not as a definition. .. example:: .. coqtop:: abort all fail Goal 0 <= 1. unfold le. .. exn:: @ident is opaque. Raised if you are trying to unfold a definition that has been marked opaque. .. example:: .. coqtop:: abort all fail Opaque Nat.add. Goal 1 + 0 = 1. unfold Nat.add. .. exn:: Bad occurrence number of @qualid. :undocumented: .. exn:: @qualid does not occur. :undocumented: .. tacn:: fold {+ @one_term } @simple_occurrences First, this tactic reduces each :n:`@one_term` using the :tacn:`red` tactic. Then, every occurrence of the resulting terms in the selected hypotheses and/or goal will be replaced by its associated :n:`@one_term`. This tactic is particularly useful for reversing undesired unfoldings, which may make the goal very hard to read. The undesired unfoldings may be due to the limited capabilities of other reduction tactics. On the other hand, when an unfolded function applied to its argument has been reduced, the :tacn:`fold` tactic doesn't do anything. :tacn:`fold` :n:`@one_term__1 @one_term__2` is equivalent to :n:`fold @one_term__1; fold @one_term__2`. .. example:: :tacn:`fold` doesn't always undo :tacn:`unfold` .. coqtop:: all Goal ~0=0. unfold not. This :tacn:`fold` doesn't undo the preceding :tacn:`unfold` (it makes no change): .. coqtop:: all fold not. However, this :tacn:`pattern` followed by :tacn:`fold` does: .. coqtop:: all abort pattern (0 = 0). fold not. .. example:: Use :tacn:`fold` to reverse unfolding of `fold_right` .. coqtop:: none Require Import Coq.Lists.List. Local Open Scope list_scope. .. coqtop:: all abort Goal forall x xs, fold_right and True (x::xs). red. fold (fold_right and True). .. tacn:: pattern {+, @pattern_occs } {? @occurrences } Performs beta-expansion (the inverse of :term:`beta-reduction`) for the selected hypotheses and/or goals. The :n:`@one_term`\s in :n:`@pattern_occs` must be free subterms in the selected items.
The expansion is done for each selected item :g:`T` for a set of :n:`@one_term`\s in the :n:`@pattern_occs` by: + replacing all selected occurrences of the :n:`@one_term`\s in :g:`T` with fresh variables + abstracting these variables + applying the abstracted goal to the :n:`@one_term`\s For instance, if the current goal :g:`T` is expressible as :n:`φ(t__1 … t__n)` where the notation captures all the instances of the :n:`t__i` in φ, then :tacn:`pattern` :n:`t__1, …, t__n` generates the equivalent goal :n:`(fun (x__1:A__1) … (x__n:A__n) => φ(x__1 … x__n)) t__1 … t__n`. If :n:`t__i` occurs in one of the generated types :n:`A__j` (for `j > i`), occurrences will also be considered and possibly abstracted. This tactic can be used, for instance, when the tactic :tacn:`apply` fails on matching or to better control the behavior of :tacn:`rewrite`. Fast reduction tactics: vm_compute and native_compute ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :tacn:`vm_compute` is a brute-force but efficient tactic that first normalizes the terms before comparing them. It is based on a bytecode representation of terms similar to the bytecode representation used in the ZINC virtual machine :cite:`Leroy90`. It is especially useful for intensive computation of algebraic values, such as numbers, and for reflection-based tactics. :tacn:`native_compute` is based on converting the Coq code to OCaml. Note that both these tactics ignore :cmd:`Opaque` markings (see issue `#4776 `_) and do not apply unfolding strategies such as those set with :cmd:`Strategy`. :tacn:`native_compute` is typically two to five times faster than :tacn:`vm_compute` at applying conversion rules when Coq is running native code, but :tacn:`native_compute` requires considerably more overhead. We recommend using :tacn:`native_compute` when all of the following are true (otherwise use :tacn:`vm_compute`): - the running time in :tacn:`vm_compute` is at least 5-10 seconds - the size of the input term is small (e.g. hand-generated code rather than automatically-generated code that may have nested destructs on inductives with dozens or hundreds of constructors) - the output is small (e.g. you're returning a boolean, a natural number or an integer rather than a large abstract syntax tree) These tactics change existential variables in a way similar to other conversions while also adding a single explicit cast (see :ref:`type-cast`) to the proof term to tell the kernel which reduction engine to use. .. tacn:: vm_compute {? {| @reference_occs | @pattern_occs } } {? @occurrences } Evaluates the goal using the optimized call-by-value evaluation bytecode-based virtual machine described in :cite:`CompiledStrongReduction`. This algorithm is dramatically more efficient than the algorithm used for the :tacn:`cbv` tactic, but it cannot be fine-tuned. It is especially useful for full evaluation of algebraic objects. This includes the case of reflection-based tactics. .. tacn:: native_compute {? {| @reference_occs | @pattern_occs } } {? @occurrences } Evaluates the goal by compilation to OCaml as described in :cite:`FullReduction`. Depending on the configuration, this tactic can either default to :tacn:`vm_compute`, recompile dependencies or fail due to some missing precompiled dependencies, see :ref:`the native-compiler option ` for details. .. flag:: NativeCompute Timing This :term:`flag` causes all calls to the native compiler to print timing information for the conversion to native code, compilation, execution, and reification phases of native compilation.
Timing is printed in units of seconds of wall-clock time. .. flag:: NativeCompute Profiling On Linux, if you have the ``perf`` profiler installed, this :term:`flag` makes it possible to profile :tacn:`native_compute` evaluations. .. opt:: NativeCompute Profile Filename @string This :term:`option` specifies the profile output; the default is ``native_compute_profile.data``. The actual filename used will contain extra characters to avoid overwriting an existing file; that filename is reported to the user. That means you can individually profile multiple uses of :tacn:`native_compute` in a script. From the Linux command line, run ``perf report`` on the profile file to see the results. Consult the ``perf`` documentation for more details. Computing in a term: eval and Eval ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Evaluation of a term can be performed with: .. tacn:: eval @red_expr in @term .. insertprodn red_expr red_expr .. prodn:: red_expr ::= lazy {? @reductions } | cbv {? @reductions } | compute {? @delta_reductions } | vm_compute {? {| @reference_occs | @pattern_occs } } | native_compute {? {| @reference_occs | @pattern_occs } } | red | hnf | simpl {? @delta_reductions } {? {| @reference_occs | @pattern_occs } } | cbn {? @reductions } | unfold {+, @reference_occs } | fold {+ @one_term } | pattern {+, @pattern_occs } | @ident :tacn:`eval` is a :token:`value_tactic`. It returns the result of applying the conversion rules specified by :n:`@red_expr`. It does not change the proof state. The :n:`@red_expr` alternatives that begin with a keyword correspond to the tactic with the same name, though in several cases with simpler syntax than the tactic. :n:`@ident` is a named reduction expression created with :cmd:`Declare Reduction`. .. seealso:: Section :ref:`applyingconversionrules`. .. cmd:: Eval @red_expr in @term Performs the specified reduction on :n:`@term` and displays the resulting term with its type. If a proof is open, :n:`@term` may reference hypotheses of the selected goal. :cmd:`Eval` is a :token:`query_command`, so it may be prefixed with a goal selector. .. cmd:: Compute @term Evaluates :n:`@term` using the bytecode-based virtual machine. It is a shortcut for :cmd:`Eval` :n:`vm_compute in @term`. :cmd:`Compute` is a :token:`query_command`, so it may be prefixed with a goal selector. .. cmd:: Declare Reduction @ident := @red_expr Declares a short name for the reduction expression :n:`@red_expr`, for instance ``lazy beta delta [foo bar]``. This short name can then be used in :n:`Eval @ident in` or ``eval`` constructs. This command accepts the :attr:`local` attribute, which indicates that the reduction will be discarded at the end of the file or module. The name is not qualified. In particular declaring the same name in several modules or in several functor applications will be rejected if these declarations are not local. The name :n:`@ident` cannot be used directly as an Ltac tactic, but nothing prevents the user from also performing a :n:`Ltac @ident := @red_expr`. .. _controlling-the-reduction-strategies: Controlling reduction strategies and the conversion algorithm ------------------------------------------------------------- The commands to fine-tune the reduction strategies and the lazy conversion algorithm are described in this section. Also see :ref:`Args_effect_on_unfolding`, which supports additional fine-tuning. .. cmd:: Opaque {+ @reference } Marks the specified constants as :term:`opaque` so tactics won't :term:`unfold` them with :term:`delta-reduction`. 
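For instance (a minimal sketch; the constant `my_double` is introduced here purely for illustration), marking a definition opaque makes :tacn:`unfold` fail on it, and :cmd:`Transparent` (described below) restores the default behavior:

.. coqtop:: all reset abort

   Definition my_double (n : nat) := n + n.
   Opaque my_double.
   Goal my_double 1 = 2.
   Fail unfold my_double. (* my_double is opaque, so unfold is refused *)
   Transparent my_double.
   unfold my_double.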
"Constants" are items defined by commands such as :cmd:`Definition`, :cmd:`Let` (with an explicit body), :cmd:`Fixpoint`, :cmd:`CoFixpoint` and :cmd:`Function`. This command accepts the :attr:`global` attribute. By default, the scope of :cmd:`Opaque` is limited to the current section or module. :cmd:`Opaque` also affects Coq's conversion algorithm, causing it to delay unfolding the specified constants as much as possible when it has to check that two distinct applied constants are convertible. See Section :ref:`conversion-rules`. .. cmd:: Transparent {+ @reference } The opposite of :cmd:`Opaque`, it marks the specified constants as :term:`transparent` so that tactics may unfold them. See :cmd:`Opaque` above. This command accepts the :attr:`global` attribute. By default, the scope of :cmd:`Transparent` is limited to the current section or module. Note that constants defined by proofs ending with :cmd:`Qed` are irreversibly opaque; :cmd:`Transparent` will not make them transparent. This is consistent with the usual mathematical practice of *proof irrelevance*: what matters in a mathematical development is the sequence of lemma statements, not their actual proofs. This distinguishes lemmas from the usual defined constants, whose actual values are of course relevant in general. .. exn:: The reference @qualid was not found in the current environment. There is no constant named :n:`@qualid` in the environment. .. seealso:: :ref:`applyingconversionrules`, :cmd:`Qed` and :cmd:`Defined` .. _vernac-strategy: .. cmd:: Strategy {+ @strategy_level [ {+ @reference } ] } .. insertprodn strategy_level strategy_level .. prodn:: strategy_level ::= opaque | @integer | expand | transparent Generalizes the behavior of the :cmd:`Opaque` and :cmd:`Transparent` commands. It is used to fine-tune the strategy for unfolding constants, both at the tactic level and at the kernel level. This command associates a :n:`@strategy_level` with the qualified names in the :n:`@reference` sequence. Whenever two expressions with two distinct head constants are compared (for example, typechecking `f x` where `f : A -> B` and `x : C` will result in converting `A` and `C`), the one with lower level is expanded first. In case of a tie, the second one (appearing in the cast type) is expanded. This command accepts the :attr:`local` attribute, which limits its effect to the current section or module, in which case the section and module behavior is the same as :cmd:`Opaque` and :cmd:`Transparent` (without :attr:`global`). Levels can be one of the following (higher to lower): + ``opaque`` : level of opaque constants. They cannot be expanded by tactics (behaves like +∞, see next item). + :n:`@integer` : levels indexed by an integer. Level 0 corresponds to the default behavior, which corresponds to transparent constants. This level can also be referred to as ``transparent``. Negative levels correspond to constants to be expanded before normal transparent constants, while positive levels correspond to constants to be expanded after normal transparent constants. + ``expand`` : level of constants that should be expanded first (behaves like −∞) + ``transparent`` : Equivalent to level 0 .. cmd:: Print Strategy @reference This command prints the strategy currently associated with :n:`@reference`. It fails if :n:`@reference` is not an unfoldable reference, that is, neither a variable nor a constant. .. exn:: The reference is not unfoldable. :undocumented: .. cmd:: Print Strategies Print all the currently non-transparent strategies. .. 
tacn:: with_strategy @strategy_level_or_var [ {+ @reference } ] @ltac_expr3 .. insertprodn strategy_level_or_var strategy_level_or_var .. prodn:: strategy_level_or_var ::= @strategy_level | @ident Executes :token:`ltac_expr3`, applying the alternate unfolding behavior that the :cmd:`Strategy` command controls, but only for :token:`ltac_expr3`. This can be useful for guarding calls to reduction in tactic automation to ensure that certain constants are never unfolded by tactics like :tacn:`simpl` and :tacn:`cbn` or to ensure that unfolding does not fail. .. example:: .. coqtop:: all reset abort Opaque id. Goal id 10 = 10. Fail unfold id. with_strategy transparent [id] unfold id. .. warning:: Use this tactic with care, as effects do not persist past the end of the proof script. Notably, this fine-tuning of the conversion strategy is not in effect during :cmd:`Qed` nor :cmd:`Defined`, so this tactic is most useful either in combination with :tacn:`abstract`, which will check the proof early while the fine-tuning is still in effect, or to guard calls to conversion in tactic automation to ensure that, e.g., :tacn:`unfold` does not fail just because the user made a constant :cmd:`Opaque`. This can be illustrated with the following example involving the factorial function. .. coqtop:: in reset Fixpoint fact (n : nat) : nat := match n with | 0 => 1 | S n' => n * fact n' end. Suppose now that, for whatever reason, we want in general to unfold the :g:`id` function very late during conversion: .. coqtop:: in Strategy 1000 [id]. If we try to prove :g:`id (fact n) = fact n` by :tacn:`reflexivity`, it will now take time proportional to :math:`n!`, because Coq will keep unfolding :g:`fact` and :g:`*` and :g:`+` before it unfolds :g:`id`, resulting in a full computation of :g:`fact n` (in unary, because we are using :g:`nat`), which takes time :math:`n!`. We can see this cross the relevant threshold at around :math:`n = 9`: .. coqtop:: all abort Goal True. Time assert (id (fact 8) = fact 8) by reflexivity. Time assert (id (fact 9) = fact 9) by reflexivity. Note that behavior will be the same if you mark :g:`id` as :g:`Opaque` because while most reduction tactics refuse to unfold :g:`Opaque` constants, conversion treats :g:`Opaque` as merely a hint to unfold this constant last. We can get around this issue by using :tacn:`with_strategy`: .. coqtop:: all Goal True. Fail Timeout 1 assert (id (fact 100) = fact 100) by reflexivity. Time assert (id (fact 100) = fact 100) by with_strategy -1 [id] reflexivity. However, when we go to close the proof, we will run into trouble, because the reduction strategy changes are local to the tactic passed to :tacn:`with_strategy`. .. coqtop:: all abort fail exact I. Timeout 1 Defined. We can fix this issue by using :tacn:`abstract`: .. coqtop:: all Goal True. Time assert (id (fact 100) = fact 100) by with_strategy -1 [id] abstract reflexivity. exact I. Time Defined. On small examples this sort of behavior doesn't matter, but because Coq is a super-linear performance domain in so many places, unless great care is taken, tactic automation using :tacn:`with_strategy` may not be robustly performant when scaling the size of the input. .. warning:: In much the same way this tactic does not play well with :cmd:`Qed` and :cmd:`Defined` without using :tacn:`abstract` as an intermediary, this tactic does not play well with ``coqchk``, even when used with :tacn:`abstract`, due to the inability of tactics to persist information about conversion hints in the proof term. 
See `#12200 `_ for more details. coq-8.15.0/doc/sphinx/proofs/writing-proofs/index.rst000066400000000000000000000016351417001151100226030ustar00rootroot00000000000000.. _writing-proofs: =================== Basic proof writing =================== Coq is an interactive theorem prover, or proof assistant, which means that proofs can be constructed interactively through a dialog between the user and the assistant. The building blocks for this dialog are tactics which the user will use to represent steps in the proof of a theorem. The first section presents the proof mode (the core mechanism of the dialog between the user and the proof assistant). Then, several sections describe the available tactics. The last section covers the SSReflect proof language, which provides a consistent alternative set of tactics to the standard basic tactics. Additional tactics are documented in the next chapter :ref:`automatic-tactics`. .. toctree:: :maxdepth: 1 proof-mode ../../proof-engine/tactics equality reasoning-inductives ../../proof-engine/ssreflect-proof-language coq-8.15.0/doc/sphinx/proofs/writing-proofs/proof-mode.rst000066400000000000000000001203341417001151100235410ustar00rootroot00000000000000.. _proofhandling: ---------- Proof mode ---------- :gdef:`Proof mode ` is used to prove theorems. Coq enters proof mode when you begin a proof, such as with the :cmd:`Theorem` command. It exits proof mode when you complete a proof, such as with the :cmd:`Qed` command. Tactics, which are available only in proof mode, incrementally transform incomplete proofs to eventually generate a complete proof. When you run Coq interactively, such as through CoqIDE, Proof General or coqtop, Coq shows the current proof state (the incomplete proof) as you enter tactics. This information isn't shown when you run Coq in batch mode with `coqc`. Proof State ----------- The :gdef:`proof state` consists of one or more unproven goals. Each goal has a :gdef:`conclusion` (the statement that is to be proven) and a :gdef:`local context`, which contains named :term:`hypotheses ` (which are propositions), variables and local definitions that can be used in proving the conclusion. The proof may also use *constants* from the :term:`global environment` such as definitions and proven theorems. .. _conclusion_meaning_2: (Note that *conclusion* is also used to refer to the last part of an implication. For example, in `A -> B -> C`, `A` and `B` are :term:`premises ` and `C` is the conclusion.) The term ":gdef:`goal`" may refer to an entire goal or to the conclusion of a goal, depending on the context. The conclusion appears below a line and the local context appears above the line. The conclusion is a type. Each item in the local context begins with a name and ends, after a colon, with an associated type. :gdef:`Local definitions ` are shown in the form `n := 0 : nat`, for example, in which `nat` is the type of `0`. The local context of a goal contains items specific to the goal as well as section-local variables and hypotheses (see :ref:`gallina-assumptions`) defined in the current :ref:`section `. The latter are included in the initial proof state. Items in the local context are ordered; an item can only refer to items that appear before it. (A more mathematical description of the *local context* is :ref:`here `.) The :gdef:`global environment` has definitions and proven theorems that are global in scope. (A more mathematical description of the *global environment* is :ref:`here `.) 
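To make the `n := 0 : nat` form of local definitions mentioned above concrete, here is a minimal sketch (the :tacn:`set` tactic and the name `n` are chosen only for illustration):

.. coqtop:: all abort

   Goal 0 + 0 = 0.
   set (n := 0). (* the local definition n := 0 : nat now appears above the line *)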
When you begin proving a theorem, the proof state shows the statement of the theorem below the line and often nothing in the local context: .. coqtop:: none Parameter P: nat -> Prop. .. coqtop:: out Goal forall n m: nat, n > m -> P 1 /\ P 2. After applying the :tacn:`intros` :term:`tactic`, we see hypotheses above the line. The names of variables (`n` and `m`) and hypotheses (`H`) appear before a colon, followed by the type they represent. .. coqtop:: all intros. Some tactics, such as :tacn:`split`, create new goals, which may be referred to as :gdef:`subgoals ` for clarity. Goals are numbered from 1 to N at each step of the proof to permit applying a tactic to specific goals. The local context is only shown for the first goal. .. coqtop:: all split. "Variables" may refer specifically to local context items for which the type of their type is `Set` or `Type`, and :gdef:`"hypotheses" ` refers to items that are :term:`propositions `, for which the type of their type is `Prop` or `SProp`, but these terms are also used interchangeably. .. coqtop:: out let t_n := type of n in idtac "type of n :" t_n; let tt_n := type of t_n in idtac "type of" t_n ":" tt_n. let t_H := type of H in idtac "type of H :" t_H; let tt_H := type of t_H in idtac "type of" t_H ":" tt_H. A proof script, consisting of the tactics that are applied to prove a theorem, is often informally referred to as a "proof". The real proof, whether complete or incomplete, is the associated term, the :gdef:`proof term`, which users may occasionally want to examine. (This is based on the *Curry-Howard isomorphism* :cite:`How80,Bar81,Gir89,H89`, which is a correspondence between between proofs and terms and between :term:`propositions ` and types of λ-calculus. The isomorphism is also sometimes called the "propositions-as-types correspondence".) The :cmd:`Show Proof` command displays the incomplete proof term before you've completed the proof. For example, here's the proof term after using the :tacn:`split` tactic above: .. coqtop:: all Show Proof. The incomplete parts, the goals, are represented by :term:`existential variables ` with names that begin with `?Goal`. The :cmd:`Show Existentials` command shows each existential with the hypotheses and conclusion for the associated goal. .. coqtop:: all Show Existentials. Coq's kernel verifies the correctness of proof terms when it exits proof mode by checking that the proof term is :term:`well-typed` and that its type is the same as the theorem statement. After a proof is completed, :cmd:`Print` `` shows the proof term and its type. The type appears after the colon (`forall ...`), as for this theorem from Coq's standard library: .. coqtop:: all Print proj1. .. note:: Many tactics accept :n:`@term`\s as arguments and frequently refer to them with wording such as "the type of :token:`term`". When :n:`@term` is the name of a theorem or lemma, this wording refers to the type of the proof term, which is what's given in the :cmd:`Theorem` statement. When :n:`@term` is the name of a hypothesis, the wording refers to the type shown in the context for the hypothesis (i.e., after the colon). For terms that are more complex than just an :token:`ident`, you can use :cmd:`Check` :n:`@term` to display their type. .. _proof-editing-mode: Entering and exiting proof mode ------------------------------- Coq enters :term:`proof mode` when you begin a proof through commands such as :cmd:`Theorem` or :cmd:`Goal`. Coq user interfaces usually have a way to indicate that you're in proof mode. 
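A minimal sketch of a complete session, from entering proof mode to exiting it (the statement and hypothesis names are chosen only for illustration; the commands used here are described below):

.. coqtop:: all reset

   Goal forall A B : Prop, A -> B -> A.
   Proof.
     intros A B HA HB.
     exact HA.
   Qed.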
:term:`Tactics ` are available only in proof mode (currently they give syntax errors outside of proof mode). Most :term:`commands ` can be used both in and out of proof mode, but some commands only work in or outside of proof mode. When the proof is completed, you can exit proof mode with commands such as :cmd:`Qed`, :cmd:`Defined` and :cmd:`Save`. .. cmd:: Goal @type Asserts an unnamed proposition. This is intended for quick tests that a proposition is provable. If the proof is eventually completed and validated, you can assign a name with the :cmd:`Save` or :cmd:`Defined` commands. If no name is given, the name will be `Unnamed_thm` (or, if that name is already defined, a variant of that). .. cmd:: Qed Passes a completed :term:`proof term` to Coq's kernel to check that the proof term is :term:`well-typed` and to verify that its type matches the theorem statement. If it's verified, the proof term is added to the global environment as an :term:`opaque` constant using the declared name from the original goal. It's very rare for a proof term to fail verification. Generally this indicates a bug in a tactic you used or that you misused some unsafe tactics. .. exn:: Attempt to save an incomplete proof. :undocumented: .. exn:: No focused proof (No proof-editing in progress). You tried to use a proof mode command such as :cmd:`Qed` outside of proof mode. .. note:: Sometimes an error occurs when building the proof term, because tactics do not enforce completely the term construction constraints. The user should also be aware of the fact that since the proof term is completely rechecked at this point, one may have to wait a while when the proof is large. In some exceptional cases one may even incur a memory overflow. .. cmd:: Save @ident Similar to :cmd:`Qed`, except that the proof term is added to the global context with the name :token:`ident`, which overrides any name provided by the :cmd:`Theorem` command or its variants. .. cmd:: Defined {? @ident } Similar to :cmd:`Qed` and :cmd:`Save`, except the proof is made :term:`transparent`, which means that its content can be explicitly used for type checking and that it can be unfolded in conversion tactics (see :ref:`applyingconversionrules`, :cmd:`Opaque`, :cmd:`Transparent`). If :token:`ident` is specified, the proof is defined with the given name, which overrides any name provided by the :cmd:`Theorem` command or its variants. .. cmd:: Admitted This command is available in proof mode to give up the current proof and declare the initial goal as an axiom. .. cmd:: Abort {? {| All | @ident } } Cancels the current proof development, switching back to the previous proof development, or to the Coq toplevel if no other proof was being edited. :n:`@ident` Aborts editing the proof named :n:`@ident` for use when you have nested proofs. See also :flag:`Nested Proofs Allowed`. :n:`All` Aborts all current proofs. .. exn:: No focused proof (No proof-editing in progress). :undocumented: .. cmd:: Proof @term :name: Proof `term` This command applies in proof mode. It is equivalent to :n:`exact @term. Qed.` That is, you have to give the full proof in one gulp, as a proof term (see Section :ref:`applyingtheorems`). .. warning:: Use of this command is discouraged. In particular, it doesn't work in Proof General because it must immediately follow the command that opened proof mode, but Proof General inserts :cmd:`Unset` :flag:`Silent` before it (see `Proof General issue #498 `_). .. 
cmd:: Proof Is a no-op which is useful to delimit the sequence of tactic commands which start a proof, after a :cmd:`Theorem` command. It is a good practice to use :cmd:`Proof` as an opening parenthesis, closed in the script with a closing :cmd:`Qed`. .. seealso:: :cmd:`Proof with` .. cmd:: Proof using @section_var_expr {? with @ltac_expr } .. insertprodn section_var_expr starred_ident_ref .. prodn:: section_var_expr ::= {* @starred_ident_ref } | {? - } @section_var_expr50 section_var_expr50 ::= @section_var_expr0 - @section_var_expr0 | @section_var_expr0 + @section_var_expr0 | @section_var_expr0 section_var_expr0 ::= @starred_ident_ref | ( @section_var_expr ) {? * } starred_ident_ref ::= @ident {? * } | Type {? * } | All Opens proof mode, declaring the set of section variables (see :ref:`gallina-assumptions`) used by the proof. At :cmd:`Qed` time, the system verifies that the set of section variables used in the proof is a subset of the declared one. The set of declared variables is closed under type dependency. For example, if ``T`` is a variable and ``a`` is a variable of type ``T``, then the commands ``Proof using a`` and ``Proof using T a`` are equivalent. The set of declared variables always includes the variables used by the statement. In other words ``Proof using e`` is equivalent to ``Proof using Type + e`` for any declaration expression ``e``. :n:`- @section_var_expr50` Use all section variables except those specified by :n:`@section_var_expr50` :n:`@section_var_expr0 + @section_var_expr0` Use section variables from the union of both collections. See :ref:`nameaset` to see how to form a named collection. :n:`@section_var_expr0 - @section_var_expr0` Use section variables which are in the first collection but not in the second one. :n:`{? * }` Use the transitive closure of the specified collection. :n:`Type` Use only section variables occurring in the statement. Specifying :n:`*` uses the forward transitive closure of all the section variables occurring in the statement. For example, if the variable ``H`` has type ``p < 5`` then ``H`` is in ``p*`` since ``p`` occurs in the type of ``H``. :n:`All` Use all section variables. .. warn:: @ident is both name of a Collection and Variable, Collection @ident takes precedence over Variable. If a specified name is ambiguous (it could be either a :cmd:`Collection` or a :cmd:`Variable`), then it is assumed to be a :cmd:`Collection` name. .. warn:: Variable All is shadowed by Collection named All containing all variables. This is variant of the previous warning for the **All** collection. .. seealso:: :ref:`tactics-implicit-automation` .. attr:: using This :term:`attribute` can be applied to the :cmd:`Definition`, :cmd:`Example`, :cmd:`Fixpoint` and :cmd:`CoFixpoint` commands as well as to :cmd:`Lemma` and its variants. It takes a :n:`@section_var_expr`, in quotes, as its value. This is equivalent to specifying the same :n:`@section_var_expr` in :cmd:`Proof using`. .. example:: .. coqtop:: all reset Section Test. Variable n : nat. Hypothesis Hn : n <> 0. #[using="Hn"] Lemma example : 0 < n. .. coqtop:: in Abort. End Test. Proof using options ``````````````````` The following options modify the behavior of ``Proof using``. .. opt:: Default Proof Using "@section_var_expr" Set this :term:`option` to use :n:`@section_var_expr` as the default ``Proof using`` value. E.g. ``Set Default Proof Using "a b"`` will complete all ``Proof`` commands not followed by a ``using`` part with ``using a b``. .. 
flag:: Suggest Proof Using When this :term:`flag` is on, :cmd:`Qed` suggests a ``using`` annotation if the user did not provide one. .. _`nameaset`: Name a set of section hypotheses for ``Proof using`` ```````````````````````````````````````````````````` .. cmd:: Collection @ident := @section_var_expr This can be used to name a set of section hypotheses, with the purpose of making ``Proof using`` annotations more compact. .. example:: Define the collection named ``Some`` containing ``x``, ``y`` and ``z``:: Collection Some := x y z. Define the collection named ``Fewer`` containing only ``x`` and ``y``:: Collection Fewer := Some - z Define the collection named ``Many`` containing the set union or set difference of ``Fewer`` and ``Some``:: Collection Many := Fewer + Some Collection Many := Fewer - Some Define the collection named ``Many`` containing the set difference of ``Fewer`` and the unnamed collection ``x y``:: Collection Many := Fewer - (x y) .. deprecated:: 8.15 Redefining a collection, defining a collection with the same name as a variable, and invoking the :cmd:`Proof using` command when collection and variable names overlap are deprecated. See the warnings below and in the :cmd:`Proof using` command. .. exn:: "All" is a predefined collection containing all variables. It can't be redefined. When issuing a :cmd:`Proof using` command, **All** used as a collection name always means "use all variables". .. warn:: New Collection definition of @ident shadows the previous one. Redefining a :cmd:`Collection` overwrites the previous definition. .. warn:: @ident was already a defined Variable, the name @ident will refer to Collection when executing "Proof using" command. The :cmd:`Proof using` command allows specifying both :cmd:`Collection` and :cmd:`Variable` names. In case of ambiguity, a name is assumed to be Collection name. Proof modes ----------- When entering proof mode through commands such as :cmd:`Goal` and :cmd:`Proof`, Coq picks by default the |Ltac| mode. Nonetheless, there exist other proof modes shipped in the standard Coq installation, and furthermore some plugins define their own proof modes. The default proof mode used when opening a proof can be changed using the following option. .. opt:: Default Proof Mode @string This :term:`option` selects the proof mode to use when starting a proof. Depending on the proof mode, various syntactic constructs are allowed when writing a proof. All proof modes support commands; the proof mode determines which tactic language and set of tactic definitions are available. The possible option values are: `"Classic"` Activates the |Ltac| language and the tactics with the syntax documented in this manual. Some tactics are not available until the associated plugin is loaded, such as `SSR` or `micromega`. This proof mode is set when the :term:`prelude` is loaded. `"Noedit"` No tactic language is activated at all. This is the default when the :term:`prelude` is not loaded, e.g. through the `-noinit` option for `coqc`. `"Ltac2"` Activates the Ltac2 language and the Ltac2-specific variants of the documented tactics. This value is only available after :cmd:`Requiring ` Ltac2. :cmd:`Importing ` Ltac2 sets this mode. Some external plugins also define their own proof mode, which can be activated with this command. Navigation in the proof tree -------------------------------- .. cmd:: Undo {? {? To } @natural } Cancels the effect of the last :token:`natural` commands or tactics. The :n:`To @natural` form goes back to the specified state number. 
If :token:`natural` is not specified, the command goes back one command or tactic. .. cmd:: Restart Restores the proof to the original goal. .. exn:: No focused proof to restart. :undocumented: .. cmd:: Focus {? @natural } Focuses the attention on the first goal to prove or, if :token:`natural` is specified, the :token:`natural`\-th. The printing of the other goals is suspended until the focused goal is solved or unfocused. .. deprecated:: 8.8 Prefer the use of bullets or focusing brackets with a goal selector (see below). .. cmd:: Unfocus This command restores to focus the goal that were suspended by the last :cmd:`Focus` command. .. deprecated:: 8.8 .. cmd:: Unfocused Succeeds if the proof is fully unfocused, fails if there are some goals out of focus. .. _curly-braces: .. tacn:: {? {| @natural | [ @ident ] } : } %{ %} :name: {; } .. todo See https://github.com/coq/coq/issues/12004 and https://github.com/coq/coq/issues/12825. ``{`` (without a terminating period) focuses on the first goal. The subproof can only be unfocused when it has been fully solved (*i.e.*, when there is no focused goal left). Unfocusing is then handled by ``}`` (again, without a terminating period). See also an example in the next section. Note that when a focused goal is proved a message is displayed together with a suggestion about the right bullet or ``}`` to unfocus it or focus the next one. :n:`@natural:` Focuses on the :token:`natural`\-th goal to prove. :n:`[ @ident ]: %{` Focuses on the named goal :token:`ident`. .. note:: Goals are just existential variables and existential variables do not get a name by default. You can give a name to a goal by using :n:`refine ?[@ident]`. You may also wrap this in an Ltac-definition like: .. coqtop:: in Ltac name_goal name := refine ?[name]. .. seealso:: :ref:`existential-variables` .. example:: This first example uses the Ltac definition above, and the named goals only serve for documentation. .. coqtop:: all Goal forall n, n + 0 = n. Proof. induction n; [ name_goal base | name_goal step ]. [base]: { .. coqtop:: all reflexivity. .. coqtop:: in } .. coqtop:: all [step]: { .. coqtop:: all simpl. f_equal. assumption. } Qed. This can also be a way of focusing on a shelved goal, for instance: .. coqtop:: all Goal exists n : nat, n = n. eexists ?[x]. reflexivity. [x]: exact 0. Qed. .. exn:: This proof is focused, but cannot be unfocused this way. You are trying to use ``}`` but the current subproof has not been fully solved. .. exn:: No such goal (@natural). :undocumented: .. exn:: No such goal (@ident). :undocumented: .. exn:: Brackets do not support multi-goal selectors. Brackets are used to focus on a single goal given either by its position or by its name if it has one. .. seealso:: The error messages for bullets below. .. _bullets: Bullets ``````` Alternatively, proofs can be structured with bullets instead of ``{`` and ``}``. The use of a bullet ``b`` for the first time focuses on the first goal ``g``, the same bullet cannot be used again until the proof of ``g`` is completed, then it is mandatory to focus the next goal with ``b``. The consequence is that ``g`` and all goals present when ``g`` was focused are focused with the same bullet ``b``. See the example below. Different bullets can be used to nest levels. The scope of bullet does not go beyond enclosing ``{`` and ``}``, so bullets can be reused as further nesting levels provided they are delimited by these. Bullets are made of repeated ``-``, ``+`` or ``*`` symbols: .. 
prodn:: bullet ::= {| {+ - } | {+ + } | {+ * } } Note again that when a focused goal is proved a message is displayed together with a suggestion about the right bullet or ``}`` to unfocus it or focus the next one. .. note:: In Proof General (``Emacs`` interface to Coq), you must use bullets with the priority ordering shown above to have a correct indentation. For example ``-`` must be the outer bullet and ``**`` the inner one in the example below. The following example script illustrates all these features: .. example:: .. coqtop:: all Goal (((True /\ True) /\ True) /\ True) /\ True. Proof. split. - split. + split. ** { split. - trivial. - trivial. } ** trivial. + trivial. - assert True. { trivial. } assumption. Qed. .. exn:: Wrong bullet @bullet__1: Current bullet @bullet__2 is not finished. Before using bullet :n:`@bullet__1` again, you should first finish proving the current focused goal. Note that :n:`@bullet__1` and :n:`@bullet__2` may be the same. .. exn:: Wrong bullet @bullet__1: Bullet @bullet__2 is mandatory here. You must put :n:`@bullet__2` to focus on the next goal. No other bullet is allowed here. .. exn:: No such goal. Focus next goal with bullet @bullet. You tried to apply a tactic but no goals were under focus. Using :n:`@bullet` is mandatory here. .. FIXME: the :noindex: below works around a Sphinx issue. (https://github.com/sphinx-doc/sphinx/issues/4979) It should be removed once that issue is fixed. .. exn:: No such goal. Try unfocusing with %}. :noindex: You just finished a goal focused by ``{``, you must unfocus it with ``}``. Mandatory Bullets ~~~~~~~~~~~~~~~~~ Using :opt:`Default Goal Selector` with the ``!`` selector forces tactic scripts to keep focus to exactly one goal (e.g. using bullets) or use explicit goal selectors. Set Bullet Behavior ~~~~~~~~~~~~~~~~~~~ .. opt:: Bullet Behavior {| "None" | "Strict Subproofs" } This :term:`option` controls the bullet behavior and can take two possible values: - "None": this makes bullets inactive. - "Strict Subproofs": this makes bullets active (this is the default behavior). Modifying the order of goals ```````````````````````````` .. tacn:: cycle @int_or_var Reorders the selected goals so that the first :n:`@integer` goals appear after the other selected goals. If :n:`@integer` is negative, it puts the last :n:`@integer` goals at the beginning of the list. The tactic is only useful with a goal selector, most commonly `all:`. Note that other selectors reorder goals; `1,3: cycle 1` is not equivalent to `all: cycle 1`. See :tacn:`… : … (goal selector)`. .. example:: .. coqtop:: none reset Parameter P : nat -> Prop. .. coqtop:: all abort Goal P 1 /\ P 2 /\ P 3 /\ P 4 /\ P 5. repeat split. all: cycle 2. all: cycle -3. .. tacn:: swap @int_or_var @int_or_var Exchanges the position of the specified goals. Negative values for :n:`@integer` indicate counting goals backward from the end of the list of selected goals. Goals are indexed from 1. The tactic is only useful with a goal selector, most commonly `all:`. Note that other selectors reorder goals; `1,3: swap 1 3` is not equivalent to `all: swap 1 3`. See :tacn:`… : … (goal selector)`. .. example:: .. coqtop:: all abort Goal P 1 /\ P 2 /\ P 3 /\ P 4 /\ P 5. repeat split. all: swap 1 3. all: swap 1 -1. .. tacn:: revgoals Reverses the order of the selected goals. The tactic is only useful with a goal selector, most commonly `all :`. Note that other selectors reorder goals; `1,3: revgoals` is not equivalent to `all: revgoals`. See :tacn:`… : … (goal selector)`. .. example:: .. 
coqtop:: all abort Goal P 1 /\ P 2 /\ P 3 /\ P 4 /\ P 5. repeat split. all: revgoals. Postponing the proof of some goals `````````````````````````````````` Goals can be :gdef:`shelved` so they are no longer displayed in the proof state. They can then be :gdef:`unshelved` to make them visible again. .. tacn:: shelve This tactic moves all goals under focus to a shelf. While on the shelf, goals will not be focused on. They can be solved by unification, or they can be called back into focus with the command :cmd:`Unshelve`. .. tacn:: shelve_unifiable Shelves only the goals under focus that are mentioned in other goals. Goals that appear in the type of other goals can be solved by unification. .. example:: .. coqtop:: all abort Goal exists n, n=0. refine (ex_intro _ _ _). all: shelve_unifiable. reflexivity. .. cmd:: Unshelve This command moves all the goals on the shelf (see :tacn:`shelve`) from the shelf into focus, by appending them to the end of the current list of focused goals. .. tacn:: unshelve @ltac_expr1 Performs :n:`@tactic`, then unshelves existential variables added to the shelf by the execution of :n:`@tactic`, prepending them to the current goal. .. tacn:: give_up This tactic removes the focused goals from the proof. They are not solved, and cannot be solved later in the proof. As the goals are not solved, the proof cannot be closed. The ``give_up`` tactic can be used while editing a proof, to choose to write the proof script in a non-sequential order. .. _requestinginformation: Requesting information ---------------------- .. cmd:: Show {? {| @ident | @natural } } Displays the current goals. :n:`@natural` Display only the :token:`natural`\-th goal. :n:`@ident` Displays the named goal :token:`ident`. This is useful in particular to display a shelved goal but only works if the corresponding existential variable has been named by the user (see :ref:`existential-variables`) as in the following example. .. example:: .. coqtop:: all abort Goal exists n, n = 0. eexists ?[n]. Show n. .. exn:: No focused proof. :undocumented: .. exn:: No such goal. :undocumented: .. cmd:: Show Proof {? Diffs {? removed } } Displays the proof term generated by the tactics that have been applied so far. If the proof is incomplete, the term will contain holes, which correspond to subterms which are still to be constructed. Each hole is an existential variable, which appears as a question mark followed by an identifier. Specifying “Diffs” highlights the difference between the current and previous proof step. By default, the command shows the output once with additions highlighted. Including “removed” shows the output twice: once showing removals and once showing additions. It does not examine the :opt:`Diffs` option. See :ref:`showing_proof_diffs`. .. cmd:: Show Conjectures Prints the names of all the theorems that are currently being proved. As it is possible to start proving a previous lemma during the proof of a theorem, there may be multiple names. .. cmd:: Show Intro If the current goal begins by at least one product, prints the name of the first product as it would be generated by an anonymous :tacn:`intro`. The aim of this command is to ease the writing of more robust scripts. For example, with an appropriate Proof General macro, it is possible to transform any anonymous :tacn:`intro` into a qualified one such as ``intro y13``. In the case of a non-product goal, it prints nothing. .. cmd:: Show Intros Similar to the previous command. Simulates the naming process of :tacn:`intros`. .. 
cmd:: Show Existentials Displays all open goals / existential variables in the current proof along with the context and type of each variable. .. cmd:: Show Match @qualid Displays a template of the Gallina :token:`match` construct with a branch for each constructor of the type :token:`qualid`. This is used internally by `company-coq `_. .. example:: .. coqtop:: all Show Match nat. .. exn:: Unknown inductive type. :undocumented: .. cmd:: Show Universes Displays the set of all universe constraints and its normalized form at the current stage of the proof, useful for debugging universe inconsistencies. .. cmd:: Show Goal @natural at @natural Available in coqtop. Displays a goal at a proof state using the goal ID number and the proof state ID number. It is primarily for use by tools such as Prooftree that need to fetch goal history in this way. Prooftree is a tool for visualizing a proof as a tree that runs in Proof General. .. cmd:: Guarded Some tactics (e.g. :tacn:`refine`) allow to build proofs using fixpoint or cofixpoint constructions. Due to the incremental nature of proof construction, the check of the termination (or guardedness) of the recursive calls in the fixpoint or cofixpoint constructions is postponed to the time of the completion of the proof. The command :cmd:`Guarded` allows checking if the guard condition for fixpoint and cofixpoint is violated at some time of the construction of the proof without having to wait the completion of the proof. .. _showing_diffs: Showing differences between proof steps --------------------------------------- Coq can automatically highlight the differences between successive proof steps and between values in some error messages. Coq can also highlight differences in the proof term. For example, the following screenshots of CoqIDE and coqtop show the application of the same :tacn:`intros` tactic. The tactic creates two new hypotheses, highlighted in green. The conclusion is entirely in pale green because although it’s changed, no tokens were added to it. The second screenshot uses the "removed" option, so it shows the conclusion a second time with the old text, with deletions marked in red. Also, since the hypotheses are new, no line of old text is shown for them. .. comment screenshot produced with: Inductive ev : nat -> Prop := | ev_0 : ev 0 | ev_SS : forall n : nat, ev n -> ev (S (S n)). Fixpoint double (n:nat) := match n with | O => O | S n' => S (S (double n')) end. Goal forall n, ev n -> exists k, n = double k. intros n E. .. .. image:: ../../_static/diffs-coqide-on.png :alt: CoqIDE with Set Diffs on .. .. image:: ../../_static/diffs-coqide-removed.png :alt: CoqIDE with Set Diffs removed .. .. image:: ../../_static/diffs-coqtop-on3.png :alt: coqtop with Set Diffs on This image shows an error message with diff highlighting in CoqIDE: .. .. image:: ../../_static/diffs-error-message.png :alt: CoqIDE error message with diffs How to enable diffs ``````````````````` .. opt:: Diffs {| "on" | "off" | "removed" } This :term:`option` is used to enable diffs. The “on” setting highlights added tokens in green, while the “removed” setting additionally reprints items with removed tokens in red. Unchanged tokens in modified items are shown with pale green or red. Diffs in error messages use red and green for the compared values; they appear regardless of the setting. (Colors are user-configurable.) 
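For example, a script meant to be stepped through in coqtop or Proof General might enable the richer display with (a minimal sketch; as explained below, CoqIDE users should enable diffs from the ``View`` menu rather than with this command)::

   Set Diffs "removed".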
For coqtop, showing diffs can be enabled when starting coqtop with the ``-diffs on|off|removed`` command-line option or by setting the :opt:`Diffs` option within Coq. You will need to provide the ``-color on|auto`` command-line option when you start coqtop in either case. Colors for coqtop can be configured by setting the ``COQ_COLORS`` environment variable. See section :ref:`customization-by-environment-variables`. Diffs use the tags ``diff.added``, ``diff.added.bg``, ``diff.removed`` and ``diff.removed.bg``. In CoqIDE, diffs should be enabled from the ``View`` menu. Don’t use the ``Set Diffs`` command in CoqIDE. You can change the background colors shown for diffs from the ``Edit | Preferences | Tags`` panel by changing the settings for the ``diff.added``, ``diff.added.bg``, ``diff.removed`` and ``diff.removed.bg`` tags. This panel also lets you control other attributes of the highlights, such as the foreground color, bold, italic, underline and strikeout. Proof General can also display Coq-generated proof diffs automatically. Please see the PG documentation section "`Showing Proof Diffs" `_) for details. How diffs are calculated ```````````````````````` Diffs are calculated as follows: 1. Select the old proof state to compare to, which is the proof state before the last tactic that changed the proof. Changes that only affect the view of the proof, such as ``all: swap 1 2``, are ignored. 2. For each goal in the new proof state, determine what old goal to compare it to—the one it is derived from or is the same as. Match the hypotheses by name (order is ignored), handling compacted items specially. 3. For each hypothesis and conclusion (the “items”) in each goal, pass them as strings to the lexer to break them into tokens. Then apply the Myers diff algorithm :cite:`Myers` on the tokens and add appropriate highlighting. Notes: * Aside from the highlights, output for the "on" option should be identical to the undiffed output. * Goals completed in the last proof step will not be shown even with the "removed" setting. .. comment The following screenshots show diffs working with multiple goals and with compacted hypotheses. In the first one, notice that the goal ``P 1`` is not highlighted at all after the split because it has not changed. .. todo: Use this script and remove the screenshots when COQ_COLORS works for coqtop in sphinx .. coqtop:: none Set Diffs "on". Parameter P : nat -> Prop. Goal P 1 /\ P 2 /\ P 3. .. coqtop:: out split. .. coqtop:: all abort 2: split. .. .. coqtop:: none Set Diffs "on". Goal forall n m : nat, n + m = m + n. Set Diffs "on". .. coqtop:: out intros n. .. coqtop:: all abort intros m. This screen shot shows the result of applying a :tacn:`split` tactic that replaces one goal with 2 goals. Notice that the goal ``P 1`` is not highlighted at all after the split because it has not changed. .. .. image:: ../../_static/diffs-coqide-multigoal.png :alt: coqide with Set Diffs on with multiple goals Diffs may appear like this after applying a :tacn:`intro` tactic that results in a compacted hypotheses: .. .. image:: ../../_static/diffs-coqide-compacted.png :alt: coqide with Set Diffs on with compacted hypotheses .. _showing_proof_diffs: "Show Proof" differences ```````````````````````` To show differences in the proof term: - In coqtop and Proof General, use the :cmd:`Show Proof` `Diffs` command. 
- In CoqIDE, position the cursor on or just after a tactic to compare the proof term after the tactic with the proof term before the tactic, then select `View / Show Proof` from the menu or enter the associated key binding. Differences will be shown applying the current `Show Diffs` setting from the `View` menu. If the current setting is `Don't show diffs`, diffs will not be shown. Output with the "added and removed" option looks like this: .. .. image:: ../../_static/diffs-show-proof.png :alt: coqide with Set Diffs on with compacted hypotheses Delaying solving unification constraints ---------------------------------------- .. tacn:: solve_constraints :undocumented: .. flag:: Solve Unification Constraints By default, after each tactic application, postponed typechecking unification problems are resolved using heuristics. Unsetting this :term:`flag` disables this behavior, allowing tactics to leave unification constraints unsolved. Use the :tacn:`solve_constraints` tactic at any point to solve the constraints. Proof maintenance ----------------- *Experimental.* Many tactics, such as :tacn:`intros`, can automatically generate names, such as "H0" or "H1" for a new hypothesis introduced from a goal. Subsequent proof steps may explicitly refer to these names. However, future versions of Coq may not assign names exactly the same way, which could cause the proof to fail because the new names don't match the explicit references in the proof. The following :flag:`Mangle Names` settings let users find all the places where proofs rely on automatically generated names, which can then be named explicitly to avoid any incompatibility. These settings cause Coq to generate different names, producing errors for references to automatically generated names. .. flag:: Mangle Names When this :term:`flag` is set (it is off by default), generated names use the prefix specified in the following option instead of the default prefix. .. opt:: Mangle Names Prefix @string This :term:`option` specifies the prefix to use when generating names. .. flag:: Mangle Names Light When this :term:`flag` is set (it is off by default), the names generated by :flag:`Mangle Names` only add the :opt:`Mangle Names Prefix` to the original name. Controlling proof mode ---------------------- .. opt:: Hyps Limit @natural This :term:`option` controls the maximum number of hypotheses displayed in goals after the application of a tactic. All the hypotheses remain usable in the proof development. When unset, it goes back to the default mode which is to print all available hypotheses. .. flag:: Nested Proofs Allowed When turned on (it is off by default), this :term:`flag` enables support for nested proofs: a new assertion command can be inserted before the current proof is finished, in which case Coq will temporarily switch to the proof of this *nested lemma*. When the proof of the nested lemma is finished (with :cmd:`Qed` or :cmd:`Defined`), its statement will be made available (as if it had been proved before starting the previous proof) and Coq will switch back to the proof of the previous assertion. .. flag:: Printing Goal Names When this :term:`flag` is turned on, the name of the goal is printed in proof mode, which can be useful in cases of cross references between goals. Controlling memory usage ------------------------ .. cmd:: Print Debug GC Prints heap usage statistics, which are values from the `stat` type of the `Gc` module described `here `_ in the OCaml documentation. 
The `live_words`, `heap_words` and `top_heap_words` values give the basic information. Words are 8 bytes or 4 bytes, respectively, for 64- and 32-bit executables. When experiencing high memory usage the following commands can be used to force Coq to optimize some of its internal data structures. .. cmd:: Optimize Proof Shrink the data structure used to represent the current proof. .. cmd:: Optimize Heap Perform a heap compaction. This is generally an expensive operation. See: `OCaml Gc.compact `_ There is also an analogous tactic :tacn:`optimize_heap`. Memory usage parameters can be set through the :ref:`OCAMLRUNPARAM ` environment variable. coq-8.15.0/doc/sphinx/proofs/writing-proofs/reasoning-inductives.rst000066400000000000000000001562661417001151100256470ustar00rootroot00000000000000============================== Reasoning with inductive types ============================== Applying constructors --------------------- The tactics presented here specialize :tacn:`apply` and :tacn:`eapply` to constructors of inductive types. .. tacn:: constructor {? @nat_or_var } {? with @bindings } First does :n:`repeat intro; hnf` on the goal. If the result is an inductive type, then apply the appropriate constructor(s), and otherwise fail. If :n:`@nat_or_var` is specified and has the value `i`, it uses :n:`apply c__i`, where :n:`c__i` is the i-th constructor of :g:`I`. If not specified, the tactic tries all the constructors, which can result in more than one success (e.g. for `\\/`) when using backtracking tactics such as `constructor; ...`. See :tacn:`ltac-seq`. :n:`{? with @bindings }` If specified, the :n:`apply` is done as :n:`apply … with @bindings`. .. warning:: The terms in :token:`bindings` are checked in the context where constructor is executed and not in the context where :tacn:`apply` is executed (the introductions are not taken into account). .. exn:: Not an inductive product. :undocumented: .. exn:: Not enough constructors. :undocumented: .. exn:: The type has no constructors. :undocumented: .. tacn:: split {? with @bindings } Equivalent to :n:`constructor 1 {? with @bindings }` when the conclusion is an inductive type with a single constructor. The :n:`@bindings` specify any parameters required for the constructor. It is typically used to split conjunctions in the conclusion such as `A /\\ B` into two new goals `A` and `B`. .. tacn:: exists {*, @bindings } Equivalent to :n:`constructor 1 with @bindings__i` for each set of bindings (or just :n:`constructor 1` if there are no :n:`@bindings`) when the conclusion is an inductive type with a single constructor. It is typically used on existential quantifications in the form `exists x, P x.` .. exn:: Not an inductive goal with 1 constructor. :undocumented: .. tacn:: left {? with @bindings } right {? with @bindings } These tactics apply only if :g:`I` has two constructors, for instance in the case of a disjunction `A \\/ B`. Then they are respectively equivalent to :n:`constructor 1 {? with @bindings }` and :n:`constructor 2 {? with @bindings }`. .. exn:: Not an inductive goal with 2 constructors. :undocumented: .. tacn:: econstructor {? @nat_or_var {? with @bindings } } eexists {*, @bindings } esplit {? with @bindings } eleft {? with @bindings } eright {? with @bindings } These tactics behave like :tacn:`constructor`, :tacn:`exists`, :tacn:`split`, :tacn:`left` and :tacn:`right`, but they introduce existential variables instead of failing when a variable can't be instantiated (cf. :tacn:`eapply` and :tacn:`apply`). .. 
example:: :tacn:`constructor`, :tacn:`left` and :tacn:`right` .. coqtop:: reset all Print or. (* or, represented by \/, has two constructors, or_introl and or_intror *) Goal forall P1 P2 : Prop, P1 -> P1 \/ P2. constructor 1. (* equivalent to "left" *) apply H. (* success *) In contrast, we won't be able to complete the proof if we select constructor 2: .. coqtop:: reset none Goal forall P1 P2 : Prop, P1 -> P1 \/ P2. .. coqtop:: all constructor 2. (* equivalent to "right" *) You can also apply a constructor by name: .. coqtop:: reset none Goal forall P1 P2 : Prop, P1 -> P1 \/ P2. .. coqtop:: all intros; apply or_introl. (* equivalent to "left" *) .. _CaseAnalysisAndInduction: Case analysis ------------- The tactics in this section implement case analysis on inductive or coinductive objects (see :ref:`variants`). .. comment Notes contrasting the various case analysis tactics: https://github.com/coq/coq/pull/14676#discussion_r697904963 .. tacn:: destruct {+, @induction_clause } {? @induction_principle } .. insertprodn induction_clause induction_arg .. prodn:: induction_clause ::= @induction_arg {? as @or_and_intropattern } {? eqn : @naming_intropattern } {? @occurrences } induction_arg ::= @one_term_with_bindings | @natural Performs case analysis by generating a subgoal for each constructor of the inductive or coinductive type selected by :n:`@induction_arg`. The selected subterm, after possibly doing an :tacn:`intros`, must have an inductive or coinductive type. Unlike :tacn:`induction`, :n:`destruct` generates no induction hypothesis. In each new subgoal, the tactic replaces the selected subterm with the associated constructor applied to its arguments, if any. :n:`{+, @induction_clause }` Giving multiple :n:`@induction_clause`\s is equivalent to applying :n:`destruct` serially on each :n:`@induction_clause`. :n:`@induction_arg` + If :n:`@one_term` (in :n:`@one_term_with_bindings`) is an identifier :n:`@ident`: + If :n:`@ident` denotes a quantified variable of the goal, then :n:`destruct @ident` behaves like :tacn:`intros` :n:`until @ident; destruct @ident`. + If :n:`@ident` is no longer dependent in the goal after application of :n:`destruct`, it is erased. To avoid erasure, use parentheses, as in :n:`destruct (@ident)`. + :n:`@one_term` may contain holes that are denoted by “_”. In this case, the tactic selects the first subterm that matches the pattern and performs case analysis using that subterm. + If :n:`@induction_arg` is a :n:`@natural`, then :n:`destruct @natural` behaves like :n:`intros until @natural` followed by :n:`destruct` applied to the last introduced hypothesis. :n:`as @or_and_intropattern` Provides names for (or applies further transformations to) the variables and hypotheses introduced in each new subgoal. The :token:`or_and_intropattern` must have one :n:`{* @intropattern }` for each constructor, given in the order in which the constructors are defined. If there are not enough names, Coq picks fresh names. Inner :n:`intropattern`\s can also split introduced hypotheses into multiple hypotheses or subgoals. :n:`eqn : @naming_intropattern` Generates a new hypothesis in each new subgoal that is an equality between the term being case-analyzed and the associated constructor (applied to its arguments). The name of the new item may be specified in the :n:`@naming_intropattern`. :n:`with @bindings` (in :n:`@one_term_with_bindings`) Provides explicit instances for the :term:`dependent premises ` of the type of :token:`one_term`. 
:n:`@occurrences` Selects specific subterms of the goal and/or hypotheses to apply the tactic to. See :ref:`Occurrence clauses `. If it occurs in the :n:`@induction_principle`, then there can only be one :n:`@induction_clause`, which can't have its own :n:`@occurrences` clause. :n:`@induction_principle` Makes the tactic equivalent to :tacn:`induction` :n:`{+, @induction_clause } @induction_principle`. .. _example_destruct_ind_concl: .. example:: Using :tacn:`destruct` on an argument with premises .. coqtop:: reset in Parameter A B C D : Prop. .. coqtop:: all Goal (A -> B \/ C) -> D. intros until 1. destruct H. Show 2. Show 3. The single tactic :n:`destruct 1` is equivalent to the :tacn:`intros` and :tacn:`destruct` used here. .. tacn:: edestruct {+, @induction_clause } {? @induction_principle } If the type of :n:`@one_term` (in :n:`@induction_arg`) has :term:`dependent premises ` or dependent premises whose values are not inferrable from the :n:`with @bindings` clause, :n:`edestruct` turns them into existential variables to be resolved later on. .. tacn:: case {+, @induction_clause } {? @induction_principle } An older, more basic tactic to perform case analysis without recursion. We recommend using :tacn:`destruct` instead where possible. `case` only modifies the goal; it does not modify the :term:`local context`. .. tacn:: ecase {+, @induction_clause } {? @induction_principle } If the type of :n:`@one_term` (in :n:`@induction_arg`) has :term:`dependent premises ` or dependent premises whose values are not inferrable from the :n:`with @bindings` clause, :n:`ecase` turns them into existential variables to be resolved later on. .. tacn:: case_eq @one_term A variant of the :n:`case` tactic that allows performing case analysis on a term without completely forgetting its original form. This is done by generating equalities between the original form of the term and the outcomes of the case analysis. We recommend using the :tacn:`destruct` tactic with an `eqn:` clause instead. .. tacn:: casetype @one_term :undocumented: .. tacn:: simple destruct {| @ident | @natural } Equivalent to :tacn:`intros` :n:`until {| @ident | @natural }; case @ident` where :n:`@ident` is a quantified variable of the goal and otherwise fails. .. tacn:: dependent destruction @ident {? generalizing {+ @ident } } {? using @one_term } :undocumented: There is a long example of :tacn:`dependent destruction` and an explanation of the underlying technique :ref:`here `. Induction --------- .. tacn:: induction {+, @induction_clause } {? @induction_principle } .. insertprodn induction_principle induction_principle .. prodn:: induction_principle ::= using @one_term {? with @bindings } {? @occurrences } Applies an :term:`induction principle` to generate a subgoal for each constructor of an inductive type. If the argument is :term:`dependent ` in the conclusion or some hypotheses of the goal, the argument is replaced by the appropriate constructor in each of the resulting subgoals and induction hypotheses are added to the local context using names whose prefix is **IH**. The tactic is similar to :tacn:`destruct`, except that `destruct` doesn't generate induction hypotheses. :n:`induction` and :tacn:`destruct` are very similar. Aside from the following differences, please refer to the description of :tacn:`destruct` while mentally substituting :n:`induction` for :tacn:`destruct`. 
:n:`{+, @induction_clause }` If no :n:`@induction_principle` clause is provided, this is equivalent to doing :n:`induction` on the first :n:`@induction_clause` followed by :n:`destruct` on any subsequent clauses. :n:`@induction_principle` :n:`@one_term` specifies which :term:`induction principle` to use. The optional :n:`with @bindings` gives any values that must be substituted into the induction principle. The number of :n:`@bindings` must be the same as the number of parameters of the induction principle. If unspecified, the tactic applies the appropriate :term:`induction principle` that was automatically generated when the inductive type was declared based on the sort of the goal. .. exn:: Not an inductive product. :undocumented: .. exn:: Unable to find an instance for the variables @ident … @ident. Use the :n:`with @bindings` clause or the :tacn:`einduction` tactic instead. .. example:: .. coqtop:: reset all Lemma induction_test : forall n:nat, n = n -> n <= n. intros n H. induction n. exact (le_n 0). .. example:: :n:`induction` with :n:`@occurrences` .. coqtop:: reset all Lemma induction_test2 : forall n:nat, n = n -> n <= n. intros. induction n in H |-. Show 2. .. tacn:: einduction {+, @induction_clause } {? @induction_principle } Behaves like :tacn:`induction` except that it does not fail if some dependent premise of the type of :n:`@one_term` is not inferrable. Instead, the unresolved premises are posed as existential variables to be inferred later, in the same way as :tacn:`eapply` does. .. tacn:: elim @one_term_with_bindings {? using @one_term {? with @bindings } } An older, more basic induction tactic. Unlike :tacn:`induction`, ``elim`` only modifies the goal; it does not modify the :term:`local context`. We recommend using :tacn:`induction` instead where possible. :n:`with @bindings` (in :n:`@one_term_with_bindings`) Explicitly gives instances to the premises of the type of :n:`@one_term` (see :ref:`bindings`). :n:`{? using @one_term {? with @bindings } }` Allows explicitly giving an induction principle :n:`@one_term` that is not the standard one for the underlying inductive type of :n:`@one_term`. The :n:`@bindings` clause allows instantiating premises of the type of :n:`@one_term`. .. tacn:: eelim @one_term_with_bindings {? using @one_term {? with @bindings } } If the type of :n:`@one_term` has dependent premises, this turns them into existential variables to be resolved later on. .. tacn:: elimtype @one_term The argument :token:`type` must be inductively defined. :n:`elimtype I` is equivalent to :tacn:`cut` :n:`I. intro Hn; elim Hn;` :tacn:`clear` :n:`Hn.` Therefore the hypothesis :g:`Hn` will not appear in the context(s) of the subgoal(s). Conversely, if :g:`t` is a :n:`@one_term` of (inductive) type :g:`I` that does not occur in the goal, then :n:`elim t` is equivalent to :n:`elimtype I; only 2:` :tacn:`exact` `t.` .. tacn:: simple induction {| @ident | @natural } Behaves like :n:`intros until {| @ident | @natural }; elim @ident` when :n:`@ident` is a quantified variable of the goal. .. tacn:: dependent induction @ident {? {| generalizing | in } {+ @ident } } {? using @one_term } The *experimental* tactic :tacn:`dependent induction` performs induction-inversion on an instantiated inductive predicate. One needs to first :cmd:`Require` the `Coq.Program.Equality` module to use this tactic. The tactic is based on the BasicElim tactic by Conor McBride :cite:`DBLP:conf/types/McBride00` and the work of Cristina Cornes around inversion :cite:`DBLP:conf/types/CornesT95`. 
From an instantiated inductive predicate and a goal, it generates an equivalent goal where the hypothesis has been generalized over its indexes which are then constrained by equalities to be the right instances. This permits to state lemmas without resorting to manually adding these equalities and still get enough information in the proofs. :n:`{| generalizing | in } {+ @ident }` First generalizes the goal by the given variables so that they are universally quantified in the goal. This is generally what one wants to do with variables that are inside constructors in the induction hypothesis. The other ones need not be further generalized. There is a long example of :tacn:`dependent induction` and an explanation of the underlying technique :ref:`here `. .. example:: .. coqtop:: reset all Lemma lt_1_r : forall n:nat, n < 1 -> n = 0. intros n H ; induction H. Here we did not get any information on the indexes to help fulfill this proof. The problem is that, when we use the ``induction`` tactic, we lose information on the hypothesis instance, notably that the second argument is 1 here. Dependent induction solves this problem by adding the corresponding equality to the context. .. coqtop:: reset all Require Import Coq.Program.Equality. Lemma lt_1_r : forall n:nat, n < 1 -> n = 0. intros n H ; dependent induction H. The subgoal is cleaned up as the tactic tries to automatically simplify the subgoals with respect to the generated equalities. In this enriched context, it becomes possible to solve this subgoal. .. coqtop:: all reflexivity. Now we are in a contradictory context and the proof can be solved. .. coqtop:: all abort inversion H. This technique works with any inductive predicate. In fact, the :tacn:`dependent induction` tactic is just a wrapper around the :tacn:`induction` tactic. One can make its own variant by just writing a new tactic based on the definition found in ``Coq.Program.Equality``. .. seealso:: :tacn:`functional induction` .. tacn:: fix @ident @natural {? with {+ ( @ident {* @simple_binder } {? %{ struct @name %} } : @type ) } } .. insertprodn simple_binder simple_binder .. prodn:: simple_binder ::= @name | ( {+ @name } : @term ) A primitive tactic that starts a proof by induction. Generally, higher-level tactics such as :tacn:`induction` or :tacn:`elim` are easier to use. The :n:`@ident`\s (including the first one before the `with` clause) are the names of the induction hypotheses. :n:`@natural` tells on which premise of the current goal the induction acts, starting from 1, counting both dependent and non-dependent products, but skipping local definitions. The current lemma must be composed of at least :n:`@natural` products. As in a fix expression, induction hypotheses must be used on structurally smaller arguments. The verification that inductive proof arguments are correct is done only when registering the lemma in the global environment. To know if the use of induction hypotheses is correct during the interactive development of a proof, use the command :cmd:`Guarded`. :n:`with {+ ( @ident {* @simple_binder } {? %{ struct @name %} } : @type ) }` Starts a proof by mutual induction. The statements to be proven are :n:`forall @simple_binder__i, @type__i`. The identifiers :n:`@ident` (including the first one before the `with` clause) are the names of the induction hypotheses. The identifiers :n:`@name` (in the `{ struct ... 
}` clauses) are the respective names of the premises on which the induction is performed in the statements to be proved (if not given, Coq guesses what they are). .. tacn:: cofix @ident {? with {+ ( @ident {* @simple_binder } : @type ) } } Starts a proof by coinduction. The :n:`@ident`\s (including the first one before the `with` clause) are the names of the coinduction hypotheses. As in a cofix expression, the use of induction hypotheses must be guarded by a constructor. The verification that the use of coinductive hypotheses is correct is done only at the time of registering the lemma in the global environment. To know if the use of coinduction hypotheses is correct at some time of the interactive development of a proof, use the command :cmd:`Guarded`. :n:`with {+ ( @ident {* @simple_binder } : @type ) }` Starts a proof by mutual coinduction. The statements to be proven are :n:`forall @simple_binder__i, @type__i`. The identifiers :n:`@ident` (including the first one before the `with` clause) are the names of the coinduction hypotheses. .. _equality-inductive_types: Equality of inductive types --------------------------- This section describes some special purpose tactics to work with :term:`Leibniz equality` of inductive sets or types. .. tacn:: discriminate {? @induction_arg } Proves any goal for which a hypothesis in the form :n:`@term__1 = @term__2` states an impossible structural equality for an inductive type. If :n:`@induction_arg` is not given, it checks all the hypotheses for impossible equalities. For example, :g:`(S (S O)) = (S O)` is impossible. If provided, :n:`@induction_arg` is a proof of an equality, typically specified as the name of a hypothesis. If no :n:`@induction_arg` is provided and the goal is in the form :n:`@term__1 <> @term__2`, then the tactic behaves like :n:`intro @ident; discriminate @ident`. The tactic traverses the normal forms of :n:`@term__1` and :n:`@term__2`, looking for subterms :g:`u` and :g:`w` placed in the same positions and whose head symbols are different constructors. If such subterms are present, the equality is impossible and the current goal is completed. Otherwise the tactic fails. Note that opaque constants are not expanded by δ reductions while computing the normal form. :n:`@ident` (in :n:`@induction_arg`) Checks the hypothesis :n:`@ident` for impossible equalities. If :n:`@ident` is not already in the context, this is equivalent to :n:`intros until @ident; discriminate @ident`. :n:`@natural` (in :n:`@induction_arg`) Equivalent to :tacn:`intros` :n:`until @natural; discriminate @ident`, where :n:`@ident` is the identifier for the last introduced hypothesis. :n:`@one_term with @bindings` (in :n:`@induction_arg`) Equivalent to :n:`discriminate @one_term` but uses the given bindings to instantiate parameters or hypotheses of :n:`@one_term`. :n:`@one_term` must be a proof of :n:`@term__1 = @term__2`. .. exn:: No primitive equality found. :undocumented: .. exn:: Not a discriminable equality. :undocumented: .. tacn:: ediscriminate {? @induction_arg } Works the same as :tacn:`discriminate` but if the type of :token:`one_term`, or the type of the hypothesis referred to by :token:`natural`, has uninstantiated parameters, these parameters are left as existential variables. .. tacn:: injection {? @induction_arg } {? as {* @simple_intropattern } } Exploits the property that constructors of inductive types are injective, i.e. that if :n:`c` is a constructor of an inductive type and :n:`c t__1 = c t__2` then :n:`t__1 = t__2` are equal too. 
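For instance (a minimal sketch, separate from the reference example further below), injectivity of the constructor ``S`` lets :tacn:`injection` turn a hypothesis ``S n = S m`` into ``n = m``:

.. code-block:: coq

   Goal forall n m : nat, S n = S m -> n = m.
   Proof.
     intros n m H.
     (* "as E" names the derived equality instead of adding it as a
        premise of the goal. *)
     injection H as E.
     exact E.
   Qed.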
If there is a hypothesis `H` in the form :n:`@term__1 = @term__2`, then :n:`injection H` applies the injectivity of constructors as deep as possible to derive the equality of subterms of :n:`@term__1` and :n:`@term__2` wherever the subterms start to differ. For example, from :g:`(S p, S n) = (q, S (S m))` we may derive :g:`S p = q` and :g:`n = S m`. The terms must have inductive types and the same head constructor, but must not be convertible. If so, the tactic derives the equalities and adds them to the current goal as :term:`premises ` (except if the :n:`as` clause is used). If no :n:`induction_arg` is provided and the current goal is of the form :n:`@term <> @term`, :tacn:`injection` is equivalent to :n:`intro @ident; injection @ident`. :n:`@ident` (in :n:`@induction_arg`) Derives equalities based on constructor injectivity for the hypothesis :n:`@ident`. If :n:`@ident` is not already in the context, this is equivalent to :n:`intros until @ident; injection @ident`. :n:`@natural` (in :n:`@induction_arg`) Equivalent to :tacn:`intros` :n:`until @natural` followed by :n:`injection @ident` where :n:`@ident` is the identifier for the last introduced hypothesis. :n:`@one_term with @bindings` (in :n:`@induction_arg`) Like :n:`injection @one_term` but uses the given bindings to instantiate parameters or hypotheses of :n:`@one_term`. :n:`as [= {* @intropattern } ]` Specifies names to apply after the injection so that all generated equalities become hypotheses, which (unlike :tacn:`intros`) may replace existing hypotheses with same name. The number of provided names must not exceed the number of newly generated equalities. If it is smaller, fresh names are generated for the unspecified items. The original equality is erased if it corresponds to a provided name or if the list of provided names is incomplete. Note that, as a convenience for users, specifying :n:`{+ @simple_intropattern }` is treated as if :n:`[= {+ @simple_intropattern } ]` was specified. .. example:: Consider the following goal: .. coqtop:: in Inductive list : Set := | nil : list | cons : nat -> list -> list. Parameter P : list -> Prop. Goal forall l n, P nil -> cons n l = cons 0 nil -> P l. .. coqtop:: all intros. injection H0. .. note:: Beware that injection yields an equality in a sigma type whenever the injected object has a dependent type :g:`P` with its two instances in different types :n:`(P t__1 … t__n)` and :n:`(P u__1 … u__n)`. If :n:`t__1` and :n:`u__1` are the same and have for type an inductive type for which a decidable equality has been declared using :cmd:`Scheme` :n:`Equality …`, the use of a sigma type is avoided. .. exn:: No information can be deduced from this equality and the injectivity of constructors. This may be because the terms are convertible, or due to pattern matching restrictions in the sort Prop. You can try to use option Set Keep Proof Equalities. :undocumented: .. exn:: No primitive equality found. :undocumented: .. exn:: Not a negated primitive equality When :n:`@induction_arg` is not provided, the goal must be in the form :n:`@term <> @term`. .. exn:: Nothing to inject. Generated when one side of the equality is not a constructor. .. tacn:: einjection {? @induction_arg } {? as {* @simple_intropattern } } Works the same as :n:`injection` but if the type of :n:`@one_term`, or the type of the hypothesis referred to by :n:`@natural` has uninstantiated parameters, these parameters are left as existential variables. .. 
flag:: Structural Injection This :term:`flag` ensures that :n:`injection @term` erases the original hypothesis and leaves the generated equalities in the context rather than adding them to the current goal as :term:`premises `, as if giving :n:`injection @term as` (with an empty list of names). This flag is off by default. .. flag:: Keep Proof Equalities By default, :tacn:`injection` only creates new equalities between :n:`@term`\s whose type is in sort :g:`Type` or :g:`Set`, thus implementing a special behavior for objects that are proofs of a statement in :g:`Prop`. This :term:`flag` controls this behavior. .. table:: Keep Equalities @qualid This :term:`table` specifies a set of inductive types for which proof equalities are always kept by :tacn:`injection`. This overrides the :flag:`Keep Proof Equalities` flag for those inductive types. :attr:`Template polymorphic ` inductive types are implicitly added to this table when defined. Use the :cmd:`Add` and :cmd:`Remove` commands to update this set manually. .. tacn:: simplify_eq {? @induction_arg } Examines a hypothesis that has the form :n:`@term__1 = @term__2`. If the terms are structurally different, the tactic does a :tacn:`discriminate`. Otherwise, it does an :tacn:`injection` to simplify the equality, if possible. If :n:`induction_arg` is not provided, the tactic examines the goal, which must be in the form :n:`@term__1 <> @term__2`. See the description of :token:`induction_arg` in :tacn:`injection` for an explanation of the parameters. .. tacn:: esimplify_eq {? @induction_arg } Works the same as :tacn:`simplify_eq` but if the type of :n:`@one_term` or the type of the hypothesis referred to by :n:`@natural` has uninstantiated parameters, these parameters are left as existential variables. .. tacn:: inversion {| @ident | @natural } {? as @or_and_intropattern } {? in {+ @ident } } inversion {| @ident | @natural } using @one_term {? in {+ @ident } } :name: inversion; _ .. comment: the other inversion* tactics don't support the using clause, but they should be able to, if desired. It wouldn't make sense for inversion_sigma. See https://github.com/coq/coq/pull/14179#discussion_r642193096 For a hypothesis whose type is a (co)inductively defined proposition, the tactic introduces a goal for each constructor of the proposition that isn't self-contradictory. Each such goal includes the hypotheses needed to deduce the proposition. :gdef:`(Co)inductively defined propositions ` are those defined with the :cmd:`Inductive` or :cmd:`CoInductive` commands whose constructors yield a `Prop`, as in this :ref:`example `. :n:`@ident` The name of the hypothesis to invert. If :n:`@ident` does not denote a hypothesis in the local context but refers to a hypothesis quantified in the goal, then the latter is first introduced in the local context using :n:`intros until @ident`. :n:`@natural` Equivalent to :n:`intros until @natural; inversion @ident` where :n:`@ident` is the identifier for the last introduced hypothesis. :n:`{? in {+ @ident } }` When :n:`{+ @ident}` are identifiers in the local context, this does a :tacn:`generalize` :n:`{+ @ident}` as the initial step of `inversion`. :n:`as @or_and_intropattern` Provides names for the variables introduced in each new subgoal. The :token:`or_and_intropattern` must have one :n:`{* @intropattern }` for each constructor of the (co)inductive predicate, given in the order in which the constructors are defined. If there are not enough names, Coq picks fresh names.
If an equation splits into several equations (because ``inversion`` applies ``injection`` on the equalities it generates), the corresponding :n:`@intropattern` should be in the form :n:`[ {* @intropattern } ]` (or the equivalent :n:`{*, ( @simple_intropattern ) }`), with the number of entries equal to the number of subequalities obtained from splitting the original equation. Example :ref:`here `. .. note:: The ``inversion … as`` variant of ``inversion`` generally behaves in a slightly more expected way than ``inversion`` (no artificial duplication of some hypotheses referring to other hypotheses). To take advantage of these improvements, it is enough to use ``inversion … as []``, letting Coq choose fresh names. .. note:: As ``inversion`` proofs may be large, we recommend creating and using lemmas whenever the same instance needs to be inverted several times. See :ref:`derive-inversion`. .. note:: Part of the behavior of the :tacn:`inversion` tactic is to generate equalities between expressions that appeared in the hypothesis that is being processed. By default, no equalities are generated if they relate two proofs (i.e. equalities between :token:`term`\s whose type is in sort :g:`Prop`). This behavior can be turned off by using the :flag:`Keep Proof Equalities` setting. .. _inversion-intropattern-ex: .. example:: :tacn:`inversion` with :n:`as @or_and_intropattern` .. coqtop:: reset all Inductive contains0 : list nat -> Prop := | in_hd : forall l, contains0 (0 :: l) | in_tl : forall l b, contains0 l -> contains0 (b :: l). .. coqtop:: in Goal forall l:list nat, contains0 (1 :: l) -> contains0 l. .. coqtop:: all intros l H. inversion H as [ | l' p Hl' [Heqp Heql'] ]. .. tacn:: inversion_clear {| @ident | @natural } {? as @or_and_intropattern } {? in {+ @ident } } Does an :tacn:`inversion` and then erases the hypothesis that was used for the inversion. .. tacn:: simple inversion {| @ident | @natural } {? as @or_and_intropattern } {? in {+ @ident } } A very simple inversion tactic that derives all the necessary equalities but does not simplify the constraints as :tacn:`inversion` does. .. tacn:: dependent inversion {| @ident | @natural } {? as @or_and_intropattern } {? with @one_term } For use when the inverted hypothesis appears in the current goal. Does an :tacn:`inversion` and then substitutes the name of the hypothesis where the corresponding term appears in the goal. .. tacn:: dependent inversion_clear {| @ident | @natural } {? as @or_and_intropattern } {? with @one_term } Does a :tacn:`dependent inversion` and then erases the hypothesis that was used for the dependent inversion. .. tacn:: dependent simple inversion {| @ident | @natural } {? as @or_and_intropattern } {? with @one_term } :undocumented: .. tacn:: inversion_sigma {? @ident {? as @simple_intropattern } } Turns equalities of dependent pairs (e.g., :g:`existT P x p = existT P y q`, frequently left over by :tacn:`inversion` on a dependent type family) into pairs of equalities (e.g., a hypothesis :g:`H : x = y` and a hypothesis of type :g:`rew H in p = q`); these hypotheses can subsequently be simplified using :tacn:`subst`, without ever invoking any kind of axiom asserting uniqueness of identity proofs. If you want to explicitly specify the hypothesis to be inverted, you can pass it as an argument to :tacn:`inversion_sigma`. This tactic also works for :g:`sig`, :g:`sigT2`, :g:`sig2`, :g:`ex`, and :g:`ex2` and there are similar :g:`eq_sig` :g:`***_rect` induction lemmas. .. 
exn:: Type of @ident is not an equality of recognized Σ types: expected one of sig sig2 sigT sigT2 sigT2 ex or ex2 but got @term When applied to a hypothesis, :tacn:`inversion_sigma` can only handle equalities of the listed sigma types. .. exn:: @ident is not an equality of Σ types When applied to a hypothesis, :tacn:`inversion_sigma` can only be called on hypotheses that are equalities using :g:`Coq.Logic.Init.eq`. .. example:: Non-dependent inversion Let us consider the relation :g:`Le` over natural numbers: .. coqtop:: reset in Inductive Le : nat -> nat -> Set := | LeO : forall n:nat, Le 0 n | LeS : forall n m:nat, Le n m -> Le (S n) (S m). Let us consider the following goal: .. coqtop:: none Section Section. Variable P : nat -> nat -> Prop. Variable Q : forall n m:nat, Le n m -> Prop. Goal forall n m, Le (S n) m -> P n m. .. coqtop:: out intros. To prove the goal, we may need to reason by cases on :g:`H` and to derive that :g:`m` is necessarily of the form :g:`(S m0)` for certain :g:`m0` and that :g:`(Le n m0)`. Deriving these conditions corresponds to proving that the only possible constructor of :g:`(Le (S n) m)` is :g:`LeS` and that we can invert the arrow in the type of :g:`LeS`. This inversion is possible because :g:`Le` is the smallest set closed by the constructors :g:`LeO` and :g:`LeS`. .. coqtop:: all inversion_clear H. Note that :g:`m` has been substituted in the goal for :g:`(S m0)` and that the hypothesis :g:`(Le n m0)` has been added to the context. Sometimes it is interesting to have the equality :g:`m = (S m0)` in the context to use it later. In that case we can use :tacn:`inversion` that does not clear the equalities: .. coqtop:: none restart intros. .. coqtop:: all inversion H. .. example:: Dependent inversion Let us consider the following goal: .. coqtop:: none Abort. Goal forall n m (H:Le (S n) m), Q (S n) m H. .. coqtop:: out intros. As :g:`H` occurs in the goal, we may want to reason by cases on its structure and so, we would like inversion tactics to substitute :g:`H` by the corresponding @term in constructor form. Neither :tacn:`inversion` nor :tacn:`inversion_clear` do such a substitution. To have such a behavior we use the dependent inversion tactics: .. coqtop:: all dependent inversion_clear H. Note that :g:`H` has been substituted by :g:`(LeS n m0 l)` and :g:`m` by :g:`(S m0)`. .. example:: Using :tacn:`inversion_sigma` Let us consider the following inductive type of length-indexed lists, and a lemma about inverting equality of cons: .. coqtop:: reset all Require Import Coq.Logic.Eqdep_dec. Inductive vec A : nat -> Type := | nil : vec A O | cons {n} (x : A) (xs : vec A n) : vec A (S n). Lemma invert_cons : forall A n x xs y ys, @cons A n x xs = @cons A n y ys -> xs = ys. Proof. intros A n x xs y ys H. After performing inversion, we are left with an equality of existTs: .. coqtop:: all inversion H. We can turn this equality into a usable form with inversion_sigma: .. coqtop:: all inversion_sigma. To finish cleaning up the proof, we will need to use the fact that all proofs of n = n for n a nat are eq_refl: .. coqtop:: all let H := match goal with H : n = n |- _ => H end in pose proof (Eqdep_dec.UIP_refl_nat _ H); subst H. simpl in *. Finally, we can finish the proof: .. coqtop:: all assumption. Qed. .. seealso:: :tacn:`functional inversion` Helper tactics ~~~~~~~~~~~~~~ ..
tacn:: decide equality Solves a goal of the form :g:`forall x y : R, {x = y} + {~ x = y}`, where :g:`R` is an inductive type such that its constructors do not take proofs or functions as arguments, nor objects in dependent types. It solves goals of the form :g:`{x = y} + {~ x = y}` as well. .. tacn:: compare @one_term__1 @one_term__2 Compares two :n:`@one_term`\s of an inductive datatype. If :g:`G` is the current goal, it leaves the sub-goals :n:`@one_term__1 = @one_term__2 -> G` and :n:`~ @one_term__1 = @one_term__2 -> G`. The type of the :n:`@one_term`\s must satisfy the same restrictions as in the tactic :tacn:`decide equality`. .. tacn:: dependent rewrite {? {| -> | <- } } @one_term {? in @ident } If :n:`@ident` has type :g:`(existT B a b)=(existT B a' b')` in the local context (i.e. each term of the equality has a sigma type :g:`{ a:A & (B a)}`) this tactic rewrites :g:`a` into :g:`a'` and :g:`b` into :g:`b'` in the current goal. This tactic works even if :g:`B` is also a sigma type. This kind of equalities between dependent pairs may be derived by the :tacn:`injection` and :tacn:`inversion` tactics. :n:`{? {| -> | <- } }` By default, the equality is applied from left to right. Specify `<-` to apply the equality from right to left. .. _proofschemes-induction-principles: Generation of induction principles with ``Scheme`` -------------------------------------------------------- .. cmd:: Scheme {? @ident := } @scheme_kind {* with {? @ident := } @scheme_kind } .. insertprodn scheme_kind sort_family .. prodn:: scheme_kind ::= Equality for @reference | {| Induction | Minimality | Elimination | Case } for @reference Sort @sort_family sort_family ::= Set | Prop | SProp | Type A high-level tool for automatically generating (possibly mutual) :term:`induction principles ` for given types and sorts. Each :n:`@reference` is a different inductive type identifier belonging to the same package of mutual inductive definitions. The command generates the :n:`@ident`\s as mutually recursive definitions. Each term :n:`@ident` proves a general principle of mutual induction for objects in type :n:`@reference`. :n:`@ident` The name of the scheme. If not provided, the scheme name will be determined automatically from the sorts involved. :n:`Minimality for @reference Sort @sort_family` Defines a non-dependent elimination principle more natural for inductively defined relations. :n:`Equality for @reference` Tries to generate a Boolean equality and a proof of the decidability of the usual equality. If :token:`reference` involves other inductive types, their equality has to be defined first. .. example:: Induction scheme for tree and forest Currently the automatically-generated :term:`induction principles ` such as `odd_ind` are not useful for mutually-inductive types such as `odd` and `even`. You can define a mutual induction principle for tree and forest in sort ``Set`` with the :cmd:`Scheme` command: .. coqtop:: reset none Axiom A : Set. Axiom B : Set. .. coqtop:: in Inductive tree : Set := node : A -> forest -> tree with forest : Set := leaf : B -> forest | cons : tree -> forest -> forest. .. coqtop:: all Scheme tree_forest_rec := Induction for tree Sort Set with forest_tree_rec := Induction for forest Sort Set. You may now look at the type of tree_forest_rec: .. coqtop:: all Check tree_forest_rec. This principle involves two different predicates for trees and forests; it also has three premises each one corresponding to a constructor of one of the inductive definitions. 
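Because these principles were generated in sort ``Set``, they can also be used directly as recursors. The following sketch (the name ``tree_size`` and the chosen measure are purely illustrative; it only assumes the shape of ``tree_forest_rec`` displayed above) defines a size function on trees that recurses through forests:

.. code-block:: coq

   (* The three function arguments correspond to the premises for the
      constructors node, leaf and cons, in that order. *)
   Definition tree_size : tree -> nat :=
     tree_forest_rec (fun _ => nat) (fun _ => nat)
       (fun _ _ size_f => S size_f)                 (* node *)
       (fun _ => 1)                                 (* leaf *)
       (fun _ size_t _ size_f => size_t + size_f).  (* cons *)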
The principle `forest_tree_rec` shares exactly the same premises, only the conclusion now refers to the property of forests. .. example:: Predicates odd and even on naturals Let odd and even be inductively defined as: .. coqtop:: in Inductive odd : nat -> Prop := oddS : forall n:nat, even n -> odd (S n) with even : nat -> Prop := | evenO : even 0 | evenS : forall n:nat, odd n -> even (S n). The following command generates a powerful elimination principle: .. coqtop:: all Scheme odd_even := Minimality for odd Sort Prop with even_odd := Minimality for even Sort Prop. The type of odd_even for instance will be: .. coqtop:: all Check odd_even. The type of `even_odd` shares the same premises but the conclusion is `forall n : nat, even n -> P0 n`. Automatic declaration of schemes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. flag:: Elimination Schemes This :term:`flag` enables automatic declaration of induction principles when defining a new inductive type. Defaults to on. .. flag:: Nonrecursive Elimination Schemes This :term:`flag` enables automatic declaration of induction principles for types declared with the :cmd:`Variant` and :cmd:`Record` commands. Defaults to off. .. flag:: Case Analysis Schemes This :term:`flag` governs the generation of case analysis lemmas for inductive types, i.e. corresponding to the pattern matching term alone and without fixpoint. .. flag:: Boolean Equality Schemes Decidable Equality Schemes These :term:`flags ` control the automatic declaration of those Boolean equalities (see the second variant of ``Scheme``). .. warning:: You have to be careful with these flags since Coq may now reject well-defined inductive types because it cannot compute a Boolean equality for them. .. flag:: Rewriting Schemes This :term:`flag` governs generation of equality-related schemes such as congruence. Combined Scheme ~~~~~~~~~~~~~~~ .. cmd:: Combined Scheme @ident__def from {+, @ident } Combines induction principles generated by the :cmd:`Scheme` command. Each :n:`@ident` is a different inductive principle that must belong to the same package of mutual inductive principle definitions. This command generates :n:`@ident__def` as the conjunction of the principles: it is built from the common premises of the principles and concluded by the conjunction of their conclusions. In the case where all the inductive principles used are in sort ``Prop``, the propositional conjunction ``and`` is used, otherwise the simple product ``prod`` is used instead. .. example:: We can define the induction principles for trees and forests using: .. coqtop:: all Scheme tree_forest_ind := Induction for tree Sort Prop with forest_tree_ind := Induction for forest Sort Prop. Then we can build the combined induction principle which gives the conjunction of the conclusions of each individual principle: .. coqtop:: all Combined Scheme tree_forest_mutind from tree_forest_ind,forest_tree_ind. The type of tree_forest_mutind will be: .. coqtop:: all Check tree_forest_mutind. .. example:: We can also combine schemes at sort ``Type``: .. coqtop:: all Scheme tree_forest_rect := Induction for tree Sort Type with forest_tree_rect := Induction for forest Sort Type. .. coqtop:: all Combined Scheme tree_forest_mutrect from tree_forest_rect, forest_tree_rect. .. coqtop:: all Check tree_forest_mutrect. .. seealso:: :ref:`functional-scheme` .. _derive-inversion: Generation of inversion principles with ``Derive`` ``Inversion`` ----------------------------------------------------------------- .. cmd:: Derive Inversion @ident with @one_term {? 
Sort @sort_family } Generates an inversion lemma for the :tacn:`inversion` tactic. :token:`ident` is the name of the generated lemma. :token:`one_term` should be in the form :token:`qualid` or :n:`(forall {+ @binder }, @qualid @term)` where :token:`qualid` is the name of an inductive predicate and :n:`{+ @binder }` binds the variables occurring in the term :token:`term`. The lemma is generated for the sort :token:`sort_family` corresponding to :token:`one_term`. Applying the lemma is equivalent to inverting the instance with the :tacn:`inversion` tactic. .. cmd:: Derive Inversion_clear @ident with @one_term {? Sort @sort_family } When applied, it is equivalent to having inverted the instance with the tactic inversion replaced by the tactic `inversion_clear`. .. cmd:: Derive Dependent Inversion @ident with @one_term Sort @sort_family When applied, it is equivalent to having inverted the instance with the tactic `dependent inversion`. .. cmd:: Derive Dependent Inversion_clear @ident with @one_term Sort @sort_family When applied, it is equivalent to having inverted the instance with the tactic `dependent inversion_clear`. .. example:: Consider the relation `Le` over natural numbers and the following parameter ``P``: .. coqtop:: all Inductive Le : nat -> nat -> Set := | LeO : forall n:nat, Le 0 n | LeS : forall n m:nat, Le n m -> Le (S n) (S m). Parameter P : nat -> nat -> Prop. To generate the inversion lemma for the instance :g:`(Le (S n) m)` and the sort :g:`Prop`, we do: .. coqtop:: all Derive Inversion_clear leminv with (forall n m:nat, Le (S n) m) Sort Prop. Check leminv. Then we can use the proven inversion lemma: .. the original LaTeX did not have any Coq code to setup the goal .. coqtop:: none Goal forall (n m : nat) (H : Le (S n) m), P n m. intros. .. coqtop:: all Show. inversion H using leminv. .. _dependent-induction-examples: Examples of :tacn:`dependent destruction` / :tacn:`dependent induction` ----------------------------------------------------------------------- The tactics :tacn:`dependent induction` and :tacn:`dependent destruction` are another solution for inverting inductive predicate instances and potentially doing induction at the same time. It is based on the ``BasicElim`` tactic of Conor McBride which works by abstracting each argument of an inductive instance by a variable and constraining it by equalities afterwards. This way, the usual induction and destruct tactics can be applied to the abstracted instance and after simplification of the equalities we get the expected goals. The abstracting tactic is called generalize_eqs and it takes as argument a hypothesis to generalize. It uses the JMeq datatype defined in Coq.Logic.JMeq, hence we need to require it before. For example, revisiting the first example of the inversion documentation: .. coqtop:: in reset Require Import Coq.Logic.JMeq. Inductive Le : nat -> nat -> Set := | LeO : forall n:nat, Le 0 n | LeS : forall n m:nat, Le n m -> Le (S n) (S m). Parameter P : nat -> nat -> Prop. Goal forall n m:nat, Le (S n) m -> P n m. intros n m H. .. coqtop:: all generalize_eqs H. The index ``S n`` gets abstracted by a variable here, but a corresponding equality is added under the abstract instance so that no information is actually lost. The goal is now almost amenable to do induction or case analysis. One should indeed first move ``n`` into the goal to strengthen it before doing induction, or ``n`` will be fixed in the inductive hypotheses (this does not matter for case analysis). 
As a rule of thumb, all the variables that appear inside constructors in the indices of the hypothesis should be generalized. This is exactly what the ``generalize_eqs_vars`` variant does: .. coqtop:: all abort generalize_eqs_vars H. induction H. As the hypothesis itself did not appear in the goal, we did not need to use an heterogeneous equality to relate the new hypothesis to the old one (which just disappeared here). However, the tactic works just as well in this case, e.g.: .. coqtop:: none Require Import Coq.Program.Equality. .. coqtop:: in Parameter Q : forall (n m : nat), Le n m -> Prop. Goal forall n m (p : Le (S n) m), Q (S n) m p. .. coqtop:: all intros n m p. generalize_eqs_vars p. One drawback of this approach is that in the branches one will have to substitute the equalities back into the instance to get the right assumptions. Sometimes injection of constructors will also be needed to recover the needed equalities. Also, some subgoals should be directly solved because of inconsistent contexts arising from the constraints on indexes. The nice thing is that we can make a tactic based on discriminate, injection and variants of substitution to automatically do such simplifications (which may involve the axiom K). This is what the ``simplify_dep_elim`` tactic from ``Coq.Program.Equality`` does. For example, we might simplify the previous goals considerably: .. coqtop:: all abort induction p ; simplify_dep_elim. The higher-order tactic ``do_depind`` defined in ``Coq.Program.Equality`` takes a tactic and combines the building blocks we have seen with it: generalizing by equalities calling the given tactic with the generalized induction hypothesis as argument and cleaning the subgoals with respect to equalities. Its most important instantiations are :tacn:`dependent induction` and :tacn:`dependent destruction` that do induction or simply case analysis on the generalized hypothesis. For example we can redo what we’ve done manually with dependent destruction: .. coqtop:: in Lemma ex : forall n m:nat, Le (S n) m -> P n m. .. coqtop:: in intros n m H. .. coqtop:: all abort dependent destruction H. This gives essentially the same result as inversion. Now if the destructed hypothesis actually appeared in the goal, the tactic would still be able to invert it, contrary to dependent inversion. Consider the following example on vectors: .. coqtop:: in Set Implicit Arguments. .. coqtop:: in Parameter A : Set. .. coqtop:: in Inductive vector : nat -> Type := | vnil : vector 0 | vcons : A -> forall n, vector n -> vector (S n). .. coqtop:: in Goal forall n, forall v : vector (S n), exists v' : vector n, exists a : A, v = vcons a v'. .. coqtop:: in intros n v. .. coqtop:: all dependent destruction v. In this case, the ``v`` variable can be replaced in the goal by the generalized hypothesis only when it has a type of the form ``vector (S n)``, that is only in the second case of the destruct. The first one is dismissed because ``S n <> 0``. A larger example ~~~~~~~~~~~~~~~~ Let’s see how the technique works with induction on inductive predicates on a real example. We will develop an example application to the theory of simply-typed lambda-calculus formalized in a dependently-typed style: .. coqtop:: in reset Inductive type : Type := | base : type | arrow : type -> type -> type. .. coqtop:: in Notation " t --> t' " := (arrow t t') (at level 20, t' at next level). .. coqtop:: in Inductive ctx : Type := | empty : ctx | snoc : ctx -> type -> ctx. .. 
coqtop:: in Notation " G , tau " := (snoc G tau) (at level 20, tau at next level). .. coqtop:: in Fixpoint conc (G D : ctx) : ctx := match D with | empty => G | snoc D' x => snoc (conc G D') x end. .. coqtop:: in Notation " G ; D " := (conc G D) (at level 20). .. coqtop:: in Inductive term : ctx -> type -> Type := | ax : forall G tau, term (G, tau) tau | weak : forall G tau, term G tau -> forall tau', term (G, tau') tau | abs : forall G tau tau', term (G , tau) tau' -> term G (tau --> tau') | app : forall G tau tau', term G (tau --> tau') -> term G tau -> term G tau'. We have defined types and contexts which are snoc-lists of types. We also have a ``conc`` operation that concatenates two contexts. The ``term`` datatype represents in fact the possible typing derivations of the calculus, which are isomorphic to the well-typed terms, hence the name. A term is either an application of: + the axiom rule to type a reference to the first variable in a context + the weakening rule to type an object in a larger context + the abstraction or lambda rule to type a function + the application to type an application of a function to an argument Once we have this datatype we want to do proofs on it, like weakening: .. coqtop:: in abort Lemma weakening : forall G D tau, term (G ; D) tau -> forall tau', term (G , tau' ; D) tau. The problem here is that we can’t just use induction on the typing derivation because it will forget about the ``G ; D`` constraint appearing in the instance. A solution would be to rewrite the goal as: .. coqtop:: in abort Lemma weakening' : forall G' tau, term G' tau -> forall G D, (G ; D) = G' -> forall tau', term (G, tau' ; D) tau. With this proper separation of the index from the instance and the right induction loading (putting ``G`` and ``D`` after the inducted-on hypothesis), the proof will go through, but it is a very tedious process. One is also forced to make a wrapper lemma to get back the more natural statement. The :tacn:`dependent induction` tactic alleviates this trouble by doing all of this plumbing of generalizing and substituting back automatically. Indeed we can simply write: .. coqtop:: in Require Import Coq.Program.Tactics. Require Import Coq.Program.Equality. .. coqtop:: in Lemma weakening : forall G D tau, term (G ; D) tau -> forall tau', term (G , tau' ; D) tau. .. coqtop:: in Proof with simpl in * ; simpl_depind ; auto. .. coqtop:: in intros G D tau H. dependent induction H generalizing G D ; intros. This call to :tacn:`dependent induction` has an additional argument, which is a list of variables appearing in the instance that should be generalized in the goal, so that they can vary in the induction hypotheses. By default, all variables appearing inside constructors (except in a parameter position) of the instantiated hypothesis will be generalized automatically but one can always give the list explicitly. .. coqtop:: all Show. The ``simpl_depind`` tactic includes an automatic tactic that tries to simplify equalities appearing at the beginning of induction hypotheses, generally using trivial applications of ``reflexivity``. In cases where the equality is not between constructor forms though, one must help the automation by giving some arguments, using the ``specialize`` tactic for example. .. coqtop:: in destruct D... apply weak; apply ax. apply ax. .. coqtop:: in destruct D... .. coqtop:: all Show. .. coqtop:: all specialize (IHterm G0 empty eq_refl). Once the induction hypothesis has been narrowed to the right equality, it can be used directly. ..
coqtop:: all apply weak, IHterm. Now concluding this subgoal is easy. .. coqtop:: in constructor; apply IHterm; reflexivity. coq-8.15.0/doc/sphinx/proofs/writing-proofs/rewriting.rst000066400000000000000000000001261417001151100235000ustar00rootroot00000000000000:orphan: .. raw:: html coq-8.15.0/doc/sphinx/refman-preamble.rst000066400000000000000000000022631417001151100202260ustar00rootroot00000000000000.. This file is automatically prepended to all other files using the ``rst_prolog`` option. .. only:: html .. This is included once per page in the HTML build, and a single time (in the document's preamble) in the LaTeX one. .. preamble:: /refman-preamble.sty .. Some handy replacements for common items .. role:: smallcaps .. |c_1| replace:: `c`\ :math:`_{1}` .. |c_i| replace:: `c`\ :math:`_{i}` .. |c_n| replace:: `c`\ :math:`_{n}` .. |Cic| replace:: CIC .. |eq_beta_delta_iota_zeta| replace:: `=`\ :math:`_{βδιζ}` .. |Latex| replace:: :smallcaps:`LaTeX` .. |Ltac| replace:: `L`:sub:`tac` .. |p_1| replace:: `p`\ :math:`_{1}` .. |p_i| replace:: `p`\ :math:`_{i}` .. |p_n| replace:: `p`\ :math:`_{n}` .. |Program| replace:: :strong:`Program` .. |Prop| replace:: :math:`\Prop` .. |SProp| replace:: :math:`\SProp` .. |Set| replace:: :math:`\Set` .. |SSR| replace:: :smallcaps:`SSReflect` .. |Type| replace:: :math:`\Type` .. |t_1| replace:: `t`\ :math:`_{1}` .. |t_i| replace:: `t`\ :math:`_{i}` .. |t_m| replace:: `t`\ :math:`_{m}` .. |t_n| replace:: `t`\ :math:`_{n}` .. |x_1| replace:: `x`\ :math:`_{1}` .. |x_i| replace:: `x`\ :math:`_{i}` .. |x_n| replace:: `x`\ :math:`_{n}` coq-8.15.0/doc/sphinx/refman-preamble.sty000066400000000000000000000054311417001151100202350ustar00rootroot00000000000000\newcommand{\as}{\kw{as}} \newcommand{\Assum}[3]{\kw{Assum}(#1)(#2:#3)} \newcommand{\case}{\kw{case}} \newcommand{\cons}{\textsf{cons}} \newcommand{\consf}{\textsf{consf}} \newcommand{\Def}[4]{\kw{Def}(#1)(#2:=#3:#4)} \newcommand{\emptyf}{\textsf{emptyf}} \newcommand{\End}{\kw{End}} \newcommand{\kwend}{\kw{end}} \newcommand{\even}{\textsf{even}} \newcommand{\evenO}{\textsf{even}_\textsf{O}} \newcommand{\evenS}{\textsf{even}_\textsf{S}} \newcommand{\Fix}{\kw{Fix}} \newcommand{\fix}{\kw{fix}} \newcommand{\for}{\textsf{for}} \newcommand{\forest}{\textsf{forest}} \newcommand{\Functor}{\kw{Functor}} \newcommand{\In}{\kw{in}} \newcommand{\Ind}[4]{\kw{Ind}[#2](#3:=#4)} \newcommand{\ind}[3]{\kw{Ind}~[#1]\left(#2\mathrm{~:=~}#3\right)} \newcommand{\Indp}[5]{\kw{Ind}_{#5}(#1)[#2](#3:=#4)} \newcommand{\Indpstr}[6]{\kw{Ind}_{#5}(#1)[#2](#3:=#4)/{#6}} \newcommand{\injective}{\kw{injective}} \newcommand{\kw}[1]{\textsf{#1}} \newcommand{\length}{\textsf{length}} \newcommand{\letin}[3]{\kw{let}~#1:=#2~\kw{in}~#3} \newcommand{\List}{\textsf{list}} \newcommand{\lra}{\longrightarrow} \newcommand{\Match}{\kw{match}} \newcommand{\Mod}[3]{{\kw{Mod}}({#1}:{#2}\,\zeroone{:={#3}})} \newcommand{\ModImp}[3]{{\kw{Mod}}({#1}:{#2}:={#3})} \newcommand{\ModA}[2]{{\kw{ModA}}({#1}=={#2})} \newcommand{\ModS}[2]{{\kw{Mod}}({#1}:{#2})} \newcommand{\ModType}[2]{{\kw{ModType}}({#1}:={#2})} \newcommand{\mto}{.\;} \newcommand{\nat}{\textsf{nat}} \newcommand{\Nil}{\textsf{nil}} \newcommand{\nilhl}{\textsf{nil\_hl}} \newcommand{\nO}{\textsf{O}} \newcommand{\node}{\textsf{node}} \newcommand{\nS}{\textsf{S}} \newcommand{\odd}{\textsf{odd}} \newcommand{\oddS}{\textsf{odd}_\textsf{S}} \newcommand{\ovl}[1]{\overline{#1}} \newcommand{\Pair}{\textsf{pair}} \newcommand{\plus}{\mathsf{plus}} \newcommand{\SProp}{\textsf{SProp}} \newcommand{\Prop}{\textsf{Prop}} 
\newcommand{\return}{\kw{return}} \newcommand{\Set}{\textsf{Set}} \newcommand{\Sort}{\mathcal{S}} \newcommand{\Str}{\textsf{Stream}} \newcommand{\Struct}{\kw{Struct}} \newcommand{\subst}[3]{#1\{#2/#3\}} \newcommand{\tl}{\textsf{tl}} \newcommand{\tree}{\textsf{tree}} \newcommand{\trii}{\triangleright_\iota} \newcommand{\Type}{\textsf{Type}} \newcommand{\WEV}[3]{\mbox{$#1[] \vdash #2 \lra #3$}} \newcommand{\WEVT}[3]{\mbox{$#1[] \vdash #2 \lra$}\\ \mbox{$ #3$}} \newcommand{\WF}[2]{{\mathcal{W\!F}}(#1)[#2]} \newcommand{\WFE}[1]{\WF{E}{#1}} \newcommand{\WFT}[2]{#1[] \vdash {\mathcal{W\!F}}(#2)} \newcommand{\WFTWOLINES}[2]{{\mathcal{W\!F}}\begin{array}{l}(#1)\\\mbox{}[{#2}]\end{array}} \newcommand{\with}{\kw{with}} \newcommand{\WS}[3]{#1[] \vdash #2 <: #3} \newcommand{\WSE}[2]{\WS{E}{#1}{#2}} \newcommand{\WT}[4]{#1[#2] \vdash #3 : #4} \newcommand{\WTE}[3]{\WT{E}{#1}{#2}{#3}} \newcommand{\WTEG}[2]{\WTE{\Gamma}{#1}{#2}} \newcommand{\WTM}[3]{\WT{#1}{}{#2}{#3}} \newcommand{\zeroone}[1]{[{#1}]} coq-8.15.0/doc/sphinx/std-glossindex.rst000066400000000000000000000001511417001151100201320ustar00rootroot00000000000000:orphan: .. hack to get index in TOC .. _glossary_index: -------------- Glossary index -------------- coq-8.15.0/doc/sphinx/user-extensions/000077500000000000000000000000001417001151100176075ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/user-extensions/proof-schemes.rst000066400000000000000000000001731417001151100231140ustar00rootroot00000000000000:orphan: .. raw:: html coq-8.15.0/doc/sphinx/user-extensions/syntax-extensions.rst000066400000000000000000002700151417001151100240710ustar00rootroot00000000000000.. _syntax-extensions-and-notation-scopes: Syntax extensions and notation scopes ===================================== In this chapter, we introduce advanced commands to modify the way Coq parses and prints objects, i.e. the translations between the concrete and internal representations of terms and commands. The main commands to provide custom symbolic notations for terms are :cmd:`Notation` and :cmd:`Infix`; they will be described in the :ref:`next section `. There is also a variant of :cmd:`Notation` which does not modify the parser; this provides a form of :ref:`abbreviation `. It is sometimes expected that the same symbolic notation has different meanings in different contexts; to achieve this form of overloading, Coq offers a notion of :ref:`notation scopes `. The main command to provide custom notations for tactics is :cmd:`Tactic Notation`. .. coqtop:: none Set Printing Depth 50. .. _Notations: Notations --------- Basic notations ~~~~~~~~~~~~~~~ .. cmd:: Notation @string := @one_term {? ( {+, @syntax_modifier } ) } {? : @scope_name } Defines a *notation*, an alternate syntax for entering or displaying a specific term or term pattern. This command supports the :attr:`local` attribute, which limits its effect to the current module. If the command is inside a section, its effect is limited to the section. Specifying :token:`scope_name` associates the notation with that scope. Otherwise it is a *lonely notation*, that is, not associated with a scope. .. todo indentation of this chapter is not consistent with other chapters. Do we have a standard? For example, the following definition permits using the infix expression :g:`A /\ B` to represent :g:`(and A B)`: .. coqtop:: in Notation "A /\ B" := (and A B). :g:`"A /\ B"` is a *notation*, which tells how to represent the abbreviated term :g:`(and A B)`. 
Notations must be in double quotes, except when the abbreviation has the form of an ordinary applicative expression; see :ref:`Abbreviations`. The notation consists of *tokens* separated by spaces. Tokens which are identifiers (such as ``A``, ``x0'``, etc.) are the *parameters* of the notation. Each of them must occur at least once in the abbreviated term. The other elements of the string (such as ``/\``) are the *symbols*. Identifiers enclosed in single quotes are treated as symbols and thus lose their role of parameters. In the same vein, every symbol of at least 3 characters and starting with a simple quote must be quoted (then it starts with two single quotes). Here is an example. .. coqtop:: in Notation "'IF' c1 'then' c2 'else' c3" := (c1 /\ c2 \/ ~ c1 /\ c3) (at level 200, right associativity). A notation binds a syntactic expression to a term. Unless the parser and pretty-printer of Coq already know how to deal with the syntactic expression (such as through :cmd:`Reserved Notation` or for notations that contain only literals), explicit precedences and associativity rules have to be given. .. note:: The right-hand side of a notation is interpreted at the time the notation is given. In particular, disambiguation of constants, :ref:`implicit arguments ` and other notations are resolved at the time of the declaration of the notation. The right-hand side is currently typed only at use time but this may change in the future. Precedences and associativity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Mixing different symbolic notations in the same text may cause serious parsing ambiguity. To deal with the ambiguity of notations, Coq uses precedence levels ranging from 0 to 100 (plus one extra level numbered 200) and associativity rules. Consider for example the new notation .. coqtop:: in Notation "A \/ B" := (or A B). Clearly, an expression such as :g:`forall A:Prop, True /\ A \/ A \/ False` is ambiguous. To tell the Coq parser how to interpret the expression, a priority between the symbols ``/\`` and ``\/`` has to be given. Assume for instance that we want conjunction to bind more than disjunction. This is expressed by assigning a precedence level to each notation, knowing that a lower level binds more than a higher level. Hence the level for disjunction must be higher than the level for conjunction. Since connectives are not tight articulation points of a text, it is reasonable to choose levels not so far from the highest level which is 100, for example 85 for disjunction and 80 for conjunction [#and_or_levels]_. Similarly, an associativity is needed to decide whether :g:`True /\ False /\ False` defaults to :g:`True /\ (False /\ False)` (right associativity) or to :g:`(True /\ False) /\ False` (left associativity). We may even consider that the expression is not well-formed and that parentheses are mandatory (this is a “no associativity”) [#no_associativity]_. We do not know of a special convention for the associativity of disjunction and conjunction, so let us apply right associativity (which is the choice of Coq). Precedence levels and associativity rules of notations are specified with a list of parenthesized :n:`@syntax_modifier`\s. Here is how the previous examples refine: .. coqtop:: in Notation "A /\ B" := (and A B) (at level 80, right associativity). Notation "A \/ B" := (or A B) (at level 85, right associativity). By default, a notation is considered nonassociative, but the precedence level is mandatory (except for special cases whose level is canonical). 
The level is either a number or the phrase ``next level`` whose meaning is obvious. Some :ref:`associativities are predefined ` in the ``Notations`` module. .. TODO I don't find it obvious -- CPC Complex notations ~~~~~~~~~~~~~~~~~ Notations can be made from arbitrarily complex symbols. One can for instance define prefix notations. .. coqtop:: in Notation "~ x" := (not x) (at level 75, right associativity). One can also define notations for incomplete terms, with the hole expected to be inferred during type checking. .. coqtop:: in Notation "x = y" := (@eq _ x y) (at level 70, no associativity). One can define *closed* notations whose both sides are symbols. In this case, the default precedence level for the inner sub-expression is 200, and the default level for the notation itself is 0. .. coqtop:: in Notation "( x , y )" := (@pair _ _ x y). One can also define notations for binders. .. coqtop:: in Notation "{ x : A | P }" := (sig A (fun x => P)). In the last case though, there is a conflict with the notation for type casts. The notation for type casts, as shown by the command :cmd:`Print Grammar` `constr` is at level 100. To avoid ``x : A`` being parsed as a type cast, it is necessary to put ``x`` at a level below 100, typically 99. Hence, a correct definition is the following: .. coqtop:: reset all Notation "{ x : A | P }" := (sig A (fun x => P)) (x at level 99). More generally, it is required that notations are explicitly factorized on the left. See the next section for more about factorization. Simple factorization rules ~~~~~~~~~~~~~~~~~~~~~~~~~~ Coq extensible parsing is performed by *Camlp5* which is essentially a LL1 parser: it decides which notation to parse by looking at tokens from left to right. Hence, some care has to be taken not to hide already existing rules by new rules. Some simple left factorization work has to be done. Here is an example. .. coqtop:: all Notation "x < y" := (lt x y) (at level 70). Fail Notation "x < y < z" := (x < y /\ y < z) (at level 70). In order to factorize the left part of the rules, the subexpression referred to by ``y`` has to be at the same level in both rules. However the default behavior puts ``y`` at the next level below 70 in the first rule (``no associativity`` is the default), and at level 200 in the second rule (``level 200`` is the default for inner expressions). To fix this, we need to force the parsing level of ``y``, as follows. .. coqtop:: in Notation "x < y" := (lt x y) (at level 70). Notation "x < y < z" := (x < y /\ y < z) (at level 70, y at next level). For the sake of factorization with Coq predefined rules, simple rules have to be observed for notations starting with a symbol, e.g., rules starting with “\ ``{``\ ” or “\ ``(``\ ” should be put at level 0. The list of Coq predefined notations can be found in the chapter on :ref:`thecoqlibrary`. Use of notations for printing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The command :cmd:`Notation` has an effect both on the Coq parser and on the Coq printer. For example: .. coqtop:: all Check (and True True). However, printing, especially pretty-printing, also requires some care. We may want specific indentations, line breaks, alignment if on several lines, etc. For pretty-printing, Coq relies on OCaml formatting library, which provides indentation and automatic line breaks depending on page width by means of *formatting boxes*. The default printing of notations is rudimentary. 
For printing a notation, a formatting box is opened in such a way that if the notation and its arguments cannot fit on a single line, a line break is inserted before the symbols of the notation and the arguments on the next lines are aligned with the argument on the first line. A first, simple control that a user can have on the printing of a notation is the insertion of spaces at some places of the notation. This is performed by adding extra spaces between the symbols and parameters: each extra space (other than the single space needed to separate the components) is interpreted as a space to be inserted by the printer. Here is an example showing how to add spaces next to the curly braces. .. coqtop:: in Notation "{{ x : A | P }}" := (sig (fun x : A => P)) (at level 0, x at level 99). .. coqtop:: all Check (sig (fun x : nat => x=x)). The second, more powerful control on printing is by using :n:`@syntax_modifier`\s. Here is an example .. coqtop:: in Definition IF_then_else (P Q R:Prop) := P /\ Q \/ ~ P /\ R. .. coqtop:: all Notation "'If' c1 'then' c2 'else' c3" := (IF_then_else c1 c2 c3) (at level 200, right associativity, format "'[v ' 'If' c1 '/' '[' 'then' c2 ']' '/' '[' 'else' c3 ']' ']'"). .. coqtop:: all Check (IF_then_else (IF_then_else True False True) (IF_then_else True False True) (IF_then_else True False True)). A *format* is an extension of the string denoting the notation with the possible following elements delimited by single quotes: - tokens of the form ``'/ '`` are translated into breaking points. If there is a line break, indents the number of spaces appearing after the “``/``” (no indentation in the example) - tokens of the form ``'//'`` force writing on a new line - well-bracketed pairs of tokens of the form ``'[ '`` and ``']'`` are translated into printing boxes; if there is a line break, an extra indentation of the number of spaces after the “``[``” is applied - well-bracketed pairs of tokens of the form ``'[hv '`` and ``']'`` are translated into horizontal-or-else-vertical printing boxes; if the content of the box does not fit on a single line, then every breaking point forces a new line and an extra indentation of the number of spaces after the “``[hv``” is applied at the beginning of each new line - well-bracketed pairs of tokens of the form ``'[v '`` and ``']'`` are translated into vertical printing boxes; every breaking point forces a new line, even if the line is large enough to display the whole content of the box, and an extra indentation of the number of spaces after the “``[v``” is applied at the beginning of each new line (3 spaces in the example) - extra spaces in other tokens are preserved in the output Notations disappear when a section is closed. No typing of the denoted expression is performed at definition time. Type checking is done only at the time of use of the notation. .. note:: The default for a notation is to be used both for parsing and printing. It is possible to declare a notation only for parsing by adding the option ``only parsing`` to the list of :n:`@syntax_modifier`\s of :cmd:`Notation`. Symmetrically, the ``only printing`` :n:`@syntax_modifier` can be used to declare that a notation should only be used for printing. If a notation to be used both for parsing and printing is overridden, both the parsing and printing are invalided, even if the overriding rule is only parsing. If a given notation string occurs only in ``only printing`` rules, the parser is not modified at all. 
At most one notation used both for parsing and printing, or only for parsing, can be attached to a given notation string and scope. In contrast, an arbitrary number of ``only printing`` notations, each with its own right-hand side, can be attached to the same string and scope. Obviously, expressions printed by means of such extra printing rules will not be reparsed to the same form. .. note:: When several notations can be used to print a given term, the notations which capture the largest subterm of the term are used preferentially. Here is an example: .. coqtop:: in Notation "x < y" := (lt x y) (at level 70). Notation "x < y < z" := (lt x y /\ lt y z) (at level 70, y at next level). Check (0 < 1 /\ 1 < 2). When several notations match the same subterm, or incomparable subterms of the term to print, the notation declared most recently is selected. Moreover, reimporting a library or module declares the notations of this library or module again. If the notation is in a scope (see :ref:`Scopes`), either the scope has to be opened or a delimiter has to exist in the scope for the notation to be usable. The Infix command ~~~~~~~~~~~~~~~~~~ The :cmd:`Infix` command is a shortcut for declaring notations for infix symbols. .. cmd:: Infix @string := @one_term {? ( {+, @syntax_modifier } ) } {? : @scope_name } This command is equivalent to :n:`Notation "x @string y" := (@one_term x y) {? ( {+, @syntax_modifier } ) } {? : @scope_name }` where ``x`` and ``y`` are fresh names, and where the quotes around :n:`@string` are omitted. Here is an example: .. coqtop:: in Infix "/\" := and (at level 80, right associativity). .. _ReservingNotations: Reserving notations ~~~~~~~~~~~~~~~~~~~ .. cmd:: Reserved Notation @string {? ( {+, @syntax_modifier } ) } A given notation may be used in different contexts. Coq expects all uses of the notation to be defined at the same precedence and with the same associativity. To avoid giving the precedence and associativity every time, this command declares a parsing rule (:token:`string`) in advance without giving its interpretation. Here is an example from the initial state of Coq. .. coqtop:: in Reserved Notation "x = y" (at level 70, no associativity). Reserving a notation is also useful for simultaneously defining an inductive type or a recursive constant and a notation for it. .. note:: The notations mentioned in the module :ref:`init-notations` are reserved. Hence their precedence and associativity cannot be changed. .. cmd:: Reserved Infix @string {? ( {+, @syntax_modifier } ) } This command declares an infix parsing rule without giving its interpretation. When a format is attached to a reserved notation (with the `format` :token:`syntax_modifier`), it is used by default by all subsequent interpretations of the corresponding notation. Individual interpretations can override the format. Simultaneous definition of terms and notations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Thanks to reserved notations, inductive, coinductive, record, recursive and corecursive definitions can use customized notations. To do this, insert a :token:`decl_notations` clause after the definition of the (co)inductive type or (co)recursive term (or after the definition of each of them in case of mutual definitions). The exact syntax is given by :n:`@decl_notation` for inductive, coinductive, recursive and corecursive definitions and in :ref:`record-types` for records. Note that only syntax modifiers that do not require adding or changing a parsing rule are accepted. ..
insertprodn decl_notations decl_notation .. prodn:: decl_notations ::= where @decl_notation {* and @decl_notation } decl_notation ::= @string := @one_term {? ( {+, @syntax_modifier } ) } {? : @scope_name } Here are examples: .. coqtop:: in Reserved Notation "A & B" (at level 80). .. coqtop:: in Inductive and' (A B : Prop) : Prop := conj' : A -> B -> A & B where "A & B" := (and' A B). .. without this we get "not a truly recursive fixpoint" .. coqtop:: none Arguments S _ : clear scopes. .. coqtop:: in Fixpoint plus (n m : nat) {struct n} : nat := match n with | O => m | S p => S (p + m) end where "n + m" := (plus n m). Displaying information about notations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. flag:: Printing Notations This :term:`flag` controls whether to use notations for printing terms wherever possible. Default is on. .. flag:: Printing Raw Literals This :term:`flag` controls whether to use string and number notations for printing terms wherever possible (see :ref:`string-notations`). Default is off. .. flag:: Printing Parentheses When this :term:`flag` is on, parentheses are printed even if implied by associativity and precedence. Default is off. .. seealso:: :flag:`Printing All` to disable other elements in addition to notations. .. cmd:: Print Grammar @ident Shows the grammar for the nonterminal :token:`ident`, which must be one of the following: - `constr` - for :token:`term`\s - `pattern` - for :token:`pattern`\s - `tactic` - for currently-defined tactic notations, :token:`tactic`\s and tacticals (corresponding to :token:`ltac_expr` in the documentation). - `vernac` - for :token:`command`\s - `ltac2` - for Ltac2 notations (corresponding to :token:`ltac2_expr`) This command doesn't display all nonterminals of the grammar. For example, productions shown by `Print Grammar tactic` refer to nonterminals `tactic_then_locality` and `for_each_goal` which are not shown and can't be printed. Most of the grammar in the documentation was updated in 8.12 to make it accurate and readable. This was done using a new developer tool that extracts the grammar from the source code, edits it and inserts it into the documentation files. While the edited grammar is equivalent to the original, for readability some nonterminals have been renamed and others have been eliminated by substituting the nonterminal definition where the nonterminal was referenced. This command shows the original grammar, so it won't exactly match the documentation. The Coq parser is based on Camlp5. The documentation for `Extensible grammars `_ is the most relevant but it assumes considerable knowledge. Here are the essentials: Productions can contain the following elements: - nonterminal names - identifiers in the form `[a-zA-Z0-9_]*` - `"…"` - a literal string that becomes a keyword and cannot be used as an :token:`ident`. The string doesn't have to be a valid identifier; frequently the string will contain only punctuation characters. - `IDENT "…"` - a literal string that has the form of an :token:`ident` - `OPT element` - optionally include `element` (e.g. 
a nonterminal, IDENT "…" or "…") - `LIST1 element` - a list of one or more `element`\s - `LIST0 element` - an optional list of `element`\s - `LIST1 element SEP sep` - a list of `element`\s separated by `sep` - `LIST0 element SEP sep` - an optional list of `element`\s separated by `sep` - `[ elements1 | elements2 | … ]` - alternatives (either `elements1` or `elements2` or …) Nonterminals can have multiple **levels** to specify precedence and associativity of its productions. This feature of grammars makes it simple to parse input such as `1+2*3` in the usual way as `1+(2*3)`. However, most nonterminals have a single level. For example, this output from `Print Grammar tactic` shows the first 3 levels for `ltac_expr`, designated as "5", "4" and "3". Level 3 is right-associative, which applies to the productions within it, such as the `try` construct:: Entry ltac_expr is [ "5" RIGHTA [ binder_tactic ] | "4" LEFTA [ SELF; ";"; binder_tactic | SELF; ";"; SELF | SELF; ";"; tactic_then_locality; for_each_goal; "]" ] | "3" RIGHTA [ IDENT "try"; SELF : The interpretation of `SELF` depends on its position in the production and the associativity of the level: - At the beginning of a production, `SELF` means the next level. In the fragment shown above, the next level for `try` is "2". (This is defined by the order of appearance in the grammar or output; the levels could just as well be named "foo" and "bar".) - In the middle of a production, `SELF` means the top level ("5" in the fragment) - At the end of a production, `SELF` means the next level within `LEFTA` levels and the current level within `RIGHTA` levels. `NEXT` always means the next level. `nonterminal LEVEL "…"` is a reference to the specified level for `nonterminal`. `Associativity `_ explains `SELF` and `NEXT` in somewhat more detail. The output for `Print Grammar constr` includes :cmd:`Notation` definitions, which are dynamically added to the grammar at run time. For example, in the definition for `term`, the production on the second line shown here is defined by a :cmd:`Reserved Notation` command in `Notations.v`:: | "50" LEFTA [ SELF; "||"; NEXT Similarly, `Print Grammar tactic` includes :cmd:`Tactic Notation`\s, such as :tacn:`dintuition`. The file `doc/tools/docgram/fullGrammar `_ in the source tree extracts the full grammar for Coq (not including notations and tactic notations defined in `*.v` files nor some optionally-loaded plugins) in a single file with minor changes to handle nonterminals using multiple levels (described in `doc/tools/docgram/README.md `_). This is complete and much easier to read than the grammar source files. `doc/tools/docgram/orderedGrammar `_ has the edited grammar that's used in the documentation. Developer documentation for parsing is in `dev/doc/parsing.md `_. .. _locating-notations: Locating notations ~~~~~~~~~~~~~~~~~~ To know to which notations a given symbol belongs to, use the :cmd:`Locate` command. You can call it on any (composite) symbol surrounded by double quotes. To locate a particular notation, use a string where the variables of the notation are replaced by “``_``” and where possible single quotes inserted around identifiers or tokens starting with a single quote are dropped. .. coqtop:: all Locate "exists". Locate "exists _ .. _ , _". 
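The command also works for infix symbols; for instance, one can list the notations involving the conjunction symbol, or locate the specific infix rule (a small additional illustration):

.. coqdoc::

   Locate "/\".
   Locate "_ /\ _".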
Inheritance of the properties of arguments of constants bound to a notation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If the right-hand side of a notation is a partially applied constant, the notation inherits the implicit arguments (see :ref:`ImplicitArguments`) and notation scopes (see :ref:`Scopes`) of the constant. For instance: .. coqtop:: in reset Record R := {dom : Type; op : forall {A}, A -> dom}. Notation "# x" := (@op x) (at level 8). .. coqtop:: all Check fun x:R => # x 3. As an exception, if the right-hand side is just of the form :n:`@@qualid`, this conventionally stops the inheritance of implicit arguments (but not of notation scopes). .. _notations-and-binders: Notations and binders ~~~~~~~~~~~~~~~~~~~~~ Notations can include binders. This section lists different ways to deal with binders. For further examples, see also :ref:`RecursiveNotationsWithBinders`. Binders bound in the notation and parsed as identifiers +++++++++++++++++++++++++++++++++++++++++++++++++++++++ Here is the basic example of a notation using a binder: .. coqtop:: in Notation "'sigma' x : A , B" := (sigT (fun x : A => B)) (at level 200, x name, A at level 200, right associativity). The binding variables in the right-hand side that occur as a parameter of the notation (here :g:`x`) dynamically bind all the occurrences in their respective binding scope after instantiation of the parameters of the notation. This means that the term bound to :g:`B` can refer to the variable name bound to :g:`x` as shown in the following application of the notation: .. coqtop:: all Check sigma z : nat, z = 0. Note the :n:`@syntax_modifier x name` in the declaration of the notation. It tells to parse :g:`x` as a single identifier (or as the unnamed variable :g:`_`). Binders bound in the notation and parsed as patterns ++++++++++++++++++++++++++++++++++++++++++++++++++++ In the same way as patterns can be used as binders, as in :g:`fun '(x,y) => x+y` or :g:`fun '(existT _ x _) => x`, notations can be defined so that any :n:`@pattern` can be used in place of the binder. Here is an example: .. coqtop:: in reset Notation "'subset' ' p , P " := (sig (fun p => P)) (at level 200, p pattern, format "'subset' ' p , P"). .. coqtop:: all Check subset '(x,y), x+y=0. The :n:`@syntax_modifier p pattern` in the declaration of the notation tells to parse :g:`p` as a pattern. Note that a single variable is both an identifier and a pattern, so, e.g., the following also works: .. coqtop:: all Check subset 'x, x=0. If one wants to prevent such a notation to be used for printing when the pattern is reduced to a single identifier, one has to use instead the :n:`@syntax_modifier p strict pattern`. For parsing, however, a ``strict pattern`` will continue to include the case of a variable. Here is an example showing the difference: .. coqtop:: in Notation "'subset_bis' ' p , P" := (sig (fun p => P)) (at level 200, p strict pattern). Notation "'subset_bis' p , P " := (sig (fun p => P)) (at level 200, p name). .. coqtop:: all Check subset_bis 'x, x=0. The default level for a ``pattern`` is 0. One can use a different level by using ``pattern at level`` :math:`n` where the scale is the same as the one for terms (see :ref:`init-notations`). Binders bound in the notation and parsed as terms +++++++++++++++++++++++++++++++++++++++++++++++++ Sometimes, for the sake of factorization of rules, a binder has to be parsed as a term. This is typically the case for a notation such as the following: .. 
coqdoc:: Notation "{ x : A | P }" := (sig (fun x : A => P)) (at level 0, x at level 99 as name). This is so because the grammar also contains rules starting with :g:`{}` and followed by a term, such as the rule for the notation :g:`{ A } + { B }` for the constant :g:`sumbool` (see :ref:`specification`). Then, in the rule, ``x name`` is replaced by ``x at level 99 as name`` meaning that ``x`` is parsed as a term at level 99 (as done in the notation for :g:`sumbool`), but that this term has actually to be a name, i.e. an identifier or :g:`_`. The notation :g:`{ x | P }` is already defined in the standard library with the ``as name`` :n:`@syntax_modifier`. We cannot redefine it but one can define an alternative notation, say :g:`{ p such that P }`, using instead ``as pattern``. .. coqtop:: in Notation "{ p 'such' 'that' P }" := (sig (fun p => P)) (at level 0, p at level 99 as pattern). Then, the following works: .. coqtop:: all Check {(x,y) such that x+y=0}. To enforce that the pattern should not be used for printing when it is just a name, one could have said ``p at level 99 as strict pattern``. Note also that in the absence of a ``as name``, ``as strict pattern`` or ``as pattern`` :n:`@syntax_modifier`\s, the default is to consider sub-expressions occurring in binding position and parsed as terms to be ``as name``. Binders bound in the notation and parsed as general binders +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ It is also possible to rely on Coq's syntax of binders using the `binder` modifier as follows: .. coqtop:: in Notation "'myforall' p , [ P , Q ] " := (forall p, P -> Q) (at level 200, p binder). In this case, all of :n:`@ident`, :n:`{@ident}`, :n:`[@ident]`, :n:`@ident:@type`, :n:`{@ident:@type}`, :n:`[@ident:@type]`, :n:`'@pattern` can be used in place of the corresponding notation variable. In particular, the binder can declare implicit arguments: .. coqtop:: all Check fun (f : myforall {a}, [a=0, Prop]) => f eq_refl. Check myforall '((x,y):nat*nat), [ x = y, True ]. By using instead `closed binder`, the same list of binders is allowed except that :n:`@ident:@type` requires parentheses around. .. _NotationsWithBinders: Binders not bound in the notation +++++++++++++++++++++++++++++++++ We can also have binders in the right-hand side of a notation which are not themselves bound in the notation. In this case, the binders are considered up to renaming of the internal binder. E.g., for the notation .. coqtop:: in Notation "'exists_different' n" := (exists p:nat, p<>n) (at level 200). the next command fails because p does not bind in the instance of n. .. coqtop:: all Fail Check (exists_different p). .. coqtop:: in Notation "[> a , .. , b <]" := (cons a .. (cons b nil) .., cons b .. (cons a nil) ..). Notations with expressions used both as binder and term +++++++++++++++++++++++++++++++++++++++++++++++++++++++ It is possible to use parameters of the notation both in term and binding position. Here is an example: .. coqtop:: in Definition force n (P:nat -> Prop) := forall n', n' >= n -> P n'. Notation "▢_ n P" := (force n (fun n => P)) (at level 0, n name, P at level 9, format "▢_ n P"). .. coqtop:: all Check exists p, ▢_p (p >= 1). More generally, the parameter can be a pattern, as in the following variant: .. coqtop:: in reset Definition force2 q (P:nat*nat -> Prop) := (forall n', n' >= fst q -> forall p', p' >= snd q -> P q). Notation "▢_ p P" := (force2 p (fun p => P)) (at level 0, p pattern at level 0, P at level 9, format "▢_ p P"). .. 
coqtop:: all Check exists x y, ▢_(x,y) (x >= 1 /\ y >= 2). This support is experimental. For instance, the notation is used for printing only if the occurrence of the parameter in term position comes in the right-hand side before the occurrence in binding position. .. _RecursiveNotations: Notations with recursive patterns ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A mechanism is provided for declaring elementary notations with recursive patterns. The basic example is: .. coqtop:: all Notation "[ x ; .. ; y ]" := (cons x .. (cons y nil) ..). On the right-hand side, an extra construction of the form ``.. t ..`` can be used. Notice that ``..`` is part of the Coq syntax and it must not be confused with the three-dots notation “``…``” used in this manual to denote a sequence of arbitrary size. On the left-hand side, the part “``x s .. s y``” of the notation parses any number of times (but at least once) a sequence of expressions separated by the sequence of tokens ``s`` (in the example, ``s`` is just “``;``”). The right-hand side must contain a subterm of the form either ``φ(x, .. φ(y,t) ..)`` or ``φ(y, .. φ(x,t) ..)`` where :math:`φ([~]_E , [~]_I)`, called the *iterator* of the recursive notation is an arbitrary expression with distinguished placeholders and where :math:`t` is called the *terminating expression* of the recursive notation. In the example, we choose the names :math:`x` and :math:`y` but in practice they can of course be chosen arbitrarily. Note that the placeholder :math:`[~]_I` has to occur only once but :math:`[~]_E` can occur several times. Parsing the notation produces a list of expressions which are used to fill the first placeholder of the iterating pattern which itself is repeatedly nested as many times as the length of the list, the second placeholder being the nesting point. In the innermost occurrence of the nested iterating pattern, the second placeholder is finally filled with the terminating expression. In the example above, the iterator :math:`φ([~]_E , [~]_I)` is :math:`cons [~]_E\, [~]_I` and the terminating expression is ``nil``. Here is another example with the pattern associating on the left: .. coqtop:: in Notation "( x , y , .. , z )" := (pair .. (pair x y) .. z) (at level 0). Here is an example with more involved recursive patterns: .. coqtop:: in Notation "[| t * ( x , y , .. , z ) ; ( a , b , .. , c ) * u |]" := (pair (pair .. (pair (pair t x) (pair t y)) .. (pair t z)) (pair .. (pair (pair a u) (pair b u)) .. (pair c u))) (t at level 39). To give a flavor of the extent and limits of the mechanism, here is an example showing a notation for a chain of equalities. It relies on an artificial expansion of the intended denotation so as to expose a ``φ(x, .. φ(y,t) ..)`` structure, with the drawback that if ever the beta-redexes are contracted, the notations stops to be used for printing. Support for notations defined in this way should be considered experimental. .. coqtop:: in Notation "x ⪯ y ⪯ .. ⪯ z ⪯ t" := ((fun b A a => a <= b /\ A b) y .. ((fun b A a => a <= b /\ A b) z (fun b => b <= t)) .. x) (at level 70, y at next level, z at next level, t at next level). Note finally that notations with recursive patterns can be reserved like standard notations, they can also be declared within :ref:`notation scopes `. .. _RecursiveNotationsWithBinders: Notations with recursive patterns involving binders ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Recursive notations can also be used with binders. The basic example is: .. coqtop:: in Notation "'exists' x .. 
y , p" := (ex (fun x => .. (ex (fun y => p)) ..)) (at level 200, x binder, y binder, right associativity). The principle is the same as in :ref:`RecursiveNotations` except that in the iterator :math:`φ([~]_E , [~]_I)`, the placeholder :math:`[~]_E` can also occur in position of the binding variable of a ``fun`` or a ``forall``. To specify that the part “``x .. y``” of the notation parses a sequence of binders, ``x`` and ``y`` must be marked as ``binder`` in the list of :n:`@syntax_modifier`\s of the notation. The binders of the parsed sequence are used to fill the occurrences of the first placeholder of the iterating pattern which is repeatedly nested as many times as the number of binders generated. If ever the generalization operator ``'`` (see :ref:`implicit-generalization`) is used in the binding list, the added binders are taken into account too. There are two flavors of binder parsing. If ``x`` and ``y`` are marked as binder, then a sequence such as :g:`a b c : T` will be accepted and interpreted as the sequence of binders :g:`(a:T) (b:T) (c:T)`. For instance, in the notation above, the syntax :g:`exists a b : nat, a = b` is valid. The variables ``x`` and ``y`` can also be marked as closed binder in which case only well-bracketed binders of the form :g:`(a b c:T)` or :g:`{a b c:T}` etc. are accepted. With closed binders, the recursive sequence in the left-hand side can be of the more general form ``x s .. s y`` where ``s`` is an arbitrary sequence of tokens. With open binders though, ``s`` has to be empty. Here is an example of recursive notation with closed binders: .. coqtop:: in Notation "'mylet' f x .. y := t 'in' u":= (let f := fun x => .. (fun y => t) .. in u) (at level 200, x closed binder, y closed binder, right associativity). A recursive pattern for binders can be used in position of a recursive pattern for terms. Here is an example: .. coqtop:: in Notation "'FUNAPP' x .. y , f" := (fun x => .. (fun y => (.. (f x) ..) y ) ..) (at level 200, x binder, y binder, right associativity). If an occurrence of the :math:`[~]_E` is not in position of a binding variable but of a term, it is the name used in the binding which is used. Here is an example: .. coqtop:: in Notation "'exists_non_null' x .. y , P" := (ex (fun x => x <> 0 /\ .. (ex (fun y => y <> 0 /\ P)) ..)) (at level 200, x binder). Predefined entries ~~~~~~~~~~~~~~~~~~ By default, sub-expressions are parsed as terms and the corresponding grammar entry is called ``constr``. However, one may sometimes want to restrict the syntax of terms in a notation. For instance, the following notation will accept to parse only global reference in position of :g:`x`: .. coqtop:: in Notation "'apply' f a1 .. an" := (.. (f a1) .. an) (at level 10, f global, a1, an at level 9). In addition to ``global``, one can restrict the syntax of a sub-expression by using the entry names ``ident``, ``name`` or ``pattern`` already seen in :ref:`NotationsWithBinders`, even when the corresponding expression is not used as a binder in the right-hand side. E.g.: .. todo: these two Set Warnings and the note should be removed when ident is reactivated with its literal meaning. .. coqtop:: none Set Warnings "-deprecated-ident-entry". .. coqtop:: in Notation "'apply_id' f a1 .. an" := (.. (f a1) .. an) (at level 10, f ident, a1, an at level 9). .. coqtop:: none Set Warnings "+deprecated-ident-entry". .. note:: As of version 8.13, the entry ``ident`` is a deprecated alias for ``name``. 
In the future, it is planned to strictly parse an identifier (excluding :g:`_`). .. _custom-entries: Custom entries ~~~~~~~~~~~~~~ .. cmd:: Declare Custom Entry @ident Defines new grammar entries, called *custom entries*, that can later be referred to using the entry name :n:`custom @ident`. This command supports the :attr:`local` attribute, which limits the entry to the current module. Non-local custom entries survive module closing and are declared when a file is Required. .. example:: For instance, we may want to define an ad hoc parser for arithmetical operations and proceed as follows: .. coqtop:: all Inductive Expr := | One : Expr | Mul : Expr -> Expr -> Expr | Add : Expr -> Expr -> Expr. Declare Custom Entry expr. Notation "[ e ]" := e (e custom expr at level 2). Notation "1" := One (in custom expr at level 0). Notation "x y" := (Mul x y) (in custom expr at level 1, left associativity). Notation "x + y" := (Add x y) (in custom expr at level 2, left associativity). Notation "( x )" := x (in custom expr, x at level 2). Notation "{ x }" := x (in custom expr, x constr). Notation "x" := x (in custom expr at level 0, x ident). Axiom f : nat -> Expr. Check fun x y z => [1 + y z + {f x}]. Unset Printing Notations. Check fun x y z => [1 + y z + {f x}]. Set Printing Notations. Check fun e => match e with | [1 + 1] => [1] | [x y + z] => [x + y z] | y => [y + e] end. Custom entries have levels, like the main grammar of terms and grammar of patterns have. The lower level is 0 and this is the level used by default to put rules delimited with tokens on both ends. The level is left to be inferred by Coq when using :n:`in custom @ident`. The level is otherwise given explicitly by using the syntax :n:`in custom @ident at level @natural`, where :n:`@natural` refers to the level. Levels are cumulative: a notation at level ``n`` of which the left end is a term shall use rules at level less than ``n`` to parse this subterm. More precisely, it shall use rules at level strictly less than ``n`` if the rule is declared with ``right associativity`` and rules at level less or equal than ``n`` if the rule is declared with ``left associativity``. Similarly, a notation at level ``n`` of which the right end is a term shall use by default rules at level strictly less than ``n`` to parse this subterm if the rule is declared left associative and rules at level less or equal than ``n`` if the rule is declared right associative. This is what happens for instance in the rule .. coqtop:: in Notation "x + y" := (Add x y) (in custom expr at level 2, left associativity). where ``x`` is any expression parsed in entry ``expr`` at level less or equal than ``2`` (including, recursively, the given rule) and ``y`` is any expression parsed in entry ``expr`` at level strictly less than ``2``. Rules associated with an entry can refer different sub-entries. The grammar entry name ``constr`` can be used to refer to the main grammar of term as in the rule .. coqtop:: in Notation "{ x }" := x (in custom expr at level 0, x constr). which indicates that the subterm ``x`` should be parsed using the main grammar. If not indicated, the level is computed as for notations in ``constr``, e.g. using 200 as default level for inner sub-expressions. The level can otherwise be indicated explicitly by using ``constr at level n`` for some ``n``, or ``constr at next level``. Conversely, custom entries can be used to parse sub-expressions of the main grammar, or from another custom entry as is the case in .. 
coqtop:: in Notation "[ e ]" := e (e custom expr at level 2). to indicate that ``e`` has to be parsed at level ``2`` of the grammar associated with the custom entry ``expr``. The level can be omitted, as in .. coqdoc:: Notation "[ e ]" := e (e custom expr). in which case Coq infer it. If the sub-expression is at a border of the notation (as e.g. ``x`` and ``y`` in ``x + y``), the level is determined by the associativity. If the sub-expression is not at the border of the notation (as e.g. ``e`` in ``"[ e ]``), the level is inferred to be the highest level used for the entry. In particular, this level depends on the highest level existing in the entry at the time of use of the notation. In the absence of an explicit entry for parsing or printing a sub-expression of a notation in a custom entry, the default is to consider that this sub-expression is parsed or printed in the same custom entry where the notation is defined. In particular, if ``x at level n`` is used for a sub-expression of a notation defined in custom entry ``foo``, it shall be understood the same as ``x custom foo at level n``. In general, rules are required to be *productive* on the right-hand side, i.e. that they are bound to an expression which is not reduced to a single variable. If the rule is not productive on the right-hand side, as it is the case above for .. coqtop:: in Notation "( x )" := x (in custom expr at level 0, x at level 2). and .. coqtop:: in Notation "{ x }" := x (in custom expr at level 0, x constr). it is used as a *grammar coercion* which means that it is used to parse or print an expression which is not available in the current grammar at the current level of parsing or printing for this grammar but which is available in another grammar or in another level of the current grammar. For instance, .. coqtop:: in Notation "( x )" := x (in custom expr at level 0, x at level 2). tells that parentheses can be inserted to parse or print an expression declared at level ``2`` of ``expr`` whenever this expression is expected to be used as a subterm at level 0 or 1. This allows for instance to parse and print :g:`Add x y` as a subterm of :g:`Mul (Add x y) z` using the syntax ``(x + y) z``. Similarly, .. coqtop:: in Notation "{ x }" := x (in custom expr at level 0, x constr). gives a way to let any arbitrary expression which is not handled by the custom entry ``expr`` be parsed or printed by the main grammar of term up to the insertion of a pair of curly brackets. Another special situation is when parsing global references or identifiers. To indicate that a custom entry should parse identifiers, use the following form: .. coqtop:: reset none Declare Custom Entry expr. .. coqtop:: in Notation "x" := x (in custom expr at level 0, x ident). Similarly, to indicate that a custom entry should parse global references (i.e. qualified or unqualified identifiers), use the following form: .. coqtop:: reset none Declare Custom Entry expr. .. coqtop:: in Notation "x" := x (in custom expr at level 0, x global). .. cmd:: Print Custom Grammar @ident This displays the state of the grammar for terms associated with the custom entry :token:`ident`. .. _NotationSyntax: Syntax ~~~~~~~ Here are the syntax elements used by the various notation commands. .. insertprodn syntax_modifier level .. prodn:: syntax_modifier ::= at level @natural | in custom @ident {? at level @natural } | {+, @ident } {| at @level | in scope @ident } | @ident at @level {? 
@binder_interp } | @ident @explicit_subentry | @ident @binder_interp | left associativity | right associativity | no associativity | only parsing | only printing | format @string {? @string } explicit_subentry ::= ident | name | global | bigint | strict pattern {? at level @natural } | binder | closed binder | constr {? at @level } {? @binder_interp } | custom @ident {? at @level } {? @binder_interp } | pattern {? at level @natural } binder_interp ::= as ident | as name | as pattern | as strict pattern level ::= level @natural | next level .. note:: No typing of the denoted expression is performed at definition time. Type checking is done only at the time of use of the notation. .. note:: Some examples of Notation may be found in the files composing the initial state of Coq (see directory :file:`$COQLIB/theories/Init`). .. note:: The notation ``"{ x }"`` has a special status in the main grammars of terms and patterns so that complex notations of the form ``"x + { y }"`` or ``"x * { y }"`` can be nested with correct precedences. Especially, every notation involving a pattern of the form ``"{ x }"`` is parsed as a notation where the pattern ``"{ x }"`` has been simply replaced by ``"x"`` and the curly brackets are parsed separately. E.g. ``"y + { z }"`` is not parsed as a term of the given form but as a term of the form ``"y + z"`` where ``z`` has been parsed using the rule parsing ``"{ x }"``. Especially, level and precedences for a rule including patterns of the form ``"{ x }"`` are relative not to the textual notation but to the notation where the curly brackets have been removed (e.g. the level and the associativity given to some notation, say ``"{ y } & { z }"`` in fact applies to the underlying ``"{ x }"``\-free rule which is ``"y & z"``). .. note:: Notations such as ``"( p | q )"`` (or starting with ``"( x | "``, more generally) are deprecated as they conflict with the syntax for nested disjunctive patterns (see :ref:`extendedpatternmatching`), and are not honored in pattern expressions. .. warn:: Use of @string Notation is deprecated as it is inconsistent with pattern syntax. This warning is disabled by default to avoid spurious diagnostics due to legacy notation in the Coq standard library. It can be turned on with the ``-w disj-pattern-notation`` flag. .. note:: As of version 8.13, the entry ``ident`` is a deprecated alias for ``name``. In the future, it is planned to strictly parse an identifier (to the exclusion of :g:`_`). If the intent was to use ``ident`` as an identifier (excluding :g:`_`), just silence the warning with :n:`Set Warnings "-deprecated-ident-entry"` and it should automatically get its intended meaning in version 8.15. Similarly, ``as ident`` is a deprecated alias for ``as name``, which will only accept an identifier in the future. If the intent was to use ``as ident`` as an identifier (excluding :g:`_`), just silence the warning with :n:`Set Warnings "-deprecated-as-ident-kind"`. However, this deprecation does not apply to custom entries, where it already denotes an identifier, as expected. .. todo: the note above should be removed at the end of deprecation phase of ident .. .. _Scopes: Notation scopes --------------- A *notation scope* is a set of notations for terms with their interpretations. Notation scopes provide a weak, purely syntactic form of notation overloading: a symbol may refer to different definitions depending on which notation scopes are currently open. 
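As a first taste, here is a sketch using the ``%`` scope delimiters described later in this section; it assumes the ``ZArith`` library has been required so that ``Z_scope`` and its delimiting key are available:

.. coqdoc::

   Require Import ZArith.
   Check (1 + 1)%nat.
   Check (1 + 1)%Z.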
For instance, the infix symbol ``+`` can be used to refer to distinct definitions of the addition operator, such as for natural numbers, integers or reals. Notation scopes can include an interpretation for numbers and strings with the :cmd:`Number Notation` and :cmd:`String Notation` commands. .. insertprodn scope scope_key .. prodn:: scope ::= @scope_name | @scope_key scope_name ::= @ident scope_key ::= @ident Each notation scope has a single :token:`scope_name`, which by convention ends with the suffix "_scope", as in "nat_scope". One or more :token:`scope_key`\s (delimiting keys) may be associated with a notation scope with the :cmd:`Delimit Scope` command. Most commands use :token:`scope_name`; :token:`scope_key`\s are used within :token:`term`\s. .. cmd:: Declare Scope @scope_name Declares a new notation scope. Note that the initial state of Coq declares the following notation scopes: ``core_scope``, ``type_scope``, ``function_scope``, ``nat_scope``, ``bool_scope``, ``list_scope``, ``int_scope``, ``uint_scope``. Use commands such as :cmd:`Notation` to add notations to the scope. Global interpretation rules for notations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ At any time, the interpretation of a notation for a term is done within a *stack* of notation scopes and lonely notations. If a notation is defined in multiple scopes, Coq uses the interpretation from the most recently opened notation scope or declared lonely notation. Note that "stack" is a misleading name. Each scope or lonely notation can only appear in the stack once. New items are pushed onto the top of the stack, except that adding a item that's already in the stack moves it to the top of the stack instead. Scopes are removed by name (e.g. by :cmd:`Close Scope`) wherever they are in the stack, rather than through "pop" operations. Use the :cmd:`Print Visibility` command to display the current notation scope stack. .. cmd:: Open Scope @scope Adds a scope to the notation scope stack. If the scope is already present, the command moves it to the top of the stack. If the command appears in a section: By default, the scope is only added within the section. Specifying :attr:`global` marks the scope for export as part of the current module. Specifying :attr:`local` behaves like the default. If the command does not appear in a section: By default, the scope marks the scope for export as part of the current module. Specifying :attr:`local` prevents exporting the scope. Specifying :attr:`global` behaves like the default. .. cmd:: Close Scope @scope Removes a scope from the notation scope stack. If the command appears in a section: By default, the scope is only removed within the section. Specifying :attr:`global` marks the scope removal for export as part of the current module. Specifying :attr:`local` behaves like the default. If the command does not appear in a section: By default, the scope marks the scope removal for export as part of the current module. Specifying :attr:`local` prevents exporting the removal. Specifying :attr:`global` behaves like the default. .. todo: Strange notion, exporting something that _removes_ a scope. See https://github.com/coq/coq/pull/11718#discussion_r413667817 .. _LocalInterpretationRulesForNotations: Local interpretation rules for notations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In addition to the global rules of interpretation of notations, some ways to change the interpretation of subterms are available. Opening a notation scope locally ++++++++++++++++++++++++++++++++ .. 
insertprodn term_scope term_scope .. prodn:: term_scope ::= @term0 % @scope_key The notation scope stack can be locally extended within a :token:`term` with the syntax :n:`(@term)%@scope_key` (or simply :n:`@term0%@scope_key` for atomic terms). In this case, :n:`@term` is interpreted in the scope stack extended with the scope bound to :n:`@scope_key`. .. cmd:: Delimit Scope @scope_name with @scope_key Binds the delimiting key :token:`scope_key` to a scope. .. cmd:: Undelimit Scope @scope_name Removes the delimiting keys associated with a scope. The arguments of an :ref:`abbreviation ` can be interpreted in a scope stack locally extended with a given scope by using the modifier :n:`{+, @ident } in scope @scope_name`.s Binding types or coercion classes to a notation scope ++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. cmd:: Bind Scope @scope_name with {+ @class } Binds the notation scope :token:`scope_name` to the type or coercion class :token:`class`. When bound, arguments of that type for any function will be interpreted in that scope by default. This default can be overridden for individual functions with the :cmd:`Arguments` command. The association may be convenient when a notation scope is naturally associated with a :token:`type` (e.g. `nat` and the natural numbers). Whether the argument of a function has some type ``type`` is determined statically. For instance, if ``f`` is a polymorphic function of type :g:`forall X:Type, X -> X` and type :g:`t` is bound to a scope ``scope``, then :g:`a` of type :g:`t` in :g:`f t a` is not recognized as an argument to be interpreted in scope ``scope``. .. coqtop:: in reset Parameter U : Set. Declare Scope U_scope. Bind Scope U_scope with U. Parameter Uplus : U -> U -> U. Parameter P : forall T:Set, T -> U -> Prop. Parameter f : forall T:Set, T -> U. Infix "+" := Uplus : U_scope. Unset Printing Notations. Open Scope nat_scope. .. coqtop:: all Check (fun x y1 y2 z t => P _ (x + t) ((f _ (y1 + y2) + z))). .. note:: When active, a bound scope has effect on all defined functions (even if they are defined after the :cmd:`Bind Scope` directive), except if argument scopes were assigned explicitly using the :cmd:`Arguments` command. .. note:: The scopes ``type_scope`` and ``function_scope`` also have a local effect on interpretation. See the next section. The ``type_scope`` notation scope ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. index:: type_scope The scope ``type_scope`` has a special status. It is a primitive interpretation scope which is temporarily activated each time a subterm of an expression is expected to be a type. It is delimited by the key ``type``, and bound to the coercion class ``Sortclass``. It is also used in certain situations where an expression is statically known to be a type, including the conclusion and the type of hypotheses within an Ltac goal match (see :ref:`ltac-match-goal`), the statement of a theorem, the type of a definition, the type of a binder, the domain and codomain of implication, the codomain of products, and more generally any type argument of a declared or defined constant. The ``function_scope`` notation scope ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. index:: function_scope The scope ``function_scope`` also has a special status. It is temporarily activated each time the argument of a global reference is recognized to be a ``Funclass`` instance, i.e., of type :g:`forall x:A, B` or :g:`A -> B`. .. 
_notation-scopes: Notation scopes used in the standard library of Coq ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We give an overview of the scopes used in the standard library of Coq. For a complete list of notations in each scope, use the commands :cmd:`Print Scopes` or :cmd:`Print Scope`. ``type_scope`` This scope includes infix * for product types and infix + for sum types. It is delimited by the key ``type``, and bound to the coercion class ``Sortclass``, as described above. ``function_scope`` This scope is delimited by the key ``function``, and bound to the coercion class ``Funclass``, as described above. ``nat_scope`` This scope includes the standard arithmetical operators and relations on type nat. Positive integer numbers in this scope are mapped to their canonical representation built from :g:`O` and :g:`S`. The scope is delimited by the key ``nat``, and bound to the type :g:`nat` (see above). ``N_scope`` This scope includes the standard arithmetical operators and relations on type :g:`N` (binary natural numbers). It is delimited by the key ``N`` and comes with an interpretation for numbers as closed terms of type :g:`N`. ``Z_scope`` This scope includes the standard arithmetical operators and relations on type :g:`Z` (binary integer numbers). It is delimited by the key ``Z`` and comes with an interpretation for numbers as closed terms of type :g:`Z`. ``positive_scope`` This scope includes the standard arithmetical operators and relations on type :g:`positive` (binary strictly positive numbers). It is delimited by the key ``positive`` and comes with an interpretation for numbers as closed terms of type :g:`positive`. ``Q_scope`` This scope includes the standard arithmetical operators and relations on type :g:`Q` (rational numbers defined as fractions of an integer and a strictly positive integer modulo the equality of the numerator-denominator cross-product) and comes with an interpretation for numbers as closed terms of type :g:`Q`. ``Qc_scope`` This scope includes the standard arithmetical operators and relations on the type :g:`Qc` of rational numbers defined as the type of irreducible fractions of an integer and a strictly positive integer. ``R_scope`` This scope includes the standard arithmetical operators and relations on type :g:`R` (axiomatic real numbers). It is delimited by the key ``R`` and comes with an interpretation for numbers using the :g:`IZR` morphism from binary integer numbers to :g:`R` and :g:`Z.pow_pos` for potential exponent parts. ``bool_scope`` This scope includes notations for the boolean operators. It is delimited by the key ``bool``, and bound to the type :g:`bool` (see above). ``list_scope`` This scope includes notations for the list operators. It is delimited by the key ``list``, and bound to the type :g:`list` (see above). ``core_scope`` This scope includes the notation for pairs. It is delimited by the key ``core``. ``string_scope`` This scope includes notation for strings as elements of the type string. Special characters and escaping follow Coq conventions on strings (see :ref:`lexical-conventions`). Especially, there is no convention to visualize non-printable characters of a string. The file :file:`String.v` shows an example that contains quotes, a newline and a beep (i.e. the ASCII character of code 7). ``char_scope`` This scope includes interpretation for all strings of the form ``"c"`` where :g:`c` is an ASCII character, or of the form ``"nnn"`` where nnn is a three-digit number (possibly with leading 0s), or of the form ``""""``.
Their respective denotations are the ASCII code of :g:`c`, the decimal ASCII code ``nnn``, or the ascii code of the character ``"`` (i.e. the ASCII code 34), all of them being represented in the type :g:`ascii`. Displaying information about scopes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. cmd:: Print Visibility {? @scope_name } Displays the current notation scope stack. The top of the stack is displayed last. Notations in scopes whose interpretation is hidden by the same notation in a more recently opened scope are not displayed. Hence each notation is displayed only once. If :n:`@scope_name` is specified, displays the current notation scope stack as if the scope :n:`@scope_name` is pushed on top of the stack. This is useful to see how a subterm occurring locally in the scope is interpreted. .. cmd:: Print Scopes Displays, for each existing notation scope, all accessible notations (whether or not currently in the notation scope stack), the most-recently defined delimiting key and the class the notation scope is bound to. The display also includes lonely notations. .. todo should the command report all delimiting keys? Use the :cmd:`Print Visibility` command to display the current notation scope stack. .. cmd:: Print Scope @scope_name Displays all notations defined in the notation scope :n:`@scope_name`. It also displays the delimiting key and the class to which the scope is bound, if any. .. _Abbreviations: Abbreviations -------------- .. cmd:: Notation @ident {* @ident__parm } := @one_term {? ( {+, @syntax_modifier } ) } :name: Notation (abbreviation) .. todo: for some reason, Sphinx doesn't complain about a duplicate name if :name: is omitted Defines an abbreviation :token:`ident` with the parameters :n:`@ident__parm`. This command supports the :attr:`local` attribute, which limits the notation to the current module. An *abbreviation* is a name, possibly applied to arguments, that denotes a (presumably) more complex expression. Here are examples: .. coqtop:: none Require Import List. Require Import Relations. Set Printing Notations. .. coqtop:: in Notation Nlist := (list nat). .. coqtop:: all Check 1 :: 2 :: 3 :: nil. .. coqtop:: in Notation reflexive R := (forall x, R x x). .. coqtop:: all Check forall A:Prop, A <-> A. Check reflexive iff. .. coqtop:: in Notation Plus1 B := (Nat.add B 1). .. coqtop:: all Compute (Plus1 3). An abbreviation expects no precedence nor associativity, since it is parsed as an usual application. Abbreviations are used as much as possible by the Coq printers unless the modifier ``(only parsing)`` is given. An abbreviation is bound to an absolute name as an ordinary definition is and it also can be referred to by a qualified name. Abbreviations are syntactic in the sense that they are bound to expressions which are not typed at the time of the definition of the abbreviation but at the time they are used. Especially, abbreviations can be bound to terms with holes (i.e. with “``_``”). For example: .. coqtop:: none reset Set Strict Implicit. Set Printing Depth 50. .. coqtop:: in Definition explicit_id (A:Set) (a:A) := a. .. coqtop:: in Notation id := (explicit_id _). .. coqtop:: all Check (id 0). Abbreviations disappear when a section is closed. No typing of the denoted expression is performed at definition time. Type checking is done only at the time of use of the abbreviation. Like for notations, if the right-hand side of an abbreviation is a partially applied constant, the abbreviation inherits the implicit arguments and notation scopes of the constant. 
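For instance, in the following sketch (the definition and its names are purely illustrative), the abbreviation can be applied without supplying the implicit argument of the underlying constant:

.. coqdoc::

   Definition app_to (x : nat) {A : Type} (f : nat -> A) := f x.
   Notation from0 := (app_to 0).
   Check from0 S.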
As an exception, if the right-hand side is just of the form :n:`@@qualid`, this conventionally stops the inheritance of implicit arguments. Like for notations, it is possible to bind binders in abbreviations. Here is an example: .. coqtop:: in reset Definition force2 q (P:nat*nat -> Prop) := (forall n', n' >= fst q -> forall p', p' >= snd q -> P q). Notation F p P := (force2 p (fun p => P)). Check exists x y, F (x,y) (x >= 1 /\ y >= 2). .. extracted from Gallina chapter Numbers and strings ------------------- .. insertprodn primitive_notations primitive_notations .. prodn:: primitive_notations ::= @number | @string Numbers and strings have no predefined semantics in the calculus. They are merely notations that can be bound to objects through the notation mechanism. Initially, numbers are bound to Peano’s representation of natural numbers (see :ref:`datatypes`). .. note:: Negative integers are not at the same level as :n:`@natural`, for this would make precedence unnatural. .. _number-notations: Number notations ~~~~~~~~~~~~~~~~ .. cmd:: Number Notation @qualid__type @qualid__parse @qualid__print {? ( {+, @number_modifier } ) } : @scope_name .. insertprodn number_modifier number_string_via .. prodn:: number_modifier ::= warning after @bignat | abstract after @bignat | @number_string_via number_string_via ::= via @qualid mapping [ {+, {| @qualid => @qualid | [ @qualid ] => @qualid } } ] This command allows the user to customize the way number literals are parsed and printed. :n:`@qualid__type` the name of an inductive type, while :n:`@qualid__parse` and :n:`@qualid__print` should be the names of the parsing and printing functions, respectively. The parsing function :n:`@qualid__parse` should have one of the following types: * :n:`Number.int -> @qualid__type` * :n:`Number.int -> option @qualid__type` * :n:`Number.uint -> @qualid__type` * :n:`Number.uint -> option @qualid__type` * :n:`Z -> @qualid__type` * :n:`Z -> option @qualid__type` * :n:`PrimInt63.pos_neg_int63 -> @qualid__type` * :n:`PrimInt63.pos_neg_int63 -> option @qualid__type` * :n:`PrimFloat.float -> @qualid__type` * :n:`PrimFloat.float -> option @qualid__type` * :n:`Number.number -> @qualid__type` * :n:`Number.number -> option @qualid__type` And the printing function :n:`@qualid__print` should have one of the following types: * :n:`@qualid__type -> Number.int` * :n:`@qualid__type -> option Number.int` * :n:`@qualid__type -> Number.uint` * :n:`@qualid__type -> option Number.uint` * :n:`@qualid__type -> Z` * :n:`@qualid__type -> option Z` * :n:`@qualid__type -> PrimInt63.pos_neg_int63` * :n:`@qualid__type -> option PrimInt63.pos_neg_int63` * :n:`@qualid__type -> PrimFloat.float` * :n:`@qualid__type -> option PrimFloat.float` * :n:`@qualid__type -> Number.number` * :n:`@qualid__type -> option Number.number` When parsing, the application of the parsing function :n:`@qualid__parse` to the number will be fully reduced, and universes of the resulting term will be refreshed. Note that only fully-reduced ground terms (terms containing only function application, constructors, inductive type families, sorts, primitive integers, primitive floats, primitive arrays and type constants for primitive types) will be considered for printing. .. note:: Instead of an inductive type, :n:`@qualid__type` can be :n:`PrimInt63.int` or :n:`PrimFloat.float`, in which case :n:`@qualid__print` takes :n:`PrimInt63.int_wrapper` or :n:`PrimFloat.float_wrapper` as input instead of :n:`PrimInt63.int` or :n:`PrimFloat.float`. See below for an :ref:`example `. .. 
note:: When :n:`PrimFloat.float` is used as input type of :n:`@qualid__parse`, only numerical values will be parsed this way, (no infinities nor NaN). Similarly, printers :n:`@qualid__print` with output type :n:`PrimFloat.float` or :n:`option PrimFloat.float` are ignored when they return non numerical values. .. _number-string-via: :n:`via @qualid__ind mapping [ {+, @qualid__constant => @qualid__constructor } ]` When using this option, :n:`@qualid__type` no longer needs to be an inductive type and is instead mapped to the inductive type :n:`@qualid__ind` according to the provided list of pairs, whose first component :n:`@qualid__constant` is a constant of type :n:`@qualid__type` (or a function of type :n:`{* _ -> } @qualid__type`) and the second a constructor of type :n:`@qualid__ind`. The type :n:`@qualid__type` is then replaced by :n:`@qualid__ind` in the above parser and printer types. When :n:`@qualid__constant` is surrounded by square brackets, all the implicit arguments of :n:`@qualid__constant` (whether maximally inserted or not) are ignored when translating to :n:`@qualid__constructor` (i.e., before applying :n:`@qualid__print`) and replaced with implicit argument holes :g:`_` when translating from :n:`@qualid__constructor` to :n:`@qualid__constant` (after :n:`@qualid__parse`). See below for an :ref:`example `. .. note:: The implicit status of the arguments is considered only at notation declaration time, any further modification of this status has no impact on the previously declared notations. .. note:: In case of multiple implicit options (for instance :g:`Arguments eq_refl {A}%type_scope {x}, [_] _`), an argument is considered implicit when it is implicit in any of the options. .. note:: To use a :token:`sort` as the target type :n:`@qualid__type`, use an :ref:`abbreviation ` as in the :ref:`example below `. :n:`warning after @bignat` displays a warning message about a possible stack overflow when calling :n:`@qualid__parse` to parse a literal larger than :n:`@bignat`. .. warn:: Stack overflow or segmentation fault happens when working with large numbers in @type (threshold may vary depending on your system limits and on the command executed). When a :cmd:`Number Notation` is registered in the current scope with :n:`(warning after @bignat)`, this warning is emitted when parsing a number greater than or equal to :token:`bignat`. :n:`abstract after @bignat` returns :n:`(@qualid__parse m)` when parsing a literal :n:`m` that's greater than :n:`@bignat` rather than reducing it to a normal form. Here :g:`m` will be a :g:`Number.int`, :g:`Number.uint`, :g:`Z` or :g:`Number.number`, depending on the type of the parsing function :n:`@qualid__parse`. This allows for a more compact representation of literals in types such as :g:`nat`, and limits parse failures due to stack overflow. Note that a warning will be emitted when an integer larger than :token:`bignat` is parsed. Note that :n:`(abstract after @bignat)` has no effect when :n:`@qualid__parse` lands in an :g:`option` type. .. warn:: To avoid stack overflow, large numbers in @type are interpreted as applications of @qualid__parse. When a :cmd:`Number Notation` is registered in the current scope with :n:`(abstract after @bignat)`, this warning is emitted when parsing a number greater than or equal to :token:`bignat`. Typically, this indicates that the fully computed representation of numbers can be so large that non-tail-recursive OCaml functions run out of stack space when trying to walk them. .. 
warn:: The 'abstract after' directive has no effect when the parsing function (@qualid__parse) targets an option type. As noted above, the :n:`(abstract after @natural)` directive has no effect when :n:`@qualid__parse` lands in an :g:`option` type. .. exn:: 'via' and 'abstract' cannot be used together. With the :n:`abstract after` option, the parser function :n:`@qualid__parse` does not reduce large numbers to a normal form, which prevents doing the translation given in the :n:`mapping` list. .. exn:: Cannot interpret this number as a value of type @type The number notation registered for :token:`type` does not support the given number. This error is given when the interpretation function returns :g:`None`, or if the interpretation is registered only for integers or non-negative integers, and the given number has a fractional or exponent part or is negative. .. exn:: overflow in int63 literal @bigint The constant's absolute value is too big to fit into a 63-bit integer :n:`PrimInt63.int`. .. exn:: @qualid__parse should go from Number.int to @type or (option @type). Instead of Number.int, the types Number.uint or Z or PrimInt63.pos_neg_int63 or PrimFloat.float or Number.number could be used (you may need to require BinNums or Number or PrimInt63 or PrimFloat first). The parsing function given to the :cmd:`Number Notation` command is not of the right type. .. exn:: @qualid__print should go from @type to Number.int or (option Number.int). Instead of Number.int, the types Number.uint or Z or PrimInt63.pos_neg_int63 or Number.number could be used (you may need to require BinNums or Number or PrimInt63 first). The printing function given to the :cmd:`Number Notation` command is not of the right type. .. exn:: Unexpected term @term while parsing a number notation. Parsing functions must always return ground terms, made up of function application, constructors, inductive type families, sorts and primitive integers. Parsing functions may not return terms containing axioms, bare (co)fixpoints, lambdas, etc. .. exn:: Unexpected non-option term @term while parsing a number notation. Parsing functions expected to return an :g:`option` must always return a concrete :g:`Some` or :g:`None` when applied to a concrete number expressed as a (hexa)decimal. They may not return opaque constants. .. exn:: Multiple 'via' options. At most one :g:`via` option can be given. .. exn:: Multiple 'warning after' or 'abstract after' options. At most one :g:`warning after` or :g:`abstract after` option can be given. .. _string-notations: String notations ~~~~~~~~~~~~~~~~ .. cmd:: String Notation @qualid__type @qualid__parse @qualid__print {? ( @number_string_via ) } : @scope_name Allows the user to customize how strings are parsed and printed. :n:`@qualid__type` the name of an inductive type, while :n:`@qualid__parse` and :n:`@qualid__print` should be the names of the parsing and printing functions, respectively. 
The parsing function :n:`@qualid__parse` should have one of the following types: * :n:`Byte.byte -> @qualid__type` * :n:`Byte.byte -> option @qualid__type` * :n:`list Byte.byte -> @qualid__type` * :n:`list Byte.byte -> option @qualid__type` The printing function :n:`@qualid__print` should have one of the following types: * :n:`@qualid__type -> Byte.byte` * :n:`@qualid__type -> option Byte.byte` * :n:`@qualid__type -> list Byte.byte` * :n:`@qualid__type -> option (list Byte.byte)` When parsing, the application of the parsing function :n:`@qualid__parse` to the string will be fully reduced, and universes of the resulting term will be refreshed. Note that only fully-reduced ground terms (terms containing only function application, constructors, inductive type families, sorts, primitive integers, primitive floats, primitive arrays and type constants for primitive types) will be considered for printing. :n:`via @qualid__ind mapping [ {+, @qualid__constant => @qualid__constructor } ]` works as for :ref:`number notations above `. .. exn:: Cannot interpret this string as a value of type @type The string notation registered for :token:`type` does not support the given string. This error is given when the interpretation function returns :g:`None`. .. exn:: @qualid__parse should go from Byte.byte or (list Byte.byte) to @type or (option @type). The parsing function given to the :cmd:`String Notation` command is not of the right type. .. exn:: @qualid__print should go from @type to Byte.byte or (option Byte.byte) or (list Byte.byte) or (option (list Byte.byte)). The printing function given to the :cmd:`String Notation` command is not of the right type. .. exn:: Unexpected term @term while parsing a string notation. Parsing functions must always return ground terms, made up of function application, constructors, inductive type families, sorts and primitive integers. Parsing functions may not return terms containing axioms, bare (co)fixpoints, lambdas, etc. .. exn:: Unexpected non-option term @term while parsing a string notation. Parsing functions expected to return an :g:`option` must always return a concrete :g:`Some` or :g:`None` when applied to a concrete string expressed as a decimal. They may not return opaque constants. .. note:: Number or string notations for parameterized inductive types can be added by declaring an :ref:`abbreviation ` for the inductive which instantiates all parameters. See :ref:`example below `. The following errors apply to both string and number notations: .. exn:: @type is not an inductive type. String and number notations can only be declared for inductive types. Declare string or numeral notations for non-inductive types using :n:`@number_string_via`. .. exn:: @qualid was already mapped to @qualid and cannot be remapped to @qualid Duplicates are not allowed in the :n:`mapping` list. .. exn:: Missing mapping for constructor @qualid A mapping should be provided for :n:`@qualid` in the :n:`mapping` list. .. warn:: @type was already mapped to @type, mapping it also to @type might yield ill typed terms when using the notation. Two pairs in the :n:`mapping` list associate types that might be incompatible. .. warn:: Type of @qualid seems incompatible with the type of @qualid. Expected type is: @type instead of @type. This might yield ill typed terms when using the notation. A mapping given in the :n:`mapping` list associates a constant with a seemingly incompatible constructor. .. 
exn:: Cannot interpret in @scope_name because @qualid could not be found in the current environment. The inductive type used to register the string or number notation is no longer available in the environment. Most likely, this is because the notation was declared inside a functor for an inductive type inside the functor. This use case is not currently supported. Alternatively, you might be trying to use a primitive token notation from a plugin which forgot to specify which module you must :g:`Require` for access to that notation. .. exn:: Syntax error: [prim:reference] expected after 'Notation' (in [vernac:command]). The type passed to :cmd:`String Notation` or :cmd:`Number Notation` must be a single qualified identifier. .. exn:: Syntax error: [prim:reference] expected after [prim:reference] (in [vernac:command]). Both functions passed to :cmd:`String Notation` or :cmd:`Number Notation` must be single qualified identifiers. .. todo: generally we don't document syntax errors. Is this a good execption? .. exn:: @qualid is bound to a notation that does not denote a reference. Identifiers passed to :cmd:`String Notation` or :cmd:`Number Notation` must be global references, or notations which evaluate to single qualified identifiers. .. todo note on "single qualified identifiers" https://github.com/coq/coq/pull/11718#discussion_r415076703 .. example:: Number Notation for radix 3 The following example parses and prints natural numbers whose digits are :g:`0`, :g:`1` or :g:`2` as terms of the following inductive type encoding radix 3 numbers. .. coqtop:: in reset Inductive radix3 : Set := | x0 : radix3 | x3 : radix3 -> radix3 | x3p1 : radix3 -> radix3 | x3p2 : radix3 -> radix3. We first define a parsing function .. coqtop:: in Definition of_uint_dec (u : Decimal.uint) : option radix3 := let fix f u := match u with | Decimal.Nil => Some x0 | Decimal.D0 u => match f u with Some u => Some (x3 u) | None => None end | Decimal.D1 u => match f u with Some u => Some (x3p1 u) | None => None end | Decimal.D2 u => match f u with Some u => Some (x3p2 u) | None => None end | _ => None end in f (Decimal.rev u). Definition of_uint (u : Number.uint) : option radix3 := match u with Number.UIntDecimal u => of_uint_dec u | Number.UIntHexadecimal _ => None end. and a printing function .. coqtop:: in Definition to_uint_dec (x : radix3) : Decimal.uint := let fix f x := match x with | x0 => Decimal.Nil | x3 x => Decimal.D0 (f x) | x3p1 x => Decimal.D1 (f x) | x3p2 x => Decimal.D2 (f x) end in Decimal.rev (f x). Definition to_uint (x : radix3) : Number.uint := Number.UIntDecimal (to_uint_dec x). before declaring the notation .. coqtop:: in Declare Scope radix3_scope. Open Scope radix3_scope. Number Notation radix3 of_uint to_uint : radix3_scope. We can check the printer .. coqtop:: all Check x3p2 (x3p1 x0). and the parser .. coqtop:: all Set Printing All. Check 120. Digits other than :g:`0`, :g:`1` and :g:`2` are rejected. .. coqtop:: all fail Check 3. .. _example-number-notation-primitive-int: .. example:: Number Notation for primitive integers This shows the use of the primitive integers :n:`PrimInt63.int` as :n:`@qualid__type`. It is the way parsing and printing of primitive integers are actually implemented in `PrimInt63.v`. .. coqtop:: in reset Require Import PrimInt63. Definition parser (x : pos_neg_int63) : option int := match x with Pos p => Some p | Neg _ => None end. Definition printer (x : int_wrapper) : pos_neg_int63 := Pos (int_wrap x). Number Notation int parser printer : uint63_scope. .. 
_example-number-notation-non-inductive: .. example:: Number Notation for a non-inductive type The following example encodes the terms in the form :g:`sum unit ( ... (sum unit unit) ... )` as the number of units in the term. For instance :g:`sum unit (sum unit unit)` is encoded as :g:`3` while :g:`unit` is :g:`1` and :g:`0` stands for :g:`Empty_set`. The inductive :g:`I` will be used as :n:`@qualid__ind`. .. coqtop:: in reset Inductive I := Iempty : I | Iunit : I | Isum : I -> I -> I. We then define :n:`@qualid__parse` and :n:`@qualid__print` .. coqtop:: in Definition of_uint (x : Number.uint) : I := let fix f n := match n with | O => Iempty | S O => Iunit | S n => Isum Iunit (f n) end in f (Nat.of_num_uint x). Definition to_uint (x : I) : Number.uint := let fix f i := match i with | Iempty => O | Iunit => 1 | Isum i1 i2 => f i1 + f i2 end in Nat.to_num_uint (f x). Inductive sum (A : Set) (B : Set) : Set := pair : A -> B -> sum A B. the number notation itself .. coqtop:: in Notation nSet := Set (only parsing). Number Notation nSet of_uint to_uint (via I mapping [Empty_set => Iempty, unit => Iunit, sum => Isum]) : type_scope. and check the printer .. coqtop:: all Local Open Scope type_scope. Check sum unit (sum unit unit). and the parser .. coqtop:: all Set Printing All. Check 3. .. _example-number-notation-implicit-args: .. example:: Number Notation with implicit arguments The following example parses and prints natural numbers between :g:`0` and :g:`n-1` as terms of type :g:`Fin.t n`. .. coqtop:: all reset Require Import Vector. Print Fin.t. Note the implicit arguments of :g:`Fin.F1` and :g:`Fin.FS`, which won't appear in the corresponding inductive type. .. coqtop:: in Inductive I := I1 : I | IS : I -> I. Definition of_uint (x : Number.uint) : I := let fix f n := match n with O => I1 | S n => IS (f n) end in f (Nat.of_num_uint x). Definition to_uint (x : I) : Number.uint := let fix f i := match i with I1 => O | IS n => S (f n) end in Nat.to_num_uint (f x). Declare Scope fin_scope. Delimit Scope fin_scope with fin. Local Open Scope fin_scope. Number Notation Fin.t of_uint to_uint (via I mapping [[Fin.F1] => I1, [Fin.FS] => IS]) : fin_scope. Now :g:`2` is parsed as :g:`Fin.FS (Fin.FS Fin.F1)`, that is :g:`@Fin.FS _ (@Fin.FS _ (@Fin.F1 _))`. .. coqtop:: all Check 2. which can be of type :g:`Fin.t 3` (numbers :g:`0`, :g:`1` and :g:`2`) .. coqtop:: all Check 2 : Fin.t 3. but cannot be of type :g:`Fin.t 2` (only :g:`0` and :g:`1`) .. coqtop:: all fail Check 2 : Fin.t 2. .. _example-string-notation-parameterized-inductive: .. example:: String Notation with a parameterized inductive type The parameter :g:`Byte.byte` for the parameterized inductive type :g:`list` is given through an :ref:`abbreviation `. .. coqtop:: in reset Notation string := (list Byte.byte) (only parsing). Definition id_string := @id string. String Notation string id_string id_string : list_scope. .. coqtop:: all Check "abc"%list. .. _TacticNotation: Tactic Notations ----------------- Tactic notations allow customizing the syntax of tactics. .. todo move to the Ltac chapter .. todo to discuss after moving to the ltac chapter: any words of wisdom on when to use tactic notation vs ltac? can you run into problems if you shadow another tactic or tactic notation? If so, how to avoid ambiguity? .. cmd:: Tactic Notation {? ( at level @natural ) } {+ @ltac_production_item } := @ltac_expr .. insertprodn ltac_production_item ltac_production_item .. prodn:: ltac_production_item ::= @string | @ident {? ( @ident {? 
, @string } ) } Defines a *tactic notation*, which extends the parsing and pretty-printing of tactics. This command supports the :attr:`local` attribute, which limits the notation to the current module. :token:`natural` The parsing precedence to assign to the notation. This information is particularly relevant for notations for tacticals. Levels can be in the range 0 .. 5 (default is 5). :n:`{+ @ltac_production_item }` The notation syntax. Notations for simple tactics should begin with a :token:`string`. Note that `Tactic Notation foo := idtac` is not valid; it should be `Tactic Notation "foo" := idtac`. .. todo: "Tactic Notation constr := idtac" gives a nice message, would be good to show that message for the "foo" example above. :token:`string` represents a literal value in the notation :n:`@ident` is the name of a grammar nonterminal listed in the table below. In a few cases, to maintain backward compatibility, the name differs from the nonterminal name used elsewhere in the documentation. :n:`( @ident__parm {? , @string__s } )` :n:`@ident__parm` is the parameter name associated with :n:`@ident`. The :n:`@string__s` is the separator string to use when :n:`@ident` specifies a list with separators (i.e. :n:`@ident` ends with `_list_sep`). :n:`@ltac_expr` The tactic expression to substitute for the notation. :n:`@ident__parm` tokens appearing in :n:`@ltac_expr` are substituted with the associated nonterminal value. For example, the following command defines a notation with a single parameter `x`. .. coqtop:: in Tactic Notation "destruct_with_eqn" constr(x) := destruct x eqn:?. For a complex example, examine the 16 `Tactic Notation "setoid_replace"`\s defined in :file:`$COQLIB/theories/Classes/SetoidTactics.v`, which are designed to accept any subset of 4 optional parameters. The nonterminals that can be specified in the tactic notation are: .. todo uconstr represents a type with holes. At the moment uconstr doesn't appear in the documented grammar. Maybe worth resurrecting with a better name, maybe "open_term"? see https://github.com/coq/coq/pull/11718#discussion_r413721234 .. todo 'open_constr' appears to be another possible value based on the message from "Tactic Notation open_constr := idtac". Also (at least) "ref", "string", "preident", "int" and "ssrpatternarg". (from reading .v files). Looks like any string passed to "make0" in the code is valid. But do we want to support all these? @JasonGross's opinion here: https://github.com/coq/coq/pull/11718#discussion_r415387421 ..
list-table:: :header-rows: 1 * - Specified :token:`ident` - Parsed as - Interpreted as - as in tactic * - ``ident`` - :token:`ident` - a user-given name - :tacn:`intro` * - ``simple_intropattern`` - :token:`simple_intropattern` - an introduction pattern - :tacn:`assert` `as` * - ``hyp`` - :token:`ident` - a hypothesis defined in context - :tacn:`clear` * - ``reference`` - :token:`qualid` - a qualified identifier - name of an |Ltac|-defined tactic * - ``smart_global`` - :token:`reference` - a global reference of term - :tacn:`unfold`, :tacn:`with_strategy` * - ``constr`` - :token:`one_term` - a term - :tacn:`exact` * - ``uconstr`` - :token:`one_term` - an untyped term - :tacn:`refine` * - ``integer`` - :token:`integer` - an integer - * - ``int_or_var`` - :token:`int_or_var` - an integer - :tacn:`do` * - ``strategy_level`` - :token:`strategy_level` - a strategy level - * - ``strategy_level_or_var`` - :token:`strategy_level_or_var` - a strategy level - :tacn:`with_strategy` * - ``tactic`` - :token:`ltac_expr` - a tactic - * - ``tactic``\ *n* (*n* in 0..5) - :token:`ltac_expr`\ *n* - a tactic at level *n* - * - *entry*\ ``_list`` - :n:`{* entry }` - a list of how *entry* is interpreted - * - ``ne_``\ *entry*\ ``_list`` - :n:`{+ entry }` - a list of how *entry* is interpreted - * - *entry*\ ``_list_sep`` - :n:`{*s entry }` - a list of how *entry* is interpreted - * - ``ne_``\ *entry*\ ``_list_sep`` - :n:`{+s entry }` - a list of how *entry* is interpreted - .. todo: notation doesn't support italics .. note:: In order to be bound in tactic definitions, each syntactic entry for argument type must include the case of a simple |Ltac| identifier as part of what it parses. This is naturally the case for ``ident``, ``simple_intropattern``, ``reference``, ``constr``, ... but not for ``integer`` nor for ``strategy_level``. This is the reason for introducing special entries ``int_or_var`` and ``strategy_level_or_var`` which evaluate to integers or strategy levels only, respectively, but which syntactically includes identifiers in order to be usable in tactic definitions. .. note:: The *entry*\ ``_list*`` and ``ne_``\ *entry*\ ``_list*`` entries can be used in primitive tactics or in other notations at places where a list of the underlying entry can be used: entry is either ``constr``, ``hyp``, ``integer``, ``reference``, ``strategy_level``, ``strategy_level_or_var``, or ``int_or_var``. .. rubric:: Footnotes .. [#and_or_levels] which are the levels effectively chosen in the current implementation of Coq .. [#no_associativity] Coq accepts notations declared as nonassociative but the parser on which Coq is built, namely Camlp5, currently does not implement ``no associativity`` and replaces it with ``left associativity``; hence it is the same for Coq: ``no associativity`` is in fact ``left associativity`` for the purposes of parsing coq-8.15.0/doc/sphinx/using/000077500000000000000000000000001417001151100155615ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/using/libraries/000077500000000000000000000000001417001151100175355ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/using/libraries/funind.rst000066400000000000000000000325271417001151100215630ustar00rootroot00000000000000Functional induction ==================== .. _advanced-recursive-functions: Advanced recursive functions ---------------------------- The following command is available when the ``FunInd`` library has been loaded via ``Require Import FunInd``: .. 
cmd:: Function @fix_definition {* with @fix_definition } This command is a generalization of :cmd:`Fixpoint`. It is a wrapper for several ways of defining a function *and* other useful related objects, namely: an induction principle that reflects the recursive structure of the function (see :tacn:`functional induction`) and its fixpoint equality. This defines a function similar to those defined by :cmd:`Fixpoint`. As in :cmd:`Fixpoint`, the decreasing argument must be given (unless the function is not recursive), but it might not necessarily be *structurally* decreasing. Use the :n:`@fixannot` clause to name the decreasing argument *and* to describe which kind of decreasing criteria to use to ensure termination of recursive calls. :cmd:`Function` also supports the :n:`with` clause to create mutually recursive definitions, however this feature is limited to structurally recursive functions (i.e. when :n:`@fixannot` is a :n:`struct` clause). See :tacn:`functional induction` and :cmd:`Functional Scheme` for how to use the induction principle to reason easily about the function. The form of the :n:`@fixannot` clause determines which definition mechanism :cmd:`Function` uses. (Note that references to :n:`ident` below refer to the name of the function being defined.): * If :n:`@fixannot` is not specified, :cmd:`Function` defines the nonrecursive function :token:`ident` as if it was declared with :cmd:`Definition`. In addition, the following are defined: + :token:`ident`\ ``_rect``, :token:`ident`\ ``_rec`` and :token:`ident`\ ``_ind``, which reflect the pattern matching structure of :token:`term` (see :cmd:`Inductive`); + The inductive :n:`R_@ident` corresponding to the graph of :token:`ident` (silently); + :token:`ident`\ ``_complete`` and :token:`ident`\ ``_correct`` which are inversion information linking the function and its graph. * If :n:`{ struct ... }` is specified, :cmd:`Function` defines the structural recursive function :token:`ident` as if it was declared with :cmd:`Fixpoint`. In addition, the following are defined: + The same objects as above; + The fixpoint equation of :token:`ident`: :n:`@ident`\ ``_equation``. * If :n:`{ measure ... }` or :n:`{ wf ... }` are specified, :cmd:`Function` defines a recursive function by well-founded recursion. The module ``Recdef`` of the standard library must be loaded for this feature. + :n:`{measure @one_term__1 {? @ident } {? @one_term__2 } }`\: where :n:`@ident` is the decreasing argument and :n:`@one_term__1` is a function from the type of :n:`@ident` to :g:`nat` for which the decreasing argument decreases (for the :g:`lt` order on :g:`nat`) for each recursive call of the function. The parameters of the function are bound in :n:`@one_term__1`. + :n:`{wf @one_term @ident }`\: where :n:`@ident` is the decreasing argument and :n:`@one_term` is an ordering relation on the type of :n:`@ident` (i.e. of type `T`\ :math:`_{\sf ident}` → `T`\ :math:`_{\sf ident}` → ``Prop``) for which the decreasing argument decreases for each recursive call of the function. The order must be well-founded. The parameters of the function are bound in :n:`@one_term`. If the clause is ``measure`` or ``wf``, the user is left with some proof obligations that will be used to define the function. These proofs are: proofs that each recursive call is actually decreasing with respect to the given criteria, and (if the criteria is `wf`) a proof that the ordering relation is well-founded. 
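For instance, here is a minimal sketch (not one of the library's own examples; the function `half` and all names are made up for illustration) of a :n:`measure`-based definition together with the single proof obligation it generates. The exact statement of the generated subgoal may vary slightly, but a robust tactic such as :tacn:`lia` is normally enough to discharge it:

.. coqtop:: in reset

   Require Import FunInd Recdef Lia.

   Function half (n : nat) {measure (fun x => x) n} : nat :=
     match n with
     | O => O
     | S O => O
     | S (S p) => S (half p)
     end.
   Proof.
     (* Obligation: the argument p of the recursive call is smaller than *)
     (* the original argument n = S (S p) for the given measure.         *)
     intros; simpl; lia.
   Defined.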
Once proof obligations are discharged, the following objects are defined: + The same objects as with the ``struct`` clause; + The lemma :n:`@ident`\ ``_tcc`` which collects all proof obligations in one property; + The lemmas :n:`@ident`\ ``_terminate`` and :n:`@ident`\ ``_F`` which will be inlined during extraction of :n:`@ident`. The way this recursive function is defined is the subject of several papers by Yves Bertot and Antonia Balaa on the one hand, and Gilles Barthe, Julien Forest, David Pichardie, and Vlad Rusu on the other hand. .. note:: To obtain the right principle, it is better to put rigid parameters of the function as first arguments. For example it is better to define plus like this: .. coqtop:: reset none Require Import FunInd. .. coqtop:: all Function plus (m n : nat) {struct n} : nat := match n with | 0 => m | S p => S (plus m p) end. than like this: .. coqtop:: reset none Require Import FunInd. .. coqtop:: all Function plus (n m : nat) {struct n} : nat := match n with | 0 => m | S p => S (plus p m) end. *Limitations* :token:`term` must be built as a *pure pattern matching tree* (:g:`match … with`) with applications only *at the end* of each branch. :cmd:`Function` does not support partial application of the function being defined. Thus, the following example cannot be accepted due to the presence of partial application of :g:`wrong` in the body of :g:`wrong`: .. coqtop:: none Require List. Import List.ListNotations. .. coqtop:: all fail Function wrong (C:nat) : nat := List.hd 0 (List.map wrong (C::nil)). For now, dependent cases are not treated for non-structurally terminating functions. .. exn:: The recursive argument must be specified. :undocumented: .. exn:: No argument name @ident. :undocumented: .. exn:: Cannot use mutual definition with well-founded recursion or measure. :undocumented: .. warn:: Cannot define graph for @ident. The generation of the graph relation (:n:`R_@ident`) used to compute the induction scheme of ident raised a typing error. Only :token:`ident` is defined; the induction scheme will not be generated. This error happens generally when: - the definition uses pattern matching on dependent types, which :cmd:`Function` cannot deal with yet. - the definition is not a *pattern matching tree* as explained above. .. warn:: Cannot define principle(s) for @ident. The generation of the graph relation (:n:`R_@ident`) succeeded but the induction principle could not be built. Only :token:`ident` is defined. Please report. .. warn:: Cannot build functional inversion principle. :tacn:`functional inversion` will not be available for the function. Tactics ------- .. tacn:: functional induction @term {? using @one_term {? with @bindings } } {? as @simple_intropattern } Performs case analysis and induction following the definition of a function :token:`qualid`, which must be fully applied to its arguments as part of :token:`term`. It uses a principle generated by :cmd:`Function` or :cmd:`Functional Scheme`. Note that this tactic is only available after a ``Require Import FunInd``. See the :cmd:`Function` command. :n:`using @one_term` Specifies the induction principle (aka elimination scheme). :n:`with @bindings` Specifies the arguments of the induction principle. :n:`as @simple_intropattern` Provides names for the introduced variables. .. example:: .. coqtop:: reset all Require Import FunInd. Functional Scheme minus_ind := Induction for minus Sort Prop. Check minus_ind. Lemma le_minus (n m:nat) : n - m <= n. 
functional induction (minus n m) using minus_ind; simpl; auto. Qed. .. note:: :n:`functional induction (f x1 x2 x3)` is actually a wrapper for :n:`induction x1, x2, x3, (f x1 x2 x3) using @qualid` followed by a cleaning phase, where :n:`@qualid` is the induction principle registered for :g:`f` (by the :cmd:`Function` or :cmd:`Functional Scheme` command) corresponding to the sort of the goal. Therefore :tacn:`functional induction` may fail if the induction scheme :n:`@qualid` is not defined. .. note:: There is a difference between obtaining an induction scheme for a function by using :cmd:`Function` and by using :cmd:`Functional Scheme` after a normal definition using :cmd:`Fixpoint` or :cmd:`Definition`. .. exn:: Cannot find induction information on @qualid. :undocumented: .. exn:: Not the right number of induction arguments. :undocumented: .. tacn:: functional inversion {| @ident | @natural } {? @qualid } Performs inversion on hypothesis :n:`@ident` of the form :n:`@qualid {+ @term} = @term` or :n:`@term = @qualid {+ @term}` when :n:`@qualid` is defined using :cmd:`Function`. Note that this tactic is only available after a ``Require Import FunInd``. :n:`@natural` Does the same thing as :n:`intros until @natural` followed by :n:`functional inversion @ident` where :token:`ident` is the identifier for the last introduced hypothesis. :n:`@qualid` If the hypothesis :token:`ident` (or :token:`natural`) has a type of the form :n:`@qualid__1 {+ @term__i } = @qualid__2 {+ @term__j }` where :n:`@qualid__1` and :n:`@qualid__2` are valid candidates for functional inversion, this variant allows choosing which :token:`qualid` is inverted. .. exn:: Hypothesis @ident must contain at least one Function. :undocumented: .. exn:: Cannot find inversion information for hypothesis @ident. This error may be raised when some inversion lemma failed to be generated by Function. .. _functional-scheme: Generation of induction principles with ``Functional`` ``Scheme`` ----------------------------------------------------------------- .. cmd:: Functional Scheme @func_scheme_def {* with @func_scheme_def } .. insertprodn func_scheme_def func_scheme_def .. prodn:: func_scheme_def ::= @ident := Induction for @qualid Sort @sort_family An experimental high-level tool that automatically generates induction principles corresponding to functions that may be mutually recursive. The command generates an induction principle named :n:`@ident` for each given function named :n:`@qualid`. The :n:`@qualid`\s must be given in the same order as when they were defined. Note that the command must be made available via :cmd:`Require Import` ``FunInd``. .. warning:: There is a difference between induction schemes generated by the command :cmd:`Functional Scheme` and those generated by :cmd:`Function`. Indeed, :cmd:`Function` generally produces smaller principles that are closer to how a user would implement them. See :ref:`advanced-recursive-functions` for details. .. example:: Induction scheme for div2. We define the function div2 as follows: .. coqtop:: all Require Import FunInd. Require Import Arith. Fixpoint div2 (n:nat) : nat := match n with | O => 0 | S O => 0 | S (S n') => S (div2 n') end. A principle of induction corresponding to the recursive structure of `div2` is defined by the command: .. coqtop:: all Functional Scheme div2_ind := Induction for div2 Sort Prop. You may now look at the type of div2_ind: .. coqtop:: all Check div2_ind. We can now prove the following lemma using this principle: ..
coqtop:: all Lemma div2_le' : forall n:nat, div2 n <= n. intro n. pattern n, (div2 n). apply div2_ind; intros. auto with arith. auto with arith. simpl; auto with arith. Qed. We can use directly the functional induction (:tacn:`functional induction`) tactic instead of the pattern/apply trick: .. coqtop:: all Reset div2_le'. Lemma div2_le : forall n:nat, div2 n <= n. intro n. functional induction (div2 n). auto with arith. auto with arith. auto with arith. Qed. .. example:: Induction scheme for tree_size. We define trees by the following mutual inductive type: .. original LaTeX had "Variable" instead of "Axiom", which generates an ugly warning .. coqtop:: reset all Axiom A : Set. Inductive tree : Set := node : A -> forest -> tree with forest : Set := | empty : forest | cons : tree -> forest -> forest. We define the function tree_size that computes the size of a tree or a forest. Note that we use ``Function`` which generally produces better principles. .. coqtop:: all Require Import FunInd. Function tree_size (t:tree) : nat := match t with | node A f => S (forest_size f) end with forest_size (f:forest) : nat := match f with | empty => 0 | cons t f' => (tree_size t + forest_size f') end. Notice that the induction principles ``tree_size_ind`` and ``forest_size_ind`` generated by ``Function`` are not mutual. .. coqtop:: all Check tree_size_ind. Mutual induction principles following the recursive structure of ``tree_size`` and ``forest_size`` can be generated by the following command: .. coqtop:: all Functional Scheme tree_size_ind2 := Induction for tree_size Sort Prop with forest_size_ind2 := Induction for forest_size Sort Prop. You may now look at the type of `tree_size_ind2`: .. coqtop:: all Check tree_size_ind2. coq-8.15.0/doc/sphinx/using/libraries/index.rst000066400000000000000000000014441417001151100214010ustar00rootroot00000000000000.. _libraries: ===================== Libraries and plugins ===================== Coq is distributed with a standard library and a set of internal plugins (most of which provide tactics that have already been presented in :ref:`writing-proofs`). This chapter presents this standard library and some of these internal plugins which provide features that are not tactics. In addition, Coq has a rich ecosystem of external libraries and plugins. These libraries and plugins can be browsed online through the `Coq Package Index `_ and installed with the `opam package manager `_. .. toctree:: :maxdepth: 1 ../../language/coq-library ../../addendum/extraction ../../addendum/miscellaneous-extensions funind writing coq-8.15.0/doc/sphinx/using/libraries/writing.rst000066400000000000000000000051521417001151100217550ustar00rootroot00000000000000Writing Coq libraries and plugins =================================== This section presents the part of the Coq language that is useful only to library and plugin authors. A tutorial for writing Coq plugins is available in the Coq repository in `doc/plugin_tutorial `_. Deprecating library objects or tactics -------------------------------------- You may use the following :term:`attribute` to deprecate a notation or tactic. When renaming a definition or theorem, you can introduce a deprecated compatibility alias using :cmd:`Notation (abbreviation)` (see :ref:`the example below `). .. attr:: deprecated ( {? since = @string , } {? note = @string } ) :name: deprecated At least one of :n:`since` or :n:`note` must be present. If both are present, either one may appear first and they must be separated by a comma. 
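For instance, both of the following declarations are accepted, since the two fields may be given in either order (a minimal sketch with made-up tactic names, shown only to illustrate the syntax of the attribute):

.. coqtop:: in reset

   Ltac new_tac := idtac.
   #[deprecated(since="1.0", note="Use new_tac instead.")]
   Ltac old_tac := new_tac.
   #[deprecated(note="Use new_tac instead.", since="1.0")]
   Ltac old_tac' := new_tac.

Both declarations behave identically; the warning is only emitted when the deprecated tactics are actually used.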
This attribute is supported by the following commands: :cmd:`Ltac`, :cmd:`Tactic Notation`, :cmd:`Notation`, :cmd:`Infix`, :cmd:`Ltac2`, :cmd:`Ltac2 Notation`, :cmd:`Ltac2 external`. It can trigger the following warnings: .. warn:: Tactic @qualid is deprecated since @string__since. @string__note. Tactic Notation @qualid is deprecated since @string__since. @string__note. Notation @string is deprecated since @string__since. @string__note. Ltac2 definition @qualid is deprecated since @string__since. @string__note. Ltac2 alias @qualid is deprecated since @string__since. @string__note. Ltac2 notation {+ @ltac2_scope } is deprecated since @string__since. @string__note. :n:`@qualid` or :n:`@string` is the notation, :n:`@string__since` is the version number, :n:`@string__note` is the note (usually explains the replacement). .. example:: Deprecating a tactic. .. coqtop:: all abort warn #[deprecated(since="0.9", note="Use idtac instead.")] Ltac foo := idtac. Goal True. Proof. now foo. .. _compatibility-alias: .. example:: Introducing a compatibility alias Let's say your library initially contained: .. coqtop:: in Definition foo x := S x. and you want to rename `foo` into `bar`, but you want to avoid breaking your users' code without advanced notice. To do so, replace the previous code by the following: .. coqtop:: in reset Definition bar x := S x. #[deprecated(since="1.2", note="Use bar instead.")] Notation foo := bar (only parsing). Then, the following code still works, but emits a warning: .. coqtop:: all warn Check (foo 0). coq-8.15.0/doc/sphinx/using/tools/000077500000000000000000000000001417001151100167215ustar00rootroot00000000000000coq-8.15.0/doc/sphinx/using/tools/coqdoc.rst000066400000000000000000000360751417001151100207360ustar00rootroot00000000000000.. index:: coqdoc .. _coqdoc: Documenting Coq files with coqdoc ----------------------------------- coqdoc is a documentation tool for the proof assistant Coq, similar to ``javadoc`` or ``ocamldoc``. The task of coqdoc is #. to produce a nice |Latex| and/or HTML document from Coq source files, readable for a human and not only for the proof assistant; #. to help the user navigate his own (or third-party) sources. Principles ~~~~~~~~~~ Documentation is inserted into Coq files as *special comments*. Thus your files will compile as usual, whether you use coqdoc or not. coqdoc presupposes that the given Coq files are well-formed (at least lexically). Documentation starts with ``(**``, followed by a space, and ends with ``*)``. The documentation format is inspired by Todd A. Coram’s *Almost Free Text (AFT)* tool: it is mainly ``ASCII`` text with some syntax-light controls, described below. coqdoc is robust: it shouldn’t fail, whatever the input is. But remember: “garbage in, garbage out”. Coq material inside documentation. ++++++++++++++++++++++++++++++++++++ Coq material is quoted between the delimiters ``[`` and ``]``. Square brackets may be nested, the inner ones being understood as being part of the quoted code (thus you can quote a term like ``let id := fun [T : Type] (x : t) => x in id 0`` by writing ``[let id := fun [T : Type] (x : t) => x in id 0]``). Inside quotations, the code is pretty-printed the same way as in code parts. Preformatted vernacular is enclosed by ``[[`` and ``]]``. The former must be followed by a newline and the latter must follow a newline. Pretty-printing. ++++++++++++++++ coqdoc uses different faces for identifiers and keywords. 
The pretty- printing of Coq tokens (identifiers or symbols) can be controlled using one of the following commands: :: (** printing *token* %...LATEX...% #...html...# *) or :: (** printing *token* $...LATEX math...$ #...html...# *) It gives the |Latex| and HTML texts to be produced for the given Coq token. Either the |Latex| or the HTML rule may be omitted, causing the default pretty-printing to be used for this token. The printing for one token can be removed with :: (** remove printing *token* *) Initially, the pretty-printing table contains the following mapping: ===== === ==== ===== === ==== ==== === `->` → `<-` ← `*` × `<=` ≤ `>=` ≥ `=>` ⇒ `<>` ≠ `<->` ↔ `|-` ⊢ `\\/` ∨ `/\\` ∧ `~` ¬ ===== === ==== ===== === ==== ==== === Any of these can be overwritten or suppressed using the printing commands. .. note:: The recognition of tokens is done by a (``ocaml``) lex automaton and thus applies the longest-match rule. For instance, `->~` is recognized as a single token, where Coq sees two tokens. It is the responsibility of the user to insert space between tokens *or* to give pretty-printing rules for the possible combinations, e.g. :: (** printing ->~ %\ensuremath{\rightarrow\lnot}% *) Sections ++++++++ Sections are introduced by 1 to 4 asterisks at the beginning of a line followed by a space and the title of the section. One asterisk is a section, two a subsection, etc. .. example:: :: (** * Well-founded relations In this section, we introduce... *) Lists. ++++++ List items are introduced by a leading dash. coqdoc uses whitespace to determine the depth of a new list item and which text belongs in which list items. A list ends when a line of text starts at or before the level of indenting of the list’s dash. A list item’s dash must always be the first non-space character on its line (so, in particular, a list can not begin on the first line of a comment - start it on the second line instead). .. example:: :: We go by induction on [n]: - If [n] is 0... - If [n] is [S n'] we require... two paragraphs of reasoning, and two subcases: - In the first case... - In the second case... So the theorem holds. Rules. ++++++ More than 4 leading dashes produce a horizontal rule. Emphasis. +++++++++ Text can be italicized by enclosing it in underscores. A non-identifier character must precede the leading underscore and follow the trailing underscore, so that uses of underscores in names aren’t mistaken for emphasis. Usually, these are spaces or punctuation. :: This sentence contains some _emphasized text_. Escaping to |Latex| and HTML. +++++++++++++++++++++++++++++++ Pure |Latex| or HTML material can be inserted using the following escape sequences: + ``$...LATEX stuff...$`` inserts some |Latex| material in math mode. Simply discarded in HTML output. + ``%...LATEX stuff...%`` inserts some |Latex| material. Simply discarded in HTML output. + ``#...HTML stuff...#`` inserts some HTML material. Simply discarded in |Latex| output. .. note:: to simply output the characters ``$``, ``%`` and ``#`` and escaping their escaping role, these characters must be doubled. Verbatim ++++++++ Verbatim material is introduced by a leading ``<<`` and closed by ``>>`` at the beginning of a line. .. example:: :: Here is the corresponding caml code: << let rec fact n = if n <= 1 then 1 else n * fact (n-1) >> Verbatim material on a single line is also possible (assuming that ``>>`` is not part of the text to be presented as verbatim). .. 
example:: :: Here is the corresponding caml expression: << fact (n-1) >> Hyperlinks ++++++++++ Hyperlinks can be inserted into the HTML output, so that any identifier is linked to the place of its definition. ``coqc file.v`` automatically dumps localization information in ``file.glob`` or appends it to a file specified using the option ``--dump-glob file``. Take care of erasing this global file, if any, when starting the whole compilation process. Then invoke coqdoc or ``coqdoc --glob-from file`` to tell coqdoc to look for name resolutions in the file ``file`` (it will look in ``file.glob`` by default). Identifiers from the Coq standard library are linked to the Coq website ``_. This behavior can be changed using command line options ``--no-externals`` and ``--coqlib_url``; see below. .. _coqdoc-hide-show: Hiding / Showing parts of the source ++++++++++++++++++++++++++++++++++++ Some parts of the source can be hidden using command line options ``-g`` and ``-l`` (see below), or using such comments: :: (* begin hide *) *some Coq material* (* end hide *) Conversely, some parts of the source which would be hidden can be shown using such comments: :: (* begin show *) *some Coq material* (* end show *) The latter cannot be used around some inner parts of a proof, but can be used around a whole proof. Lastly, it is possible to adopt a middle-ground approach when the desired output is HTML, where a given snippet of Coq material is hidden by default, but can be made visible with user interaction. :: (* begin details *) *some Coq material* (* end details *) There is also an alternative syntax available. :: (* begin details : Some summary describing the snippet *) *some Coq material* (* end details *) Usage ~~~~~ coqdoc is invoked on a shell command line as follows: ``coqdoc ``. Any command line argument which is not an option is considered to be a file (even if it starts with a ``-``). Coq files are identified by the suffixes ``.v`` and ``.g`` and |Latex| files by the suffix ``.tex``. :HTML output: This is the default output format. One HTML file is created for each Coq file given on the command line, together with a file ``index.html`` (unless ``option-no-index is passed``). The HTML pages use a style sheet named ``style.css``. Such a file is distributed with coqdoc. :|Latex| output: A single |Latex| file is created, on standard output. It can be redirected to a file using the option ``-o``. The order of files on the command line is kept in the final document. |Latex| files given on the command line are copied ‘as is’ in the final document . DVI and PostScript can be produced directly with the options ``-dvi`` and ``-ps`` respectively. :TEXmacs output: To translate the input files to TEXmacs format, to be used by the TEXmacs Coq interface. Command line options ++++++++++++++++++++ **Overall options** :--HTML: Select a HTML output. :--|Latex|: Select a |Latex| output. :--dvi: Select a DVI output. :--ps: Select a PostScript output. :--texmacs: Select a TEXmacs output. :--stdout: Write output to stdout. :-o file, --output file: Redirect the output into the file ‘file’ (meaningless with ``-html``). :-d dir, --directory dir: Output files into directory ‘dir’ instead of the current directory (option ``-d`` does not change the filename specified with the option ``-o``, if any). :--body-only: Suppress the header and trailer of the final document. Thus, you can insert the resulting document into a larger one. 
:-p string, --preamble string: Insert some material in the |Latex| preamble, right before ``\begin{document}`` (meaningless with ``-html``). :--vernac-file file,--tex-file file: Considers the file ‘file’ respectively as a ``.v`` (or ``.g``) file or a ``.tex`` file. :--files-from file: Read filenames to be processed from the file ‘file’ as if they were given on the command line. Useful for program sources split up into several directories. :-q, --quiet: Be quiet. Do not print anything except errors. :-h, --help: Give a short summary of the options and exit. :-v, --version: Print the version and exit. **Index options** The default behavior is to build an index, for the HTML output only, into ``index.html``. :--no-index: Do not output the index. :--multi-index: Generate one page for each category and each letter in the index, together with a top page ``index.html``. :--index string: Make the filename of the index string instead of “index”. Useful since “index.html” is special. **Table of contents option** :-toc, --table-of-contents: Insert a table of contents. For a |Latex| output, it inserts a ``\tableofcontents`` at the beginning of the document. For a HTML output, it builds a table of contents into ``toc.html``. :--toc-depth int: Only include headers up to depth ``int`` in the table of contents. **Hyperlink options** :--glob-from file: Make references using Coq globalizations from file file. (Such globalizations are obtained with Coq option ``-dump-glob``). :--no-externals: Do not insert links to the Coq standard library. :--external url coqdir: Use given URL for linking references whose name starts with prefix ``coqdir``. :--coqlib_url url: Set base URL for the Coq standard library (default is ``_). This is equivalent to ``--external url Coq``. :-R dir coqdir: Recursively map physical directory dir to Coq logical directory ``coqdir`` (similarly to Coq option ``-R``). :-Q dir coqdir: Map physical directory dir to Coq logical directory ``coqdir`` (similarly to Coq option ``-Q``). .. note:: options ``-R`` and ``-Q`` only have effect on the files *following* them on the command line, so you will probably need to put this option first. **Title options** :-s , --short: Do not insert titles for the files. The default behavior is to insert a title like “Library Foo” for each file. :--lib-name string: Print “string Foo” instead of “Library Foo” in titles. For example “Chapter” and “Module” are reasonable choices. :--no-lib-name: Print just “Foo” instead of “Library Foo” in titles. :--lib-subtitles: Look for library subtitles. When enabled, the beginning of each file is checked for a comment of the form: :: (** * ModuleName : text *) where ``ModuleName`` must be the name of the file. If it is present, the text is used as a subtitle for the module in appropriate places. :-t string, --title string: Set the document title. **Contents options** :-g, --gallina: Do not print proofs. :-l, --light: Light mode. Suppress proofs (as with ``-g``) and the following commands: + [Recursive] Tactic Definition + Hint / Hints + Require + Transparent / Opaque + Implicit Argument / Implicits + Section / Variable / Hypothesis / End The behavior of options ``-g`` and ``-l`` can be locally overridden using the ``(* begin show *) … (* end show *)`` environment (see above). There are a few options that control the parsing of comments: :--parse-comments: Parse regular comments delimited by ``(*`` and ``*)`` as well. They are typeset inline. :--plain-comments: Do not interpret comments, simply copy them as plain-text. 
:--interpolate: Use the globalization information to typeset identifiers appearing in Coq escapings inside comments. **Language options** The default behavior is to assume ASCII 7 bit input files. :-latin1, --latin1: Select ISO-8859-1 input files. It is equivalent to --inputenc latin1 --charset iso-8859-1. :-utf8, --utf8: Set --inputenc utf8x for |Latex| output and--charset utf-8 for HTML output. Also use Unicode replacements for a couple of standard plain ASCII notations such as → for ``->`` and ∀ for ``forall``. |Latex| UTF-8 support can be found at ``_. For the interpretation of Unicode characters by |Latex|, extra packages which coqdoc does not provide by default might be required, such as textgreek for some Greek letters or ``stmaryrd`` for some mathematical symbols. If a Unicode character is missing an interpretation in the utf8x input encoding, add ``\DeclareUnicodeCharacter{code}{LATEX-interpretation}``. Packages and declarations can be added with option ``-p``. :--inputenc string: Give a |Latex| input encoding, as an option to |Latex| package ``inputenc``. :--charset string: Specify the HTML character set, to be inserted in the HTML header. The coqdoc |Latex| style file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In case you choose to produce a document without the default |Latex| preamble (by using option ``--no-preamble``), then you must insert into your own preamble the command :: \usepackage{coqdoc} The package optionally takes the argument ``[color]`` to typeset identifiers with colors (this requires the ``xcolor`` package). Then you may alter the rendering of the document by redefining some macros: :coqdockw, coqdocid, …: The one-argument macros for typesetting keywords and identifiers. Defaults are sans-serif for keywords and italic for identifiers.For example, if you would like a slanted font for keywords, you may insert :: \renewcommand{\coqdockw}[1]{\textsl{#1}} anywhere between ``\usepackage{coqdoc}`` and ``\begin{document}``. :coqdocmodule: One-argument macro for typesetting the title of a ``.v`` file. Default is :: \newcommand{\coqdocmodule}[1]{\section*{Module #1}} and you may redefine it using ``\renewcommand``. coq-8.15.0/doc/sphinx/using/tools/index.rst000066400000000000000000000013151417001151100205620ustar00rootroot00000000000000.. _tools: ================================ Command-line and graphical tools ================================ This chapter presents the command-line tools that users will need to build their Coq project, the documentation of the CoqIDE standalone user interface and the documentation of the parallel proof processing feature that is supported by CoqIDE and several other user interfaces. A list of available user interfaces to interact with Coq is available on the `Coq website `_. .. toctree:: :maxdepth: 1 ../../practical-tools/coq-commands ../../practical-tools/utilities coqdoc ../../practical-tools/coqide ../../addendum/parallel-proof-processing coq-8.15.0/doc/sphinx/zebibliography.html.rst000066400000000000000000000013311417001151100211410ustar00rootroot00000000000000.. There are multiple issues with sphinxcontrib-bibtex that we have to work around: - The list of cited entries is computed right after encountering `.. bibliography`, so the file containing that command has to come last alphabetically: https://sphinxcontrib-bibtex.readthedocs.io/en/latest/usage.html#unresolved-citations-across-documents - `.. 
bibliography::` puts the bibliography on its own page with its own title in LaTeX, but includes it inline without a title in HTML: https://sphinxcontrib-bibtex.readthedocs.io/en/latest/usage.html#mismatch-between-output-of-html-and-latex-backends .. _bibliography: ============== Bibliography ============== .. bibliography:: biblio.bib :cited: coq-8.15.0/doc/sphinx/zebibliography.latex.rst000066400000000000000000000001471417001151100213160ustar00rootroot00000000000000.. See zebibliography.html.rst for details .. _bibliography: .. bibliography:: biblio.bib :cited: coq-8.15.0/doc/stdlib/000077500000000000000000000000001417001151100144045ustar00rootroot00000000000000coq-8.15.0/doc/stdlib/Library.tex000066400000000000000000000043371417001151100165410ustar00rootroot00000000000000\documentclass[11pt]{report} \usepackage[mathletters]{ucs} \usepackage[utf8x]{inputenc} \usepackage[T1]{fontenc} \usepackage{fullpage} \usepackage{amsfonts} \usepackage{amssymb} \usepackage{url} \usepackage[color]{../../lib/coq-core/tools/coqdoc/coqdoc} \input{../common/version} \input{../common/title} \input{../common/macros} \begin{document} \coverpage{The standard library}% {\ } {This material is distributed under the terms of the GNU Lesser General Public License Version 2.1.} \tableofcontents \newpage % \section*{The \Coq\ standard library} This document is a short description of the \Coq\ standard library. This library comes with the system as a complement of the core library (the {\bf Init} library ; see the Reference Manual for a description of this library). It provides a set of modules directly available through the \verb!Require! command. The standard library is composed of the following subdirectories: \begin{description} \item[Logic] Classical logic and dependent equality \item[Bool] Booleans (basic functions and results) \item[Arith] Basic Peano arithmetic \item[ZArith] Basic integer arithmetic \item[Reals] Classical Real Numbers and Analysis \item[Lists] Monomorphic and polymorphic lists (basic functions and results), Streams (infinite sequences defined with co-inductive types) \item[Sets] Sets (classical, constructive, finite, infinite, power set, etc.) \item[Relations] Relations (definitions and basic results). \item[Sorting] Sorted list (basic definitions and heapsort correctness). \item[Wellfounded] Well-founded relations (basic results). \item[Program] Tactics to deal with dependently-typed programs and their proofs. \item[Classes] Standard type class instances on relations and Coq part of the setoid rewriting tactic. \end{description} Each of these subdirectories contains a set of modules, whose specifications (\gallina{} files) have been roughly, and automatically, pasted in the following pages. There is also a version of this document in HTML format on the WWW, which you can access from the \Coq\ home page at \texttt{http://coq.inria.fr/library}. \input{Library.coqdoc} \end{document} coq-8.15.0/doc/stdlib/dune000066400000000000000000000034061417001151100152650ustar00rootroot00000000000000; This is an ad-hoc rule to ease the migration, it should be handled ; natively by Dune in the future. 
(rule (targets index-list.html) (deps make-library-index index-list.html.template hidden-files (source_tree %{project_root}/theories) (source_tree %{project_root}/user-contrib)) (action (chdir %{project_root} ; On windows run will fail (bash "doc/stdlib/make-library-index doc/stdlib/index-list.html doc/stdlib/hidden-files")))) (rule (targets html) (alias stdlib-html) (package coq-doc) (deps ; This will be replaced soon by `theories/**/*.v` soon, thanks to rgrinberg (source_tree %{project_root}/theories) (source_tree %{project_root}/user-contrib) (:header %{project_root}/doc/common/styles/html/coqremote/header.html) (:footer %{project_root}/doc/common/styles/html/coqremote/footer.html) ; For .glob files, should be gone when Coq Dune is smarter. (package coq-core) (package coq-stdlib)) (action (progn (run mkdir -p html) (bash "%{bin:coqdoc} -q -d html --with-header %{header} --with-footer %{footer} --multi-index --html -g -coqlib %{project_root} -R %{project_root}/theories Coq -Q %{project_root}/user-contrib/Ltac2 Ltac2 $(find %{project_root}/theories %{project_root}/user-contrib -name *.v)") (run mv html/index.html html/genindex.html) (with-stdout-to _index.html (progn (cat %{header}) (cat index-list.html) (cat %{footer}))) (run cp _index.html html/index.html)))) ; Installable directories are not yet fully supported by Dune. See ; ocaml/dune#1868. Yet, this makes coq-doc.install a valid target to ; generate the whole Coq documentation. And the result under ; _build/install/default/doc/coq-doc looks just right! (install (files (html as html/stdlib)) (section doc) (package coq-doc)) coq-8.15.0/doc/stdlib/hidden-files000066400000000000000000000061001417001151100166570ustar00rootroot00000000000000theories/btauto/Algebra.v theories/btauto/Btauto.v theories/btauto/Reflect.v theories/derive/Derive.v theories/extraction/ExtrHaskellBasic.v theories/extraction/ExtrHaskellNatInt.v theories/extraction/ExtrHaskellNatInteger.v theories/extraction/ExtrHaskellNatNum.v theories/extraction/ExtrHaskellString.v theories/extraction/ExtrHaskellZInt.v theories/extraction/ExtrHaskellZInteger.v theories/extraction/ExtrHaskellZNum.v theories/extraction/ExtrOcamlBasic.v theories/extraction/ExtrOcamlChar.v theories/extraction/ExtrOCamlInt63.v theories/extraction/ExtrOCamlFloats.v theories/extraction/ExtrOCamlPArray.v theories/extraction/ExtrOcamlIntConv.v theories/extraction/ExtrOcamlNatBigInt.v theories/extraction/ExtrOcamlNatInt.v theories/extraction/ExtrOcamlString.v theories/extraction/ExtrOcamlNativeString.v theories/extraction/ExtrOcamlZBigInt.v theories/extraction/ExtrOcamlZInt.v theories/extraction/Extraction.v theories/funind/FunInd.v theories/funind/Recdef.v theories/ltac/Ltac.v theories/micromega/Ztac.v theories/micromega/DeclConstant.v theories/micromega/Env.v theories/micromega/EnvRing.v theories/micromega/Fourier.v theories/micromega/Fourier_util.v theories/micromega/Lia.v theories/micromega/Lqa.v theories/micromega/Lra.v theories/micromega/MExtraction.v theories/micromega/OrderedRing.v theories/micromega/Psatz.v theories/micromega/QMicromega.v theories/micromega/RMicromega.v theories/micromega/Refl.v theories/micromega/RingMicromega.v theories/micromega/Tauto.v theories/micromega/VarMap.v theories/micromega/ZArith_hints.v theories/micromega/ZCoeff.v theories/micromega/ZMicromega.v theories/micromega/ZifyInst.v theories/micromega/ZifyBool.v theories/micromega/ZifyInt63.v theories/micromega/ZifyUint63.v theories/micromega/ZifySint63.v theories/micromega/ZifyNat.v theories/micromega/ZifyN.v 
theories/micromega/ZifyComparison.v theories/micromega/ZifyClasses.v theories/micromega/ZifyPow.v theories/micromega/Zify.v theories/nsatz/NsatzTactic.v theories/nsatz/Nsatz.v theories/omega/OmegaLemmas.v theories/omega/PreOmega.v theories/quote/Quote.v theories/romega/ROmega.v theories/romega/ReflOmegaCore.v theories/rtauto/Bintree.v theories/rtauto/Rtauto.v theories/setoid_ring/Algebra_syntax.v theories/setoid_ring/ArithRing.v theories/setoid_ring/BinList.v theories/setoid_ring/Cring.v theories/setoid_ring/Field.v theories/setoid_ring/Field_tac.v theories/setoid_ring/Field_theory.v theories/setoid_ring/InitialRing.v theories/setoid_ring/Integral_domain.v theories/setoid_ring/NArithRing.v theories/setoid_ring/Ncring.v theories/setoid_ring/Ncring_initial.v theories/setoid_ring/Ncring_polynom.v theories/setoid_ring/Ncring_tac.v theories/setoid_ring/RealField.v theories/setoid_ring/Ring.v theories/setoid_ring/Ring_base.v theories/setoid_ring/Ring_polynom.v theories/setoid_ring/Ring_tac.v theories/setoid_ring/Ring_theory.v theories/setoid_ring/Rings_Q.v theories/setoid_ring/Rings_R.v theories/setoid_ring/Rings_Z.v theories/setoid_ring/ZArithRing.v theories/ssr/ssrunder.v theories/ssr/ssrsetoid.v theories/Reals/Cauchy/ConstructiveExtra.v theories/Reals/Cauchy/PosExtra.v theories/Reals/Cauchy/QExtra.v coq-8.15.0/doc/stdlib/index-list.html.template000066400000000000000000000512041417001151100211660ustar00rootroot00000000000000

The Coq Standard Library

Here is a short description of the Coq standard library, which is distributed with the system. It provides a set of modules directly available through the Require Import command.

The standard library is composed of the following subdirectories:

Init: The core library (automatically loaded when starting Coq)
theories/Init/Ltac.v theories/Init/Notations.v theories/Init/Datatypes.v theories/Init/Logic.v theories/Init/Logic_Type.v theories/Init/Byte.v theories/Init/Nat.v theories/Init/Decimal.v theories/Init/Hexadecimal.v theories/Init/Number.v theories/Init/Peano.v theories/Init/Specif.v theories/Init/Tactics.v theories/Init/Tauto.v theories/Init/Wf.v (theories/Init/Prelude.v)
Logic: Classical logic, dependent equality, extensionality, choice axioms
theories/Logic/SetIsType.v theories/Logic/StrictProp.v theories/Logic/Classical_Pred_Type.v theories/Logic/Classical_Prop.v (theories/Logic/Classical.v) theories/Logic/ClassicalFacts.v theories/Logic/Decidable.v theories/Logic/Eqdep_dec.v theories/Logic/EqdepFacts.v theories/Logic/Eqdep.v theories/Logic/JMeq.v theories/Logic/ChoiceFacts.v theories/Logic/RelationalChoice.v theories/Logic/ClassicalChoice.v theories/Logic/ClassicalDescription.v theories/Logic/ClassicalEpsilon.v theories/Logic/ClassicalUniqueChoice.v theories/Logic/SetoidChoice.v theories/Logic/Berardi.v theories/Logic/Diaconescu.v theories/Logic/Hurkens.v theories/Logic/ProofIrrelevance.v theories/Logic/ProofIrrelevanceFacts.v theories/Logic/ConstructiveEpsilon.v theories/Logic/Description.v theories/Logic/Epsilon.v theories/Logic/IndefiniteDescription.v theories/Logic/PropExtensionality.v theories/Logic/PropExtensionalityFacts.v theories/Logic/FunctionalExtensionality.v theories/Logic/ExtensionalFunctionRepresentative.v theories/Logic/ExtensionalityFacts.v theories/Logic/WeakFan.v theories/Logic/WKL.v theories/Logic/FinFun.v theories/Logic/PropFacts.v theories/Logic/HLevels.v theories/Logic/Adjointification.v
Structures: Algebraic structures (types with equality, with order, ...). DecidableType* and OrderedType* are there only for compatibility.
theories/Structures/Equalities.v theories/Structures/EqualitiesFacts.v theories/Structures/Orders.v theories/Structures/OrdersTac.v theories/Structures/OrdersAlt.v theories/Structures/OrdersEx.v theories/Structures/OrdersFacts.v theories/Structures/OrdersLists.v theories/Structures/GenericMinMax.v theories/Structures/DecidableType.v theories/Structures/DecidableTypeEx.v theories/Structures/OrderedType.v theories/Structures/OrderedTypeAlt.v theories/Structures/OrderedTypeEx.v
Bool: Booleans (basic functions and results)
theories/Bool/Bool.v theories/Bool/BoolEq.v theories/Bool/BoolOrder.v theories/Bool/DecBool.v theories/Bool/IfProp.v theories/Bool/Sumbool.v theories/Bool/Zerob.v theories/Bool/Bvector.v
Arith: Basic Peano arithmetic
theories/Arith/PeanoNat.v theories/Arith/Le.v theories/Arith/Lt.v theories/Arith/Plus.v theories/Arith/Minus.v theories/Arith/Mult.v theories/Arith/Gt.v theories/Arith/Between.v theories/Arith/Peano_dec.v theories/Arith/Compare_dec.v (theories/Arith/Arith_base.v) (theories/Arith/Arith.v) theories/Arith/Min.v theories/Arith/Max.v theories/Arith/Compare.v theories/Arith/Div2.v theories/Arith/EqNat.v theories/Arith/Euclid.v theories/Arith/Even.v theories/Arith/Bool_nat.v theories/Arith/Factorial.v theories/Arith/Wf_nat.v theories/Arith/Cantor.v
PArith: Binary positive integers
theories/PArith/BinPosDef.v theories/PArith/BinPos.v theories/PArith/Pnat.v theories/PArith/POrderedType.v (theories/PArith/PArith.v)
NArith: Binary natural numbers
theories/NArith/BinNatDef.v theories/NArith/BinNat.v theories/NArith/Nnat.v theories/NArith/Ndigits.v theories/NArith/Ndist.v theories/NArith/Ndec.v theories/NArith/Ndiv_def.v theories/NArith/Ngcd_def.v theories/NArith/Nsqrt_def.v (theories/NArith/NArith.v)
ZArith: Binary integers
theories/ZArith/BinIntDef.v theories/ZArith/BinInt.v theories/ZArith/Zorder.v theories/ZArith/Zcompare.v theories/ZArith/Znat.v theories/ZArith/Zmin.v theories/ZArith/Zmax.v theories/ZArith/Zminmax.v theories/ZArith/Zabs.v theories/ZArith/Zeven.v theories/ZArith/auxiliary.v theories/ZArith/ZArith_dec.v theories/ZArith/Zbool.v theories/ZArith/Zmisc.v theories/ZArith/Wf_Z.v theories/ZArith/Zhints.v (theories/ZArith/ZArith_base.v) theories/ZArith/Zcomplements.v theories/ZArith/Zpow_def.v theories/ZArith/Zpow_alt.v theories/ZArith/Zpower.v theories/ZArith/Zdiv.v theories/ZArith/Zquot.v theories/ZArith/Zeuclid.v (theories/ZArith/ZArith.v) theories/ZArith/Zgcd_alt.v theories/ZArith/Zwf.v theories/ZArith/Znumtheory.v theories/ZArith/Int.v theories/ZArith/Zpow_facts.v theories/ZArith/Zdigits.v
QArith: Rational numbers
theories/QArith/QArith_base.v theories/QArith/Qabs.v theories/QArith/Qpower.v theories/QArith/Qreduction.v theories/QArith/Qring.v theories/QArith/Qfield.v (theories/QArith/QArith.v) theories/QArith/Qreals.v theories/QArith/Qcanon.v theories/QArith/Qcabs.v theories/QArith/Qround.v theories/QArith/QOrderedType.v theories/QArith/Qminmax.v
Numbers: An experimental modular architecture for arithmetic
  Prelude:
theories/Numbers/BinNums.v theories/Numbers/NumPrelude.v theories/Numbers/NaryFunctions.v theories/Numbers/AltBinNotations.v theories/Numbers/DecimalFacts.v theories/Numbers/DecimalNat.v theories/Numbers/DecimalPos.v theories/Numbers/DecimalN.v theories/Numbers/DecimalZ.v theories/Numbers/DecimalQ.v theories/Numbers/DecimalR.v theories/Numbers/DecimalString.v theories/Numbers/HexadecimalFacts.v theories/Numbers/HexadecimalNat.v theories/Numbers/HexadecimalPos.v theories/Numbers/HexadecimalN.v theories/Numbers/HexadecimalZ.v theories/Numbers/HexadecimalQ.v theories/Numbers/HexadecimalR.v theories/Numbers/HexadecimalString.v
  NatInt: Abstract mixed natural/integer/cyclic arithmetic
theories/Numbers/NatInt/NZAdd.v theories/Numbers/NatInt/NZAddOrder.v theories/Numbers/NatInt/NZAxioms.v theories/Numbers/NatInt/NZBase.v theories/Numbers/NatInt/NZMul.v theories/Numbers/NatInt/NZDiv.v theories/Numbers/NatInt/NZMulOrder.v theories/Numbers/NatInt/NZOrder.v theories/Numbers/NatInt/NZDomain.v theories/Numbers/NatInt/NZProperties.v theories/Numbers/NatInt/NZParity.v theories/Numbers/NatInt/NZPow.v theories/Numbers/NatInt/NZSqrt.v theories/Numbers/NatInt/NZLog.v theories/Numbers/NatInt/NZGcd.v theories/Numbers/NatInt/NZBits.v
  Cyclic: Abstract and 63-bits-based cyclic arithmetic
theories/Numbers/Cyclic/Abstract/CyclicAxioms.v theories/Numbers/Cyclic/Abstract/NZCyclic.v theories/Numbers/Cyclic/Abstract/CarryType.v theories/Numbers/Cyclic/Abstract/DoubleType.v theories/Numbers/Cyclic/Int31/Cyclic31.v theories/Numbers/Cyclic/Int31/Ring31.v theories/Numbers/Cyclic/Int31/Int31.v theories/Numbers/Cyclic/Int63/Cyclic63.v theories/Numbers/Cyclic/Int63/PrimInt63.v theories/Numbers/Cyclic/Int63/Int63.v theories/Numbers/Cyclic/Int63/Uint63.v theories/Numbers/Cyclic/Int63/Sint63.v theories/Numbers/Cyclic/Int63/Ring63.v theories/Numbers/Cyclic/ZModulo/ZModulo.v
  Natural: Abstract and 63-bits-words-based natural arithmetic
theories/Numbers/Natural/Abstract/NAdd.v theories/Numbers/Natural/Abstract/NAddOrder.v theories/Numbers/Natural/Abstract/NAxioms.v theories/Numbers/Natural/Abstract/NBase.v theories/Numbers/Natural/Abstract/NDefOps.v theories/Numbers/Natural/Abstract/NIso.v theories/Numbers/Natural/Abstract/NMulOrder.v theories/Numbers/Natural/Abstract/NOrder.v theories/Numbers/Natural/Abstract/NStrongRec.v theories/Numbers/Natural/Abstract/NSub.v theories/Numbers/Natural/Abstract/NDiv.v theories/Numbers/Natural/Abstract/NMaxMin.v theories/Numbers/Natural/Abstract/NParity.v theories/Numbers/Natural/Abstract/NPow.v theories/Numbers/Natural/Abstract/NSqrt.v theories/Numbers/Natural/Abstract/NLog.v theories/Numbers/Natural/Abstract/NGcd.v theories/Numbers/Natural/Abstract/NLcm.v theories/Numbers/Natural/Abstract/NBits.v theories/Numbers/Natural/Abstract/NProperties.v theories/Numbers/Natural/Binary/NBinary.v theories/Numbers/Natural/Peano/NPeano.v
  Integer: Abstract and concrete (especially 63-bits-words-based) integer arithmetic
theories/Numbers/Integer/Abstract/ZAdd.v theories/Numbers/Integer/Abstract/ZAddOrder.v theories/Numbers/Integer/Abstract/ZAxioms.v theories/Numbers/Integer/Abstract/ZBase.v theories/Numbers/Integer/Abstract/ZLt.v theories/Numbers/Integer/Abstract/ZMul.v theories/Numbers/Integer/Abstract/ZMulOrder.v theories/Numbers/Integer/Abstract/ZSgnAbs.v theories/Numbers/Integer/Abstract/ZMaxMin.v theories/Numbers/Integer/Abstract/ZParity.v theories/Numbers/Integer/Abstract/ZPow.v theories/Numbers/Integer/Abstract/ZGcd.v theories/Numbers/Integer/Abstract/ZLcm.v theories/Numbers/Integer/Abstract/ZBits.v theories/Numbers/Integer/Abstract/ZProperties.v theories/Numbers/Integer/Abstract/ZDivEucl.v theories/Numbers/Integer/Abstract/ZDivFloor.v theories/Numbers/Integer/Abstract/ZDivTrunc.v theories/Numbers/Integer/Binary/ZBinary.v theories/Numbers/Integer/NatPairs/ZNatPairs.v
  Floats: Floating-point arithmetic
theories/Floats/FloatClass.v theories/Floats/PrimFloat.v theories/Floats/SpecFloat.v theories/Floats/FloatOps.v theories/Floats/FloatAxioms.v theories/Floats/FloatLemmas.v (theories/Floats/Floats.v)
Relations: Relations (definitions and basic results)
theories/Relations/Relation_Definitions.v theories/Relations/Relation_Operators.v theories/Relations/Relations.v theories/Relations/Operators_Properties.v
Sets: Sets (classical, constructive, finite, infinite, powerset, etc.)
theories/Sets/Classical_sets.v theories/Sets/Constructive_sets.v theories/Sets/Cpo.v theories/Sets/Ensembles.v theories/Sets/Finite_sets_facts.v theories/Sets/Finite_sets.v theories/Sets/Image.v theories/Sets/Infinite_sets.v theories/Sets/Integers.v theories/Sets/Multiset.v theories/Sets/Partial_Order.v theories/Sets/Permut.v theories/Sets/Powerset_Classical_facts.v theories/Sets/Powerset_facts.v theories/Sets/Powerset.v theories/Sets/Relations_1_facts.v theories/Sets/Relations_1.v theories/Sets/Relations_2_facts.v theories/Sets/Relations_2.v theories/Sets/Relations_3_facts.v theories/Sets/Relations_3.v theories/Sets/Uniset.v
Classes:
theories/Classes/Init.v theories/Classes/RelationClasses.v theories/Classes/Morphisms.v theories/Classes/Morphisms_Prop.v theories/Classes/Morphisms_Relations.v theories/Classes/Equivalence.v theories/Classes/CRelationClasses.v theories/Classes/CMorphisms.v theories/Classes/CEquivalence.v theories/Classes/EquivDec.v theories/Classes/SetoidTactics.v theories/Classes/SetoidClass.v theories/Classes/SetoidDec.v theories/Classes/RelationPairs.v theories/Classes/DecidableClass.v
Setoids:
theories/Setoids/Setoid.v
Lists: Polymorphic lists, Streams (infinite sequences)
theories/Lists/List.v theories/Lists/ListDec.v theories/Lists/ListSet.v theories/Lists/SetoidList.v theories/Lists/SetoidPermutation.v theories/Lists/Streams.v theories/Lists/StreamMemo.v theories/Lists/ListTactics.v
Vectors: Dependent data structures storing their length
theories/Vectors/Fin.v theories/Vectors/VectorDef.v theories/Vectors/VectorSpec.v theories/Vectors/VectorEq.v (theories/Vectors/Vector.v)
Sorting: Sorted lists and sorting algorithms (permutations, mergesort, heapsort)
theories/Sorting/Heap.v theories/Sorting/Permutation.v theories/Sorting/Sorting.v theories/Sorting/PermutEq.v theories/Sorting/PermutSetoid.v theories/Sorting/Mergesort.v theories/Sorting/Sorted.v theories/Sorting/CPermutation.v
Wellfounded: Well-founded Relations
theories/Wellfounded/Disjoint_Union.v theories/Wellfounded/Inclusion.v theories/Wellfounded/Inverse_Image.v theories/Wellfounded/Lexicographic_Exponentiation.v theories/Wellfounded/Lexicographic_Product.v theories/Wellfounded/Transitive_Closure.v theories/Wellfounded/Union.v theories/Wellfounded/Wellfounded.v theories/Wellfounded/Well_Ordering.v
MSets: Modular implementation of finite sets using lists or efficient trees. This is a modernization of FSets.
theories/MSets/MSetInterface.v theories/MSets/MSetFacts.v theories/MSets/MSetDecide.v theories/MSets/MSetProperties.v theories/MSets/MSetEqProperties.v theories/MSets/MSetWeakList.v theories/MSets/MSetList.v theories/MSets/MSetGenTree.v theories/MSets/MSetAVL.v theories/MSets/MSetRBT.v theories/MSets/MSetPositive.v theories/MSets/MSetToFiniteSet.v (theories/MSets/MSets.v)
FSets: Modular implementation of finite sets/maps using lists or efficient trees. For sets, please consider the more modern MSets.
theories/FSets/FSetInterface.v theories/FSets/FSetBridge.v theories/FSets/FSetFacts.v theories/FSets/FSetDecide.v theories/FSets/FSetProperties.v theories/FSets/FSetEqProperties.v theories/FSets/FSetList.v theories/FSets/FSetWeakList.v theories/FSets/FSetCompat.v theories/FSets/FSetAVL.v theories/FSets/FSetPositive.v (theories/FSets/FSets.v) theories/FSets/FSetToFiniteSet.v theories/FSets/FMapInterface.v theories/FSets/FMapWeakList.v theories/FSets/FMapList.v theories/FSets/FMapPositive.v theories/FSets/FMapFacts.v (theories/FSets/FMaps.v) theories/FSets/FMapAVL.v theories/FSets/FMapFullAVL.v
Strings: Implementation of strings as lists of ASCII characters
theories/Strings/Byte.v theories/Strings/Ascii.v theories/Strings/String.v theories/Strings/BinaryString.v theories/Strings/HexString.v theories/Strings/OctalString.v theories/Strings/ByteVector.v
Reals: Formalization of real numbers
Classical Reals: Real numbers with excluded middle, total order and least upper bounds
theories/Reals/Rdefinitions.v theories/Reals/ClassicalDedekindReals.v theories/Reals/ClassicalConstructiveReals.v theories/Reals/Raxioms.v theories/Reals/RIneq.v theories/Reals/DiscrR.v theories/Reals/ROrderedType.v theories/Reals/Rminmax.v (theories/Reals/Rbase.v) theories/Reals/RList.v theories/Reals/Ranalysis.v theories/Reals/Rbasic_fun.v theories/Reals/Rderiv.v theories/Reals/Rfunctions.v theories/Reals/Rgeom.v theories/Reals/R_Ifp.v theories/Reals/Rlimit.v theories/Reals/Rseries.v theories/Reals/Rsigma.v theories/Reals/R_sqr.v theories/Reals/Rtrigo_fun.v theories/Reals/Rtrigo1.v theories/Reals/Rtrigo.v theories/Reals/Rtrigo_facts.v theories/Reals/Ratan.v theories/Reals/Machin.v theories/Reals/SplitAbsolu.v theories/Reals/SplitRmult.v theories/Reals/Alembert.v theories/Reals/AltSeries.v theories/Reals/ArithProp.v theories/Reals/Binomial.v theories/Reals/Cauchy_prod.v theories/Reals/Cos_plus.v theories/Reals/Cos_rel.v theories/Reals/Exp_prop.v theories/Reals/Integration.v theories/Reals/MVT.v theories/Reals/NewtonInt.v theories/Reals/PSeries_reg.v theories/Reals/PartSum.v theories/Reals/R_sqrt.v theories/Reals/Ranalysis1.v theories/Reals/Ranalysis2.v theories/Reals/Ranalysis3.v theories/Reals/Ranalysis4.v theories/Reals/Ranalysis5.v theories/Reals/Ranalysis_reg.v theories/Reals/Rcomplete.v theories/Reals/RiemannInt.v theories/Reals/RiemannInt_SF.v theories/Reals/Rpow_def.v theories/Reals/Rpower.v theories/Reals/Rprod.v theories/Reals/Rsqrt_def.v theories/Reals/Rtopology.v theories/Reals/Rtrigo_alt.v theories/Reals/Rtrigo_calc.v theories/Reals/Rtrigo_def.v theories/Reals/Rtrigo_reg.v theories/Reals/SeqProp.v theories/Reals/SeqSeries.v theories/Reals/Sqrt_reg.v theories/Reals/Rlogic.v theories/Reals/Rregisternames.v (theories/Reals/Reals.v) theories/Reals/Runcountable.v
Abstract Constructive Reals: Interface of constructive reals, proof of equivalence of all implementations. EXPERIMENTAL
theories/Reals/Abstract/ConstructiveReals.v theories/Reals/Abstract/ConstructiveRealsMorphisms.v theories/Reals/Abstract/ConstructiveLUB.v theories/Reals/Abstract/ConstructiveAbs.v theories/Reals/Abstract/ConstructiveLimits.v theories/Reals/Abstract/ConstructiveMinMax.v theories/Reals/Abstract/ConstructivePower.v theories/Reals/Abstract/ConstructiveSum.v
Constructive Cauchy Reals: Cauchy sequences of rational numbers, implementation of the interface. EXPERIMENTAL
theories/Reals/Cauchy/ConstructiveRcomplete.v theories/Reals/Cauchy/ConstructiveCauchyReals.v theories/Reals/Cauchy/ConstructiveCauchyRealsMult.v theories/Reals/Cauchy/ConstructiveCauchyAbs.v
Program: Support for dependently-typed programming
theories/Program/Basics.v theories/Program/Wf.v theories/Program/Subset.v theories/Program/Equality.v theories/Program/Tactics.v theories/Program/Utils.v theories/Program/Syntax.v theories/Program/Program.v theories/Program/Combinators.v
SSReflect: Base libraries for the SSReflect proof language and the small scale reflection formalization technique
theories/ssrmatching/ssrmatching.v theories/ssr/ssrclasses.v theories/ssr/ssreflect.v theories/ssr/ssrbool.v theories/ssr/ssrfun.v
Ltac2: The Ltac2 tactic programming language
user-contrib/Ltac2/Ltac2.v user-contrib/Ltac2/Array.v user-contrib/Ltac2/Bool.v user-contrib/Ltac2/Char.v user-contrib/Ltac2/Constr.v user-contrib/Ltac2/Control.v user-contrib/Ltac2/Env.v user-contrib/Ltac2/Fresh.v user-contrib/Ltac2/Ident.v user-contrib/Ltac2/Init.v user-contrib/Ltac2/Ind.v user-contrib/Ltac2/Int.v user-contrib/Ltac2/List.v user-contrib/Ltac2/Ltac1.v user-contrib/Ltac2/Message.v user-contrib/Ltac2/Notations.v user-contrib/Ltac2/Option.v user-contrib/Ltac2/Pattern.v user-contrib/Ltac2/Printf.v user-contrib/Ltac2/Std.v user-contrib/Ltac2/String.v
Unicode: Unicode-based notations
theories/Unicode/Utf8_core.v theories/Unicode/Utf8.v
Compat: Compatibility wrappers for previous versions of Coq
theories/Compat/AdmitAxiom.v theories/Compat/Coq813.v theories/Compat/Coq814.v theories/Compat/Coq815.v
Array: Persistent native arrays
theories/Array/PArray.v
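Every .v file under theories/ and user-contrib/ has to be mentioned either in the index template above or in the hidden-files list further up; the make-library-index script that follows fails the documentation build otherwise and rewrites the listed paths into coqdoc link targets. A rough Python rendering of that consistency check, for illustration only (the real check is the bash script below; paths are relative to the repository root):

from pathlib import Path

def check_stdlib_index(template="doc/stdlib/index-list.html.template",
                       hidden="doc/stdlib/hidden-files",
                       roots=("theories", "user-contrib")):
    indexed = Path(template).read_text(encoding="utf-8")
    hidden_files = set(Path(hidden).read_text(encoding="utf-8").split())
    errors = []
    for root in roots:
        for vfile in sorted(Path(root).rglob("*.v")):
            name = vfile.as_posix()
            in_index, in_hidden = name in indexed, name in hidden_files
            if in_index and in_hidden:
                errors.append("%s is listed both in the index and in hidden-files" % name)
            elif not in_index and not in_hidden:
                errors.append("%s appears in neither the index nor hidden-files" % name)
    return errors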
coq-8.15.0/doc/stdlib/make-library-index000077500000000000000000000025071417001151100200220ustar00rootroot00000000000000#!/usr/bin/env bash # Instantiate links to library files in index template FILE=$1 HIDDEN=$2 cp -f $FILE.template tmp echo -n "Building file index-list.prehtml... " LIBDIRS=`find theories/* user-contrib/* -type d ! -name .coq-native` for k in $LIBDIRS; do if [[ $k =~ "user-contrib" ]]; then BASE_PREFIX="" else BASE_PREFIX="Coq." fi d=`basename $k` ls $k | grep -q \.v'$' if [ $? = 0 ]; then for j in $k/*.v; do b=`basename $j .v` rm -f tmp2 grep -q $k/$b.v tmp a=$? grep -q $k/$b.v $HIDDEN h=$? if [ $a = 0 ]; then if [ $h = 0 ]; then echo Error: $FILE and $HIDDEN both mention $k/$b.v; exit 1 else p=`echo $k | sed 's:^[^/]*/::' | sed 's:/:.:g'` sed -e "s:$k/$b.v:$b:g" tmp > tmp2 mv -f tmp2 tmp fi else if [ $h = 0 ]; then # Skipping file from the index : else echo Error: none of $FILE and $HIDDEN mention $k/$b.v exit 1 fi fi done fi rm -f tmp2 sed -e "s/#$d#//" tmp > tmp2 mv -f tmp2 tmp done a=`grep theories tmp` if [ $? = 0 ]; then echo Error: extra files:; echo $a; exit 1; fi mv tmp $FILE echo Done coq-8.15.0/doc/tools/000077500000000000000000000000001417001151100142635ustar00rootroot00000000000000coq-8.15.0/doc/tools/coqrst/000077500000000000000000000000001417001151100155765ustar00rootroot00000000000000coq-8.15.0/doc/tools/coqrst/__init__.py000066400000000000000000000013211417001151100177040ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## 0: deps = " ".join(missing_deps) eprint('Cannot find package(s) `%s` (needed to build documentation)' % deps) eprint('You can run `pip3 install %s` to install it/them.' % deps) sys.exit(1) try: import sphinx_rtd_theme except: missing_dep('sphinx_rtd_theme') try: import pexpect except: missing_dep('pexpect') try: import antlr4 except: missing_dep('antlr4-python3-runtime') try: import bs4 except: missing_dep('beautifulsoup4') try: import sphinxcontrib.bibtex except: missing_dep('sphinxcontrib-bibtex') report_missing_deps() coq-8.15.0/doc/tools/coqrst/coqdoc/000077500000000000000000000000001417001151100170465ustar00rootroot00000000000000coq-8.15.0/doc/tools/coqrst/coqdoc/__init__.py000066400000000000000000000012671417001151100211650ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## ", "<-", "<->", "=>", "<=", ">=", "<>", "~", "/\\", "\\/", "|-", "*", "forall", "exists"] COQDOC_HEADER = "".join("(** remove printing {} *)".format(s) for s in COQDOC_SYMBOLS) def coqdoc(coq_code, coqdoc_bin=None): """Get the output of coqdoc on coq_code.""" coqdoc_bin = coqdoc_bin or os.path.join(os.getenv("COQBIN", ""), "coqdoc") fd, filename = mkstemp(prefix="coqdoc_", suffix=".v") if platform.system().startswith("CYGWIN"): # coqdoc currently doesn't accept cygwin style paths in the form "/cygdrive/c/..." 
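# Note: "cygpath -w" (used just below) rewrites such a POSIX-style path into its
# Windows spelling -- e.g. /cygdrive/c/Temp/coqdoc_ab.v becomes C:\Temp\coqdoc_ab.v
# (illustrative path) -- so that a native Windows coqdoc can open the temporary file.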
filename = check_output(["cygpath", "-w", filename]).decode("utf-8").strip() try: os.write(fd, COQDOC_HEADER.encode("utf-8")) os.write(fd, coq_code.encode("utf-8")) os.close(fd) return check_output([coqdoc_bin] + COQDOC_OPTIONS + [filename], timeout = 10).decode("utf-8") finally: os.remove(filename) def first_string_node(node): """Return the first string node, or None if does not exist""" while node.children: node = next(node.children) if isinstance(node, NavigableString): return node def lex(source): """Convert source into a stream of (css_classes, token_string).""" coqdoc_output = coqdoc(source) soup = BeautifulSoup(coqdoc_output, "html.parser") root = soup.find(class_='code') # strip the leading '\n' first = first_string_node(root) if first and first.string[0] == '\n': first.string.replace_with(first.string[1:]) for elem in root.children: if isinstance(elem, NavigableString): yield [], elem elif elem.name == "span": if elem.string: cls = "coqdoc-{}".format(elem.get("title", "comment")) yield [cls], elem.string else: # handle multi-line comments children = list(elem.children) mlc = children[0].startswith("(*") and children[-1].endswith ("*)") for elem2 in children: if isinstance(elem2, NavigableString): cls = ["coqdoc-comment"] if mlc else [] yield cls, elem2 elif elem2.name == 'br': pass elif elem.name == 'br': pass else: raise ValueError(elem) def main(): """Lex stdin (for testing purposes)""" import sys for classes, text in lex(sys.stdin.read()): print(repr(text) + "\t" ' '.join(classes)) if __name__ == '__main__': main() coq-8.15.0/doc/tools/coqrst/coqdomain.py000066400000000000000000001545261417001151100201370ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## `_. prodn displays multiple productions together with alignment similar to ``.. productionlist``, however unlike ``.. productionlist``\ s, this directive accepts notation syntax. Example:: .. prodn:: occ_switch ::= { {? {| + | - } } {* @natural } } term += let: @pattern := @term in @term | second_production The first line defines "occ_switch", which must be unique in the document. The second references and expands the definition of "term", whose main definition is elsewhere in the document. The third form is for continuing the definition of a nonterminal when it has multiple productions. It leaves the first column in the output blank. """ subdomain = "prodn" #annotation = "Grammar production" # handle_signature is called for each line of input in the prodn:: # 'signatures' accumulates them in order to combine the lines into a single table: signatures = None # FIXME this should be in init, shouldn't it? def _render_signature(self, signature, signode): raise NotImplementedError(self) SIG_ERROR = ("{}: Invalid syntax in ``.. prodn::`` directive" + "\nExpected ``name ::= ...`` or ``name += ...``" + " (e.g. 
``pattern += constr:(@ident)``)\n" + " in `{}`") def handle_signature(self, signature, signode): parts = signature.split(maxsplit=1) if parts[0].strip() == "|" and len(parts) == 2: lhs = "" op = "|" rhs = parts[1].strip() else: parts = signature.split(maxsplit=2) if len(parts) != 3: loc = os.path.basename(get_node_location(signode)) raise ExtensionError(ProductionObject.SIG_ERROR.format(loc, signature)) lhs, op, rhs = (part.strip() for part in parts) if op not in ["::=", "+="]: loc = os.path.basename(get_node_location(signode)) raise ExtensionError(ProductionObject.SIG_ERROR.format(loc, signature)) parts = rhs.split(" ", maxsplit=1) rhs = parts[0].strip() tag = parts[1].strip() if len(parts) == 2 else "" self.signatures.append((lhs, op, rhs, tag)) return [('token', lhs)] if op == '::=' else None def _add_index_entry(self, name, target): pass def _target_id(self, name): return make_id('grammar-token-{}'.format(name[1])) def _record_name(self, name, targetid, signode): env = self.state.document.settings.env objects = env.domaindata['std']['objects'] self._warn_if_duplicate_name(objects, name, signode) objects[name] = env.docname, targetid def run(self): self.signatures = [] indexnode = super().run()[0] # makes calls to handle_signature table = nodes.inline(classes=['prodn-table']) tgroup = nodes.inline(classes=['prodn-column-group']) for _ in range(4): tgroup += nodes.inline(classes=['prodn-column']) table += tgroup tbody = nodes.inline(classes=['prodn-row-group']) table += tbody # create rows for signature in self.signatures: lhs, op, rhs, tag = signature position = self.state_machine.get_source_and_line(self.lineno) row = nodes.inline(classes=['prodn-row']) entry = nodes.inline(classes=['prodn-cell-nonterminal']) if lhs != "": target_name = make_id('grammar-token-' + lhs) target = nodes.target('', '', ids=[target_name], names=[target_name]) # putting prodn-target on the target node won't appear in the tex file inline = nodes.inline(classes=['prodn-target']) inline += target entry += inline entry += notation_to_sphinx('@'+lhs, *position) else: entry += nodes.literal('', '') row += entry entry = nodes.inline(classes=['prodn-cell-op']) entry += nodes.literal(op, op) row += entry entry = nodes.inline(classes=['prodn-cell-production']) entry += notation_to_sphinx(rhs, *position) row += entry entry = nodes.inline(classes=['prodn-cell-tag']) entry += nodes.literal(tag, tag) row += entry tbody += row return [indexnode, table] # only this node goes into the doc class ExceptionObject(NotationObject): """An error raised by a Coq command or tactic. This commonly appears nested in the ``.. tacn::`` that raises the exception. Example:: .. tacv:: assert @form by @tactic This tactic applies :n:`@tactic` to solve the subgoals generated by ``assert``. .. exn:: Proof is not complete Raised if :n:`@tactic` does not fully solve the goal. """ subdomain = "exn" index_suffix = "(error)" annotation = "Error" # Uses “exn” since “err” already is a CSS class added by “writer_aux”. # Generate names automatically def _name_from_signature(self, signature): return notation_to_string(signature) class WarningObject(NotationObject): """An warning raised by a Coq command or tactic.. Do not mistake this for ``.. warning::``; this directive is for warning messages produced by Coq. Example:: .. warn:: Ambiguous path When the coercion :token:`qualid` is added to the inheritance graph, non valid coercion paths are ignored. 
""" subdomain = "warn" index_suffix = "(warning)" annotation = "Warning" # Generate names automatically def _name_from_signature(self, signature): return notation_to_string(signature) def NotationRole(role, rawtext, text, lineno, inliner, options={}, content=[]): #pylint: disable=unused-argument, dangerous-default-value """Any text using the notation syntax (``@id``, ``{+, …}``, etc.). Use this to explain tactic equivalences. For example, you might write this:: :n:`generalize @term as @ident` is just like :n:`generalize @term`, but it names the introduced hypothesis :token:`ident`. Note that this example also uses ``:token:``. That's because ``ident`` is defined in the Coq manual as a grammar production, and ``:token:`` creates a link to that. When referring to a placeholder that happens to be a grammar production, ``:token:`…``` is typically preferable to ``:n:`@…```. """ notation = utils.unescape(text, 1) position = inliner.reporter.get_source_and_line(lineno) return [nodes.literal(rawtext, '', notation_to_sphinx(notation, *position, rawtext=rawtext))], [] def coq_code_role(role, rawtext, text, lineno, inliner, options={}, content=[]): #pylint: disable=dangerous-default-value """Coq code. Use this for Gallina and Ltac snippets:: :g:`apply plus_comm; reflexivity` :g:`Set Printing All.` :g:`forall (x: t), P(x)` """ options['language'] = 'Coq' return code_role(role, rawtext, text, lineno, inliner, options, content) ## Too heavy: ## Forked from code_role to use our custom tokenizer; this doesn't work for ## snippets though: for example CoqDoc swallows the parentheses around this: ## “(a: A) (b: B)” # set_classes(options) # classes = ['code', 'coq'] # code = utils.unescape(text, 1) # node = nodes.literal(rawtext, '', *highlight_using_coqdoc(code), classes=classes) # return [node], [] CoqCodeRole = coq_code_role class CoqtopDirective(Directive): r"""A reST directive to describe interactions with Coqtop. Usage:: .. coqtop:: options… Coq code to send to coqtop Example:: .. coqtop:: in reset Print nat. Definition a := 1. The blank line after the directive is required. If you begin a proof, use the ``abort`` option to reset coqtop for the next example. Here is a list of permissible options: - Display options (choose exactly one) - ``all``: Display input and output - ``in``: Display only input - ``out``: Display only output - ``none``: Display neither (useful for setup commands) - Behavior options - ``reset``: Send a ``Reset Initial`` command before running this block - ``fail``: Don't die if a command fails, implies ``warn`` (so no need to put both) - ``warn``: Don't die if a command emits a warning - ``restart``: Send a ``Restart`` command before running this block (only works in proof mode) - ``abort``: Send an ``Abort All`` command after running this block (leaves all pending proofs if any) ``coqtop``\ 's state is preserved across consecutive ``.. coqtop::`` blocks of the same document (``coqrst`` creates a single ``coqtop`` process per reST source file). Use the ``reset`` option to reset Coq's state. 
""" has_content = True required_arguments = 1 optional_arguments = 0 final_argument_whitespace = True option_spec = { 'name': directives.unchanged } directive_name = "coqtop" def run(self): # Uses a ‘container’ instead of a ‘literal_block’ to disable # Pygments-based post-processing (we could also set rawsource to '') content = '\n'.join(self.content) args = self.arguments[0].split() node = nodes.container(content, coqtop_options = set(args), classes=['coqtop', 'literal-block']) self.add_name(node) return [node] class CoqdocDirective(Directive): """A reST directive to display Coqtop-formatted source code. Usage:: .. coqdoc:: Coq code to highlight Example:: .. coqdoc:: Definition test := 1. """ # TODO implement this as a Pygments highlighter? has_content = True required_arguments = 0 optional_arguments = 0 final_argument_whitespace = True option_spec = { 'name': directives.unchanged } directive_name = "coqdoc" def run(self): # Uses a ‘container’ instead of a ‘literal_block’ to disable # Pygments-based post-processing (we could also set rawsource to '') content = '\n'.join(self.content) node = nodes.inline(content, '', *highlight_using_coqdoc(content)) wrapper = nodes.container(content, node, classes=['coqdoc', 'literal-block']) self.add_name(wrapper) return [wrapper] class ExampleDirective(BaseAdmonition): """A reST directive for examples. This behaves like a generic admonition; see http://docutils.sourceforge.net/docs/ref/rst/directives.html#generic-admonition for more details. Optionally, any text immediately following the ``.. example::`` header is used as the example's title. Example:: .. example:: Adding a hint to a database The following adds ``plus_comm`` to the ``plu`` database: .. coqdoc:: Hint Resolve plus_comm : plu. """ node_class = nodes.admonition directive_name = "example" optional_arguments = 1 def run(self): # ‘BaseAdmonition’ checks whether ‘node_class’ is ‘nodes.admonition’, # and uses arguments[0] as the title in that case (in other cases, the # title is unset, and it is instead set in the HTML visitor). assert len(self.arguments) <= 1 self.arguments = [": ".join(['Example'] + self.arguments)] self.options['classes'] = ['admonition', 'note'] return super().run() class PreambleDirective(Directive): r"""A reST directive to include a TeX file. Mostly useful to let MathJax know about `\def`\s and `\newcommand`\s. The contents of the TeX file are wrapped in a math environment, as MathJax doesn't process LaTeX definitions otherwise. Usage:: .. preamble:: preamble.tex """ has_content = False required_arguments = 1 optional_arguments = 0 final_argument_whitespace = True option_spec = {} directive_name = "preamble" def run(self): document = self.state.document env = document.settings.env if not document.settings.file_insertion_enabled: msg = 'File insertion disabled' return [document.reporter.warning(msg, line=self.lineno)] rel_fname, abs_fname = env.relfn2path(self.arguments[0]) env.note_dependency(rel_fname) with open(abs_fname, encoding="utf-8") as ltx: latex = ltx.read() node = make_math_node(latex, env.docname, nowrap=False) node['classes'] = ["math-preamble"] set_source_info(self, node) return [node] class InferenceDirective(Directive): r"""A reST directive to format inference rules. This also serves as a small illustration of the way to create new Sphinx directives. Usage:: .. inference:: name newline-separated premises -------------------------- conclusion Example:: .. 
inference:: Prod-Pro \WTEG{T}{s} s \in \Sort \WTE{\Gamma::(x:T)}{U}{\Prop} ----------------------------- \WTEG{\forall~x:T,U}{\Prop} """ required_arguments = 1 optional_arguments = 0 has_content = True final_argument_whitespace = True directive_name = "inference" @staticmethod def prepare_latex_operand(op): # TODO: Could use a fancier inference class in LaTeX return '%\n\\hspace{3em}%\n'.join(op.strip().splitlines()) def prepare_latex(self, content): parts = re.split('^ *----+ *$', content, flags=re.MULTILINE) if len(parts) != 2: raise self.error('Expected two parts in ‘inference’ directive, separated by a rule (----).') top, bottom = tuple(InferenceDirective.prepare_latex_operand(p) for p in parts) return "%\n".join(("\\frac{", top, "}{", bottom, "}")) def run(self): self.assert_has_content() title = self.arguments[0] content = '\n'.join(self.content) latex = self.prepare_latex(content) docname = self.state.document.settings.env.docname math_node = make_math_node(latex, docname, nowrap=False) tid = make_id(title) target = nodes.target('', '', ids=['inference-' + tid]) self.state.document.note_explicit_target(target) term, desc = nodes.term('', title), nodes.description('', math_node) dli = nodes.definition_list_item('', term, desc) dl = nodes.definition_list(content, target, dli) set_source_info(self, dl) return [dl] class AnsiColorsParser(): """Parse ANSI-colored output from Coqtop into Sphinx nodes.""" # Coqtop's output crashes ansi.py, because it contains a bunch of extended codes # This class is a fork of the original ansi.py, released under a BSD license in sphinx-contribs COLOR_PATTERN = re.compile('\x1b\\[([^m]+)m') def __init__(self): self.new_nodes, self.pending_nodes = [], [] def _finalize_pending_nodes(self): self.new_nodes.extend(self.pending_nodes) self.pending_nodes = [] def _add_text(self, raw, beg, end): if beg < end: text = raw[beg:end] if self.pending_nodes: self.pending_nodes[-1].append(nodes.Text(text)) else: self.new_nodes.append(nodes.inline('', text)) def colorize_str(self, raw): """Parse raw (an ANSI-colored output string from Coqtop) into Sphinx nodes.""" last_end = 0 for match in AnsiColorsParser.COLOR_PATTERN.finditer(raw): self._add_text(raw, last_end, match.start()) last_end = match.end() classes = ansicolors.parse_ansi(match.group(1)) if 'ansi-reset' in classes: self._finalize_pending_nodes() else: node = nodes.inline() self.pending_nodes.append(node) node['classes'].extend(classes) self._add_text(raw, last_end, len(raw)) self._finalize_pending_nodes() return self.new_nodes class CoqtopBlocksTransform(Transform): """Filter handling the actual work for the coqtop directive Adds coqtop's responses, colorizes input and output, and merges consecutive coqtop directives for better visual rendition. """ default_priority = 10 @staticmethod def is_coqtop_block(node): return isinstance(node, nodes.Element) and 'coqtop_options' in node @staticmethod def split_lines(source): r"""Split Coq input into chunks, which may include single- or multi-line comments. Nested comments are not supported. A chunk is a minimal sequence of consecutive lines of the input that ends with a '.' or '*)' >>> split_lines('A.\nB.''') ['A.', 'B.'] >>> split_lines('A.\n\nB.''') ['A.', '\nB.'] >>> split_lines('A.\n\nB.\n''') ['A.', '\nB.'] >>> split_lines("SearchPattern (_ + _ = _ + _).\n" ... "SearchPattern (nat -> bool).\n" ... "SearchPattern (forall l : list _, _ l l).") ... 
# doctest: +NORMALIZE_WHITESPACE ['SearchPattern (_ + _ = _ + _).', 'SearchPattern (nat -> bool).', 'SearchPattern (forall l : list _, _ l l).'] >>> split_lines('SearchHead le.\nSearchHead (@eq bool).') ['SearchHead le.', 'SearchHead (@eq bool).'] >>> split_lines("(* *) x. (* *)\ny.\n") ['(* *) x. (* *)', 'y.'] >>> split_lines("(* *) x (* \n *)\ny.\n") ['(* *) x (* \n *)', 'y.'] """ return re.split(r"(?:(?<=(?` to display "text" for the definition of "term", such as when "term" must be capitalized or plural for grammatical reasons. The term will also appear in the Glossary Index. Examples:: A :gdef:`prime` number is divisible only by itself and 1. :gdef:`Composite ` numbers are the non-prime numbers. """ #pylint: disable=dangerous-default-value, unused-argument env = inliner.document.settings.env std = env.domaindata['std']['objects'] m = ReferenceRole.explicit_title_re.match(text) if m: (text, term) = m.groups() text = text.strip() else: term = text key = ('term', term) if key in std: MSG = 'Duplicate object: {}; other is at {}' msg = MSG.format(term, env.doc2path(std[key][0])) inliner.document.reporter.warning(msg, line=lineno) targetid = make_id('term-{}'.format(term)) std[key] = (env.docname, targetid) target = nodes.target('', '', ids=[targetid], names=[term]) inliner.document.note_explicit_target(target) node = nodes.inline(rawtext, '', target, nodes.Text(text), classes=['term-defn']) set_role_source_info(inliner, lineno, node) return [node], [] GlossaryDefRole.role_name = "gdef" class CoqDomain(Domain): """A domain to document Coq code. Sphinx has a notion of “domains”, used to tailor it to a specific language. Domains mostly consist in descriptions of the objects that we wish to describe (for Coq, this includes tactics, tactic notations, options, exceptions, etc.), as well as domain-specific roles and directives. Each domain is responsible for tracking its objects, and resolving references to them. In the case of Coq, this leads us to define Coq “subdomains”, which classify objects into categories in which names must be unique. For example, a tactic and a theorem may share a name, but two tactics cannot be named the same. """ name = 'coq' label = 'Coq' object_types = { # ObjType (= directive type) → (Local name, *xref-roles) 'cmd': ObjType('cmd', 'cmd'), 'cmdv': ObjType('cmdv', 'cmd'), 'tacn': ObjType('tacn', 'tacn'), 'tacv': ObjType('tacv', 'tacn'), 'opt': ObjType('opt', 'opt'), 'flag': ObjType('flag', 'flag'), 'table': ObjType('table', 'table'), 'attr': ObjType('attr', 'attr'), 'thm': ObjType('thm', 'thm'), 'prodn': ObjType('prodn', 'prodn'), 'exn': ObjType('exn', 'exn'), 'warn': ObjType('warn', 'exn'), 'index': ObjType('index', 'index', searchprio=-1) } directives = { # Note that some directives live in the same semantic subdomain; ie # there's one directive per object type, but some object types map to # the same role. 
'cmd': VernacObject, 'cmdv': VernacVariantObject, 'tacn': TacticObject, 'tacv': TacticVariantObject, 'opt': OptionObject, 'flag': FlagObject, 'table': TableObject, 'attr': AttributeObject, 'thm': GallinaObject, 'prodn' : ProductionObject, 'exn': ExceptionObject, 'warn': WarningObject, } roles = { # Each of these roles lives in a different semantic “subdomain” 'cmd': XRefRole(warn_dangling=True), 'tacn': XRefRole(warn_dangling=True), 'opt': XRefRole(warn_dangling=True), 'flag': XRefRole(warn_dangling=True), 'table': XRefRole(warn_dangling=True), 'attr': XRefRole(warn_dangling=True), 'thm': XRefRole(warn_dangling=True), 'prodn' : XRefRole(warn_dangling=True), 'exn': XRefRole(warn_dangling=True), 'warn': XRefRole(warn_dangling=True), # This one is special 'index': IndexXRefRole(), # These are used for highlighting 'n': NotationRole, 'g': CoqCodeRole } indices = [CoqVernacIndex, CoqTacticIndex, CoqOptionIndex, CoqGallinaIndex, CoqExceptionIndex, CoqAttributeIndex] data_version = 1 initial_data = { # Collect everything under a key that we control, since Sphinx adds # others, such as “version” 'objects' : { # subdomain → name → docname, objtype, targetid 'cmd': {}, 'tacn': {}, 'opt': {}, 'flag': {}, 'table': {}, 'attr': {}, 'thm': {}, 'prodn' : {}, 'exn': {}, 'warn': {}, } } @staticmethod def find_index_by_name(targetid): for index in CoqDomain.indices: if index.name == targetid: return index return None def get_objects(self): # Used for searching and object inventories (intersphinx) for _, objects in self.data['objects'].items(): for name, (docname, objtype, targetid) in objects.items(): yield (name, name, objtype, docname, targetid, self.object_types[objtype].attrs['searchprio']) for index in self.indices: yield (index.name, index.localname, 'index', "coq-" + index.name, '', -1) def merge_domaindata(self, docnames, otherdata): DUP = "Duplicate declaration: '{}' also defined in '{}'.\n" for subdomain, their_objects in otherdata['objects'].items(): our_objects = self.data['objects'][subdomain] for name, (docname, objtype, targetid) in their_objects.items(): if docname in docnames: if name in our_objects: self.env.warn(docname, DUP.format(name, our_objects[name][0])) our_objects[name] = (docname, objtype, targetid) def resolve_xref(self, env, fromdocname, builder, role, targetname, node, contnode): # ‘target’ is the name that was written in the document # ‘role’ is where this xref comes from; it's exactly one of our subdomains if role == 'index': index = CoqDomain.find_index_by_name(targetname) if index: return make_refnode(builder, fromdocname, "coq-" + index.name, '', contnode, index.localname) else: resolved = self.data['objects'][role].get(targetname) if resolved: (todocname, _, targetid) = resolved return make_refnode(builder, fromdocname, todocname, targetid, contnode, targetname) return None def clear_doc(self, docname_to_clear): for subdomain_objects in self.data['objects'].values(): for name, (docname, _, _) in list(subdomain_objects.items()): if docname == docname_to_clear: del subdomain_objects[name] def is_coqtop_or_coqdoc_block(node): return (isinstance(node, nodes.Element) and ('coqtop' in node['classes'] or 'coqdoc' in node['classes'])) def simplify_source_code_blocks_for_latex(app, doctree, fromdocname): # pylint: disable=unused-argument """Simplify coqdoc and coqtop blocks. In HTML mode, this does nothing; in other formats, such as LaTeX, it replaces coqdoc and coqtop blocks by plain text sources, which will use pygments if available. 
This prevents the LaTeX builder from getting confused. """ is_html = app.builder.tags.has("html") for node in doctree.traverse(is_coqtop_or_coqdoc_block): if is_html: node.rawsource = '' # Prevent pygments from kicking in elif 'coqtop-hidden' in node['classes']: node.parent.remove(node) else: node.replace_self(nodes.literal_block(node.rawsource, node.rawsource, language="Coq")) COQ_ADDITIONAL_DIRECTIVES = [CoqtopDirective, CoqdocDirective, ExampleDirective, InferenceDirective, PreambleDirective] COQ_ADDITIONAL_ROLES = [GrammarProductionRole, GlossaryDefRole] def setup(app): """Register the Coq domain""" # A few sanity checks: subdomains = set(obj.subdomain for obj in CoqDomain.directives.values()) found = set (obj for obj in chain(*(idx.subdomains for idx in CoqDomain.indices))) assert subdomains.issuperset(found), "Missing subdomains: {}".format(found.difference(subdomains)) assert subdomains.issubset(CoqDomain.roles.keys()), \ "Missing from CoqDomain.roles: {}".format(subdomains.difference(CoqDomain.roles.keys())) # Add domain, directives, and roles app.add_domain(CoqDomain) app.add_index_to_domain('std', StdGlossaryIndex) for role in COQ_ADDITIONAL_ROLES: app.add_role(role.role_name, role) for directive in COQ_ADDITIONAL_DIRECTIVES: app.add_directive(directive.directive_name, directive) app.add_transform(CoqtopBlocksTransform) app.connect('doctree-resolved', simplify_source_code_blocks_for_latex) app.connect('doctree-resolved', CoqtopBlocksTransform.merge_consecutive_coqtop_blocks) # Add extra styles app.add_css_file("ansi.css") app.add_css_file("coqdoc.css") app.add_js_file("notations.js") app.add_css_file("notations.css") app.add_css_file("pre-text.css") # Tell Sphinx about extra settings app.add_config_value("report_undocumented_coq_objects", None, 'env') # ``env_version`` is used by Sphinx to know when to invalidate # coqdomain-specific bits in its caches. It should be incremented when the # contents of ``env.domaindata['coq']`` change. See # `https://github.com/sphinx-doc/sphinx/issues/4460`. meta = { "version": "0.1", "env_version": 2, "parallel_read_safe": True } return meta coq-8.15.0/doc/tools/coqrst/notations/000077500000000000000000000000001417001151100176145ustar00rootroot00000000000000coq-8.15.0/doc/tools/coqrst/notations/CoqNotations.ttf000066400000000000000000001121441417001151100227570ustar00rootroot00000000000000 FFTMt˕HGDEF5*GPOS5N@GSUBD:ROS/2r`cmap{1jcvt :[)v \zfpgmvD#gaspglyfZ- :tzhead,6hhea/d$hmtx alocav maxp name5xpostP0NprepXû 8#̡p3_<O~>V~`/Ys2  P [DAMA [1> V W"^U.(~q/#0+)#.1&-v-((E 66??-6*-G.66('  .q1m,>,$!?$1?$??Y6$; C??.& 1Sh2-qll(qq;J2 2$- ^Uq/q1m??r622EOmJJJowwMMM`\]ZfkG~ - dH ~    !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`ardeixpkvjsgwl|cnm}byqz@?XUTSRQPONMLKJIHGFEDCBA@?>=<;:98765/.-,(&%$#" ,E#F` &`&#HH-,E#F#a &a&#HH-,E#F` a F`&#HH-,E#F#a ` &a a&#HH-,E#F`@a f`&#HH-,E#F#a@` &a@a&#HH-, <<-, E# D# ZQX# D#Y QX# MD#Y &QX# D#Y!!-, EhD ` EFvhE`D-, C#Ce -, C#C -,(#p(>(#p(E: -, E%EadPQXED!!Y-, EC`D-,CCe -, i@a ,b`+ d#da\XaY-,E+)#D)z-,Ee,#DE+#D-,KRXED!!Y-,%# `#-,%# a#-,%-, ` <<-, a <<-,CC -,!! 
coq-8.15.0/doc/tools/coqrst/notations/Makefile000066400000000000000000000022021417001151100212500ustar00rootroot00000000000000
##########################################################################
## # The Coq Proof Assistant / The Coq Development Team ##
## v # Copyright INRIA, CNRS and contributors ##
##
../tests/antlr-notations.html
coq-8.15.0/doc/tools/coqrst/notations/TacticNotations.g000066400000000000000000000037301417001151100230750ustar00rootroot00000000000000
/************************************************************************/
/* * The Coq Proof Assistant / The Coq Development Team */
/* v * Copyright INRIA, CNRS and contributors */
/* ' | '%||' | '%|||' | '%||||'; // for SSR
PIPE: '|';
ATOM: '@' | '_' | ~[@_{}| ]+;
ID: '@' ('_'?
[a-zA-Z0-9])+; SUB: '_' '_' [a-zA-Z0-9]+; WHITESPACE: ' '+; coq-8.15.0/doc/tools/coqrst/notations/TacticNotations.tokens000066400000000000000000000001541417001151100241470ustar00rootroot00000000000000LALT=1 LGROUP=2 LBRACE=3 RBRACE=4 ESCAPED=5 PIPE=6 ATOM=7 ID=8 SUB=9 WHITESPACE=10 '{|'=1 '{'=3 '}'=4 '|'=6 coq-8.15.0/doc/tools/coqrst/notations/TacticNotationsLexer.py000066400000000000000000000075711417001151100243060ustar00rootroot00000000000000# Generated from TacticNotations.g by ANTLR 4.7.2 from antlr4 import * from io import StringIO from typing.io import TextIO import sys def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\f") buf.write("f\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\3\2\3\2\3\2\3\3\3\3") buf.write("\3\3\3\3\3\3\3\3\5\3!\n\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6") buf.write("\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3") buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6") buf.write("\3\6\5\6F\n\6\3\7\3\7\3\b\3\b\6\bL\n\b\r\b\16\bM\5\bP") buf.write("\n\b\3\t\3\t\5\tT\n\t\3\t\6\tW\n\t\r\t\16\tX\3\n\3\n\3") buf.write("\n\6\n^\n\n\r\n\16\n_\3\13\6\13c\n\13\r\13\16\13d\2\2") buf.write("\f\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\3\2\5") buf.write("\4\2BBaa\6\2\"\"BBaa}\177\5\2\62;C\\c|\2v\2\3\3\2\2\2") buf.write("\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r") buf.write("\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3") buf.write("\2\2\2\3\27\3\2\2\2\5 \3\2\2\2\7\"\3\2\2\2\t$\3\2\2\2") buf.write("\13E\3\2\2\2\rG\3\2\2\2\17O\3\2\2\2\21Q\3\2\2\2\23Z\3") buf.write("\2\2\2\25b\3\2\2\2\27\30\7}\2\2\30\31\7~\2\2\31\4\3\2") buf.write("\2\2\32\33\7}\2\2\33!\7-\2\2\34\35\7}\2\2\35!\7,\2\2\36") buf.write("\37\7}\2\2\37!\7A\2\2 \32\3\2\2\2 \34\3\2\2\2 \36\3\2") buf.write("\2\2!\6\3\2\2\2\"#\7}\2\2#\b\3\2\2\2$%\7\177\2\2%\n\3") buf.write("\2\2\2&\'\7\'\2\2\'F\7}\2\2()\7\'\2\2)F\7\177\2\2*+\7") buf.write("\'\2\2+F\7~\2\2,-\7b\2\2-.\7\'\2\2.F\7}\2\2/\60\7B\2\2") buf.write("\60\61\7\'\2\2\61F\7}\2\2\62\63\7\'\2\2\63\64\7~\2\2\64") buf.write("F\7/\2\2\65\66\7\'\2\2\66\67\7~\2\2\678\7/\2\28F\7@\2") buf.write("\29:\7\'\2\2:;\7~\2\2;F\7~\2\2<=\7\'\2\2=>\7~\2\2>?\7") buf.write("~\2\2?F\7~\2\2@A\7\'\2\2AB\7~\2\2BC\7~\2\2CD\7~\2\2DF") buf.write("\7~\2\2E&\3\2\2\2E(\3\2\2\2E*\3\2\2\2E,\3\2\2\2E/\3\2") buf.write("\2\2E\62\3\2\2\2E\65\3\2\2\2E9\3\2\2\2E<\3\2\2\2E@\3\2") buf.write("\2\2F\f\3\2\2\2GH\7~\2\2H\16\3\2\2\2IP\t\2\2\2JL\n\3\2") buf.write("\2KJ\3\2\2\2LM\3\2\2\2MK\3\2\2\2MN\3\2\2\2NP\3\2\2\2O") buf.write("I\3\2\2\2OK\3\2\2\2P\20\3\2\2\2QV\7B\2\2RT\7a\2\2SR\3") buf.write("\2\2\2ST\3\2\2\2TU\3\2\2\2UW\t\4\2\2VS\3\2\2\2WX\3\2\2") buf.write("\2XV\3\2\2\2XY\3\2\2\2Y\22\3\2\2\2Z[\7a\2\2[]\7a\2\2\\") buf.write("^\t\4\2\2]\\\3\2\2\2^_\3\2\2\2_]\3\2\2\2_`\3\2\2\2`\24") buf.write("\3\2\2\2ac\7\"\2\2ba\3\2\2\2cd\3\2\2\2db\3\2\2\2de\3\2") buf.write("\2\2e\26\3\2\2\2\13\2 EMOSX_d\2") return buf.getvalue() class TacticNotationsLexer(Lexer): atn = ATNDeserializer().deserialize(serializedATN()) decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] LALT = 1 LGROUP = 2 LBRACE = 3 RBRACE = 4 ESCAPED = 5 PIPE = 6 ATOM = 7 ID = 8 SUB = 9 WHITESPACE = 10 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] modeNames = [ "DEFAULT_MODE" ] literalNames = [ "", "'{|'", "'{'", "'}'", "'|'" ] symbolicNames = [ "", "LALT", "LGROUP", "LBRACE", "RBRACE", "ESCAPED", "PIPE", "ATOM", "ID", "SUB", "WHITESPACE" ] ruleNames = [ "LALT", "LGROUP", "LBRACE", 
"RBRACE", "ESCAPED", "PIPE", "ATOM", "ID", "SUB", "WHITESPACE" ] grammarFileName = "TacticNotations.g" def __init__(self, input=None, output:TextIO = sys.stdout): super().__init__(input, output) self.checkVersion("4.7.2") self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None coq-8.15.0/doc/tools/coqrst/notations/TacticNotationsLexer.tokens000066400000000000000000000001541417001151100251470ustar00rootroot00000000000000LALT=1 LGROUP=2 LBRACE=3 RBRACE=4 ESCAPED=5 PIPE=6 ATOM=7 ID=8 SUB=9 WHITESPACE=10 '{|'=1 '{'=3 '}'=4 '|'=6 coq-8.15.0/doc/tools/coqrst/notations/TacticNotationsParser.py000066400000000000000000001020201417001151100244440ustar00rootroot00000000000000# Generated from TacticNotations.g by ANTLR 4.7.2 # encoding: utf-8 from antlr4 import * from io import StringIO from typing.io import TextIO import sys def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\f") buf.write("\u0081\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") buf.write("\t\16\4\17\t\17\4\20\t\20\3\2\3\2\3\2\3\3\3\3\5\3&\n\3") buf.write("\3\3\7\3)\n\3\f\3\16\3,\13\3\3\4\3\4\5\4\60\n\4\3\5\3") buf.write("\5\3\5\3\5\3\5\3\5\5\58\n\5\3\6\3\6\5\6<\n\6\3\6\3\6\5") buf.write("\6@\n\6\3\6\3\6\3\7\3\7\5\7F\n\7\3\7\3\7\5\7J\n\7\3\7") buf.write("\3\7\6\7N\n\7\r\7\16\7O\3\b\3\b\5\bT\n\b\3\b\7\bW\n\b") buf.write("\f\b\16\bZ\13\b\3\t\3\t\5\t^\n\t\3\t\3\t\3\t\5\tc\n\t") buf.write("\3\t\3\t\3\n\3\n\5\ni\n\n\3\n\3\n\5\nm\n\n\3\n\3\n\3\13") buf.write("\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\5\17{\n\17\3") buf.write("\20\3\20\5\20\177\n\20\3\20\2\2\21\2\4\6\b\n\f\16\20\22") buf.write("\24\26\30\32\34\36\2\3\3\2\b\t\2\u0086\2 \3\2\2\2\4#\3") buf.write("\2\2\2\6/\3\2\2\2\b\67\3\2\2\2\n9\3\2\2\2\fC\3\2\2\2\16") buf.write("Q\3\2\2\2\20[\3\2\2\2\22f\3\2\2\2\24p\3\2\2\2\26r\3\2") buf.write("\2\2\30t\3\2\2\2\32v\3\2\2\2\34x\3\2\2\2\36|\3\2\2\2 ") buf.write("!\5\4\3\2!\"\7\2\2\3\"\3\3\2\2\2#*\5\6\4\2$&\5\30\r\2") buf.write("%$\3\2\2\2%&\3\2\2\2&\'\3\2\2\2\')\5\6\4\2(%\3\2\2\2)") buf.write(",\3\2\2\2*(\3\2\2\2*+\3\2\2\2+\5\3\2\2\2,*\3\2\2\2-\60") buf.write("\5\24\13\2.\60\5\b\5\2/-\3\2\2\2/.\3\2\2\2\60\7\3\2\2") buf.write("\2\618\5\34\17\2\628\5\32\16\2\638\5\36\20\2\648\5\n\6") buf.write("\2\658\5\20\t\2\668\5\22\n\2\67\61\3\2\2\2\67\62\3\2\2") buf.write("\2\67\63\3\2\2\2\67\64\3\2\2\2\67\65\3\2\2\2\67\66\3\2") buf.write("\2\28\t\3\2\2\29;\7\3\2\2:<\7\f\2\2;:\3\2\2\2;<\3\2\2") buf.write("\2<=\3\2\2\2=?\5\f\7\2>@\7\f\2\2?>\3\2\2\2?@\3\2\2\2@") buf.write("A\3\2\2\2AB\7\6\2\2B\13\3\2\2\2CM\5\16\b\2DF\7\f\2\2E") buf.write("D\3\2\2\2EF\3\2\2\2FG\3\2\2\2GI\5\26\f\2HJ\7\f\2\2IH\3") buf.write("\2\2\2IJ\3\2\2\2JK\3\2\2\2KL\5\16\b\2LN\3\2\2\2ME\3\2") buf.write("\2\2NO\3\2\2\2OM\3\2\2\2OP\3\2\2\2P\r\3\2\2\2QX\5\b\5") buf.write("\2RT\5\30\r\2SR\3\2\2\2ST\3\2\2\2TU\3\2\2\2UW\5\b\5\2") buf.write("VS\3\2\2\2WZ\3\2\2\2XV\3\2\2\2XY\3\2\2\2Y\17\3\2\2\2Z") buf.write("X\3\2\2\2[]\7\4\2\2\\^\t\2\2\2]\\\3\2\2\2]^\3\2\2\2^_") buf.write("\3\2\2\2_`\7\f\2\2`b\5\4\3\2ac\7\f\2\2ba\3\2\2\2bc\3\2") buf.write("\2\2cd\3\2\2\2de\7\6\2\2e\21\3\2\2\2fh\7\5\2\2gi\5\30") buf.write("\r\2hg\3\2\2\2hi\3\2\2\2ij\3\2\2\2jl\5\4\3\2km\5\30\r") buf.write("\2lk\3\2\2\2lm\3\2\2\2mn\3\2\2\2no\7\6\2\2o\23\3\2\2\2") buf.write("pq\7\b\2\2q\25\3\2\2\2rs\7\b\2\2s\27\3\2\2\2tu\7\f\2\2") buf.write("u\31\3\2\2\2vw\7\7\2\2w\33\3\2\2\2xz\7\t\2\2y{\7\13\2") 
buf.write("\2zy\3\2\2\2z{\3\2\2\2{\35\3\2\2\2|~\7\n\2\2}\177\7\13") buf.write("\2\2~}\3\2\2\2~\177\3\2\2\2\177\37\3\2\2\2\23%*/\67;?") buf.write("EIOSX]bhlz~") return buf.getvalue() class TacticNotationsParser ( Parser ): grammarFileName = "TacticNotations.g" atn = ATNDeserializer().deserialize(serializedATN()) decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] sharedContextCache = PredictionContextCache() literalNames = [ "", "'{|'", "", "'{'", "'}'", "", "'|'" ] symbolicNames = [ "", "LALT", "LGROUP", "LBRACE", "RBRACE", "ESCAPED", "PIPE", "ATOM", "ID", "SUB", "WHITESPACE" ] RULE_top = 0 RULE_blocks = 1 RULE_block = 2 RULE_nopipeblock = 3 RULE_alternative = 4 RULE_altblocks = 5 RULE_altblock = 6 RULE_repeat = 7 RULE_curlies = 8 RULE_pipe = 9 RULE_altsep = 10 RULE_whitespace = 11 RULE_escaped = 12 RULE_atomic = 13 RULE_hole = 14 ruleNames = [ "top", "blocks", "block", "nopipeblock", "alternative", "altblocks", "altblock", "repeat", "curlies", "pipe", "altsep", "whitespace", "escaped", "atomic", "hole" ] EOF = Token.EOF LALT=1 LGROUP=2 LBRACE=3 RBRACE=4 ESCAPED=5 PIPE=6 ATOM=7 ID=8 SUB=9 WHITESPACE=10 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) self.checkVersion("4.7.2") self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) self._predicates = None class TopContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def blocks(self): return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0) def EOF(self): return self.getToken(TacticNotationsParser.EOF, 0) def getRuleIndex(self): return TacticNotationsParser.RULE_top def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitTop" ): return visitor.visitTop(self) else: return visitor.visitChildren(self) def top(self): localctx = TacticNotationsParser.TopContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_top) try: self.enterOuterAlt(localctx, 1) self.state = 30 self.blocks() self.state = 31 self.match(TacticNotationsParser.EOF) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class BlocksContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def block(self, i:int=None): if i is None: return self.getTypedRuleContexts(TacticNotationsParser.BlockContext) else: return self.getTypedRuleContext(TacticNotationsParser.BlockContext,i) def whitespace(self, i:int=None): if i is None: return self.getTypedRuleContexts(TacticNotationsParser.WhitespaceContext) else: return self.getTypedRuleContext(TacticNotationsParser.WhitespaceContext,i) def getRuleIndex(self): return TacticNotationsParser.RULE_blocks def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitBlocks" ): return visitor.visitBlocks(self) else: return visitor.visitChildren(self) def blocks(self): localctx = TacticNotationsParser.BlocksContext(self, self._ctx, self.state) self.enterRule(localctx, 2, self.RULE_blocks) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 33 self.block() self.state = 40 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,1,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: 
self.state = 35 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.WHITESPACE: self.state = 34 self.whitespace() self.state = 37 self.block() self.state = 42 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,1,self._ctx) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class BlockContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def pipe(self): return self.getTypedRuleContext(TacticNotationsParser.PipeContext,0) def nopipeblock(self): return self.getTypedRuleContext(TacticNotationsParser.NopipeblockContext,0) def getRuleIndex(self): return TacticNotationsParser.RULE_block def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitBlock" ): return visitor.visitBlock(self) else: return visitor.visitChildren(self) def block(self): localctx = TacticNotationsParser.BlockContext(self, self._ctx, self.state) self.enterRule(localctx, 4, self.RULE_block) try: self.state = 45 self._errHandler.sync(self) token = self._input.LA(1) if token in [TacticNotationsParser.PIPE]: self.enterOuterAlt(localctx, 1) self.state = 43 self.pipe() pass elif token in [TacticNotationsParser.LALT, TacticNotationsParser.LGROUP, TacticNotationsParser.LBRACE, TacticNotationsParser.ESCAPED, TacticNotationsParser.ATOM, TacticNotationsParser.ID]: self.enterOuterAlt(localctx, 2) self.state = 44 self.nopipeblock() pass else: raise NoViableAltException(self) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class NopipeblockContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def atomic(self): return self.getTypedRuleContext(TacticNotationsParser.AtomicContext,0) def escaped(self): return self.getTypedRuleContext(TacticNotationsParser.EscapedContext,0) def hole(self): return self.getTypedRuleContext(TacticNotationsParser.HoleContext,0) def alternative(self): return self.getTypedRuleContext(TacticNotationsParser.AlternativeContext,0) def repeat(self): return self.getTypedRuleContext(TacticNotationsParser.RepeatContext,0) def curlies(self): return self.getTypedRuleContext(TacticNotationsParser.CurliesContext,0) def getRuleIndex(self): return TacticNotationsParser.RULE_nopipeblock def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitNopipeblock" ): return visitor.visitNopipeblock(self) else: return visitor.visitChildren(self) def nopipeblock(self): localctx = TacticNotationsParser.NopipeblockContext(self, self._ctx, self.state) self.enterRule(localctx, 6, self.RULE_nopipeblock) try: self.state = 53 self._errHandler.sync(self) token = self._input.LA(1) if token in [TacticNotationsParser.ATOM]: self.enterOuterAlt(localctx, 1) self.state = 47 self.atomic() pass elif token in [TacticNotationsParser.ESCAPED]: self.enterOuterAlt(localctx, 2) self.state = 48 self.escaped() pass elif token in [TacticNotationsParser.ID]: self.enterOuterAlt(localctx, 3) self.state = 49 self.hole() pass elif token in [TacticNotationsParser.LALT]: self.enterOuterAlt(localctx, 4) self.state = 50 self.alternative() pass elif token in [TacticNotationsParser.LGROUP]: self.enterOuterAlt(localctx, 5) 
self.state = 51 self.repeat() pass elif token in [TacticNotationsParser.LBRACE]: self.enterOuterAlt(localctx, 6) self.state = 52 self.curlies() pass else: raise NoViableAltException(self) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class AlternativeContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def LALT(self): return self.getToken(TacticNotationsParser.LALT, 0) def altblocks(self): return self.getTypedRuleContext(TacticNotationsParser.AltblocksContext,0) def RBRACE(self): return self.getToken(TacticNotationsParser.RBRACE, 0) def WHITESPACE(self, i:int=None): if i is None: return self.getTokens(TacticNotationsParser.WHITESPACE) else: return self.getToken(TacticNotationsParser.WHITESPACE, i) def getRuleIndex(self): return TacticNotationsParser.RULE_alternative def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitAlternative" ): return visitor.visitAlternative(self) else: return visitor.visitChildren(self) def alternative(self): localctx = TacticNotationsParser.AlternativeContext(self, self._ctx, self.state) self.enterRule(localctx, 8, self.RULE_alternative) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 55 self.match(TacticNotationsParser.LALT) self.state = 57 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.WHITESPACE: self.state = 56 self.match(TacticNotationsParser.WHITESPACE) self.state = 59 self.altblocks() self.state = 61 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.WHITESPACE: self.state = 60 self.match(TacticNotationsParser.WHITESPACE) self.state = 63 self.match(TacticNotationsParser.RBRACE) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class AltblocksContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def altblock(self, i:int=None): if i is None: return self.getTypedRuleContexts(TacticNotationsParser.AltblockContext) else: return self.getTypedRuleContext(TacticNotationsParser.AltblockContext,i) def altsep(self, i:int=None): if i is None: return self.getTypedRuleContexts(TacticNotationsParser.AltsepContext) else: return self.getTypedRuleContext(TacticNotationsParser.AltsepContext,i) def WHITESPACE(self, i:int=None): if i is None: return self.getTokens(TacticNotationsParser.WHITESPACE) else: return self.getToken(TacticNotationsParser.WHITESPACE, i) def getRuleIndex(self): return TacticNotationsParser.RULE_altblocks def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitAltblocks" ): return visitor.visitAltblocks(self) else: return visitor.visitChildren(self) def altblocks(self): localctx = TacticNotationsParser.AltblocksContext(self, self._ctx, self.state) self.enterRule(localctx, 10, self.RULE_altblocks) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 65 self.altblock() self.state = 75 self._errHandler.sync(self) _alt = 1 while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt == 1: self.state = 67 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.WHITESPACE: self.state = 66 
self.match(TacticNotationsParser.WHITESPACE) self.state = 69 self.altsep() self.state = 71 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.WHITESPACE: self.state = 70 self.match(TacticNotationsParser.WHITESPACE) self.state = 73 self.altblock() else: raise NoViableAltException(self) self.state = 77 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,8,self._ctx) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class AltblockContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def nopipeblock(self, i:int=None): if i is None: return self.getTypedRuleContexts(TacticNotationsParser.NopipeblockContext) else: return self.getTypedRuleContext(TacticNotationsParser.NopipeblockContext,i) def whitespace(self, i:int=None): if i is None: return self.getTypedRuleContexts(TacticNotationsParser.WhitespaceContext) else: return self.getTypedRuleContext(TacticNotationsParser.WhitespaceContext,i) def getRuleIndex(self): return TacticNotationsParser.RULE_altblock def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitAltblock" ): return visitor.visitAltblock(self) else: return visitor.visitChildren(self) def altblock(self): localctx = TacticNotationsParser.AltblockContext(self, self._ctx, self.state) self.enterRule(localctx, 12, self.RULE_altblock) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 79 self.nopipeblock() self.state = 86 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,10,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: self.state = 81 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.WHITESPACE: self.state = 80 self.whitespace() self.state = 83 self.nopipeblock() self.state = 88 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,10,self._ctx) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class RepeatContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def LGROUP(self): return self.getToken(TacticNotationsParser.LGROUP, 0) def WHITESPACE(self, i:int=None): if i is None: return self.getTokens(TacticNotationsParser.WHITESPACE) else: return self.getToken(TacticNotationsParser.WHITESPACE, i) def blocks(self): return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0) def RBRACE(self): return self.getToken(TacticNotationsParser.RBRACE, 0) def ATOM(self): return self.getToken(TacticNotationsParser.ATOM, 0) def PIPE(self): return self.getToken(TacticNotationsParser.PIPE, 0) def getRuleIndex(self): return TacticNotationsParser.RULE_repeat def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitRepeat" ): return visitor.visitRepeat(self) else: return visitor.visitChildren(self) def repeat(self): localctx = TacticNotationsParser.RepeatContext(self, self._ctx, self.state) self.enterRule(localctx, 14, self.RULE_repeat) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 89 self.match(TacticNotationsParser.LGROUP) self.state = 91 self._errHandler.sync(self) _la = self._input.LA(1) 
if _la==TacticNotationsParser.PIPE or _la==TacticNotationsParser.ATOM: self.state = 90 _la = self._input.LA(1) if not(_la==TacticNotationsParser.PIPE or _la==TacticNotationsParser.ATOM): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() self.state = 93 self.match(TacticNotationsParser.WHITESPACE) self.state = 94 self.blocks() self.state = 96 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.WHITESPACE: self.state = 95 self.match(TacticNotationsParser.WHITESPACE) self.state = 98 self.match(TacticNotationsParser.RBRACE) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class CurliesContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def LBRACE(self): return self.getToken(TacticNotationsParser.LBRACE, 0) def blocks(self): return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0) def RBRACE(self): return self.getToken(TacticNotationsParser.RBRACE, 0) def whitespace(self, i:int=None): if i is None: return self.getTypedRuleContexts(TacticNotationsParser.WhitespaceContext) else: return self.getTypedRuleContext(TacticNotationsParser.WhitespaceContext,i) def getRuleIndex(self): return TacticNotationsParser.RULE_curlies def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitCurlies" ): return visitor.visitCurlies(self) else: return visitor.visitChildren(self) def curlies(self): localctx = TacticNotationsParser.CurliesContext(self, self._ctx, self.state) self.enterRule(localctx, 16, self.RULE_curlies) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 100 self.match(TacticNotationsParser.LBRACE) self.state = 102 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.WHITESPACE: self.state = 101 self.whitespace() self.state = 104 self.blocks() self.state = 106 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.WHITESPACE: self.state = 105 self.whitespace() self.state = 108 self.match(TacticNotationsParser.RBRACE) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class PipeContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def PIPE(self): return self.getToken(TacticNotationsParser.PIPE, 0) def getRuleIndex(self): return TacticNotationsParser.RULE_pipe def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitPipe" ): return visitor.visitPipe(self) else: return visitor.visitChildren(self) def pipe(self): localctx = TacticNotationsParser.PipeContext(self, self._ctx, self.state) self.enterRule(localctx, 18, self.RULE_pipe) try: self.enterOuterAlt(localctx, 1) self.state = 110 self.match(TacticNotationsParser.PIPE) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class AltsepContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def PIPE(self): return self.getToken(TacticNotationsParser.PIPE, 0) def 
getRuleIndex(self): return TacticNotationsParser.RULE_altsep def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitAltsep" ): return visitor.visitAltsep(self) else: return visitor.visitChildren(self) def altsep(self): localctx = TacticNotationsParser.AltsepContext(self, self._ctx, self.state) self.enterRule(localctx, 20, self.RULE_altsep) try: self.enterOuterAlt(localctx, 1) self.state = 112 self.match(TacticNotationsParser.PIPE) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class WhitespaceContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def WHITESPACE(self): return self.getToken(TacticNotationsParser.WHITESPACE, 0) def getRuleIndex(self): return TacticNotationsParser.RULE_whitespace def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitWhitespace" ): return visitor.visitWhitespace(self) else: return visitor.visitChildren(self) def whitespace(self): localctx = TacticNotationsParser.WhitespaceContext(self, self._ctx, self.state) self.enterRule(localctx, 22, self.RULE_whitespace) try: self.enterOuterAlt(localctx, 1) self.state = 114 self.match(TacticNotationsParser.WHITESPACE) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class EscapedContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def ESCAPED(self): return self.getToken(TacticNotationsParser.ESCAPED, 0) def getRuleIndex(self): return TacticNotationsParser.RULE_escaped def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitEscaped" ): return visitor.visitEscaped(self) else: return visitor.visitChildren(self) def escaped(self): localctx = TacticNotationsParser.EscapedContext(self, self._ctx, self.state) self.enterRule(localctx, 24, self.RULE_escaped) try: self.enterOuterAlt(localctx, 1) self.state = 116 self.match(TacticNotationsParser.ESCAPED) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx class AtomicContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def ATOM(self): return self.getToken(TacticNotationsParser.ATOM, 0) def SUB(self): return self.getToken(TacticNotationsParser.SUB, 0) def getRuleIndex(self): return TacticNotationsParser.RULE_atomic def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitAtomic" ): return visitor.visitAtomic(self) else: return visitor.visitChildren(self) def atomic(self): localctx = TacticNotationsParser.AtomicContext(self, self._ctx, self.state) self.enterRule(localctx, 26, self.RULE_atomic) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 118 self.match(TacticNotationsParser.ATOM) self.state = 120 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.SUB: self.state = 119 self.match(TacticNotationsParser.SUB) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: 
self.exitRule() return localctx class HoleContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser def ID(self): return self.getToken(TacticNotationsParser.ID, 0) def SUB(self): return self.getToken(TacticNotationsParser.SUB, 0) def getRuleIndex(self): return TacticNotationsParser.RULE_hole def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitHole" ): return visitor.visitHole(self) else: return visitor.visitChildren(self) def hole(self): localctx = TacticNotationsParser.HoleContext(self, self._ctx, self.state) self.enterRule(localctx, 28, self.RULE_hole) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) self.state = 122 self.match(TacticNotationsParser.ID) self.state = 124 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TacticNotationsParser.SUB: self.state = 123 self.match(TacticNotationsParser.SUB) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: self.exitRule() return localctx coq-8.15.0/doc/tools/coqrst/notations/TacticNotationsVisitor.py000066400000000000000000000060341417001151100246570ustar00rootroot00000000000000# Generated from TacticNotations.g by ANTLR 4.7.2 from antlr4 import * if __name__ is not None and "." in __name__: from .TacticNotationsParser import TacticNotationsParser else: from TacticNotationsParser import TacticNotationsParser # This class defines a complete generic visitor for a parse tree produced by TacticNotationsParser. class TacticNotationsVisitor(ParseTreeVisitor): # Visit a parse tree produced by TacticNotationsParser#top. def visitTop(self, ctx:TacticNotationsParser.TopContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#blocks. def visitBlocks(self, ctx:TacticNotationsParser.BlocksContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#block. def visitBlock(self, ctx:TacticNotationsParser.BlockContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#nopipeblock. def visitNopipeblock(self, ctx:TacticNotationsParser.NopipeblockContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#alternative. def visitAlternative(self, ctx:TacticNotationsParser.AlternativeContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#altblocks. def visitAltblocks(self, ctx:TacticNotationsParser.AltblocksContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#altblock. def visitAltblock(self, ctx:TacticNotationsParser.AltblockContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#repeat. def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#curlies. def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#pipe. def visitPipe(self, ctx:TacticNotationsParser.PipeContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#altsep. def visitAltsep(self, ctx:TacticNotationsParser.AltsepContext): return self.visitChildren(ctx) # Visit a parse tree produced by TacticNotationsParser#whitespace. 
def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): return self.visitChildren(ctx)
# Visit a parse tree produced by TacticNotationsParser#escaped.
def visitEscaped(self, ctx:TacticNotationsParser.EscapedContext): return self.visitChildren(ctx)
# Visit a parse tree produced by TacticNotationsParser#atomic.
def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext): return self.visitChildren(ctx)
# Visit a parse tree produced by TacticNotationsParser#hole.
def visitHole(self, ctx:TacticNotationsParser.HoleContext): return self.visitChildren(ctx)
del TacticNotationsParser
coq-8.15.0/doc/tools/coqrst/notations/UbuntuMono-B.ttf000066400000000000000000005656501417001151100226440ustar00rootroot00000000000000
J +J????]q+104>326632#4.#"#".74.#"32>"7&,,44`  $6$%6#   7`}H4Tk7(0 G`~II~`>T23S>?S33T[#7_@ R@ HRP3`3p3333 )0))R  @H 8 ??q??+q]]+]q]10%#".54>326632#4&#""'"32>54.H%;**9$%;*2 *2'`#     :[@""?\:7[@$ )G72=4:; 8-.9 9--9 r$M@L/"?""d& R @ Hd%H  H?2?9/+]10+2####"&54>""32654&;T6_`i Z 'C[a "+&r1O9oc ("+6 b,<66?[+1@F-&Q ,"W W)W ????10%#"'#4>32&#"62324&#"326.F./2{*9"!mt}23 (,*6[A$0A'`'4u?Q G6 &I@D (F("D@ H'#HJ???9/?+]102#.'##3624&#"3265V>!/74-% !*07{{ u<1 ?8r2M49Y*_^V"PYX%D1.1(y0I@' MF" 02,F@ P  1J'J J ??9]]10+%267#"&54>7>54#"'6632 6K$ "/='sq"4@'b(<$U9cq 2;*" )Zd _W3C- G_bW,;(  63o@ (M M M M@, LS'51S  4P`W,,"OW" @W?]?]9/]]10+++++%267#".54>7>54&#"'6632 (P"V=32&&#"3#32>7#".5gg3N7*B.8** "P,*D0pf-J3k-#f4( i.O;Yk;@"`  D WH??]]]]]10#5!#327#"&5£1+ OQffDfX^'"4@@MD$D@# t ????]10+".53326532654&'35F*s&%!3'f-=+G !>X6tN<>M   /= 6X>!)4 1@@ L QR "Q!W ????10+36654&'3#".5332679#f-;Y35C&r" %    /= }  ;P0A> l'<@#F?""`$$$)F 0($H#HJ ???]]]10#".5467#5332>54&'53#)BU-,UB)"+Q"4 + + 1&U+% KjCCjKA54.#"'66:J,ks>Q1|")"  "r*RzP!?Z8)8"7L-7R7k t| M M MM@. MD J???9///99//99]310+++++>32&&#"#5&&'3( )+7  ^{3M 0# X`\HKI X,k, L)@7 L M MLQ,,-) .))-."-, #&g ???9=////9910+++++6676632&&#"#"&'73267&&'89&  $/=)* 0;K"LSX)Oj"63 a0RMM*'?+ f*&ss.k@ @ L@ M L @f M@ L M` M@ M@ M@ L M@ M   H H@pH ?]???9////////////////10++++++++++++3#!!5667#537#5!;0j- h3 `ZZNb,R'fG,`8bfCӹ@M@ M@ M M@w MP        }OU  U ?2??]22?9/////////////////]]]]]]]]]]9]10+++++3#3!5667#53667#5!-Q&)?x"]|7"O3-fK?O-fk&Q@)M"S `(&'Js& &&  H???9///]10+>7!5!#".'732654&##++&  '.12D*y ;3) 'N17>54.#5>7#5!&&-*<$-# %C6 "! z "#"6%8R5d[ ,   E  f[ ! !/"'3#   %y(w@FM"@ L! M!@M! Mc  *)* c#)"g& @e e ?]??9////]10+++++3#!!&&445467#536654&#"'6632>,Bf8&I(/%)# ='h7#6632T2- '9'+Lg;{ [7#5332654.#"777->%5-*@-  r^~/"VU%z6^zHcIy*nxz6]LHqff=]tkCX5)?@+i "/5@0477767 @ HR@(P(((((#H#@% H#R0@P675231 " - % ??????q++]q+]9/]9]10#"&'66323#5>7#5332654.#"777->%5-*@-  g[~/!VU%z6^zHcIy HNP$]G0PHE%]tkCX5[)@A+j  !06@/@ M.@ M M155!87'H''@K H'R/7/ O  /.?._...R /O/Oo6342! " ,????????/]q]]q+_]+99//q10+++%#"&54>32573#5>7#53"327&7773JI+W  g[   }!VU%z t4Z@%HNP$]G0PHE%]S 1>0&)@A+jkR@9O _  R@ H 0 0RUJ ????/]]q+]103#332>53#W+#W(H;k# H2H/-V#o@.S  R_% `p0@PR@ H$S!!! J U?????]+]]q]]10!!3332653#"&'#"&546321W9& X);%+ ())(k-2C8J.%% %%7V+@ S H@Z H @ H -o+++++@H+%@H%@ H%##@#P#`##_o,%+@ H+S@ H ??+??+?]]]+++]+++qqq1032653#"&'#"&54632.57& X);%+ ())(:I*X +H-2C8J.%% %%,:" !( k R@4@H/?O  @ H     ???99//???]+]]+103.'#3372>53 WK^W"  W %D7:[Y^=wkkH"H1G/V +@ S&& @H@ HO@PH @M/ ? O  @H @ H @M/?O0@,#S)     ??99//?????]qq+++q++q++q103.'#3332653#"&'#"&54632 WK^W,& X);%+ ())(:[Y^=wkkH-2C8J.%% %%V'3@"'@M'@M@M@M.S (`((((@H(5@H@ H@H @H @ P `  @)H @H@ H@'4+S1$   ??????q+++]+++++]_q10++++6632#4.#"#32653#"&'#"&54632K-(7!W W& X);%+ ())( *D0&!(~H-2C8J.%% %% >&$_@  PO++4,&D_@ /3P4O++46>&,_@  PO++4$& _@  P O++4>&2_@  $P%O++4&R_@  $ P%O++4'>&8_@ PO++4;&X_@ P O++4'>%15x@OD 744556&, P ` O  6D6/444442/))# J ????]?]?]q]]]]]]]10".5332>53#"&54632#"&54632%3#X6c'5 !5'Z6X>!~G;&X@P -O !O O++++444'>%15k@G,,/,,&&&7D 7246  P  6D@P635/)# J ??????]]]]]10".5332>53#"&54632#"&54632''7X6w'5 !5'n6X>!(E5P;&X@#P %O O O++++444'>%171@M- M+ M'M%@M! M M@ M,&&&&9D 9628P  @$H 8D@P85743/)# J ???????]+]]]10++++++++".5332>53#"&54632#"&54632%77X6w'5 !5'n6X>!:.##.N;&X@P 0O !O O++++444'>%151 M-M+@M' M% M!M@DM M  7D 073556,&&&&&6D642)/# J ??????]q]]10++++++++".5332>534632#"&'4632#"&7'X6w'5 !5'n6X>!P5E;&X@#P +O O O++++444" M M M@@ M@ M@ M@ M@ M @ MO  "S @P$Sp#  W W??99//]]10]+++++++++%#"&547!4&#"'66323267&=P*il0K<%D H-<_C$ #/->^? {v.4h =Zm$;' >&26_5@M2M2@&L2. M. 
M.., M, M,,(M(@ M((&M&@&M&&" M" L" M M M@ M L@f L-'_'@''8    78567!!!P!`!p!O!!755330**$H ???99//?]?]?]]]]]]9////////9]]]10++]++]++]++]++]++]++]++]+++!'##>733.'#"&54632#"&54632%3#g-,,,++  y $"!!"""""{{[DDX#967!!769~G,&D@4;PIO=O1O++++444 >&*&M&@L" M" L M LM@AL    +,(*+!+))'$H ???99//?]]?]9////////910++++++++!'##>733.7#"&54632'3#g-,,,++  y 0""""{{[DDX#967!!769~G,&D@;<P=O1O+++44 &G@ IPO++4& @ PQ% PRO++4y)&@M L@ L @ L@L@M@M@L D&+F*s    !'J!J ??99//922]10+++++++"32675#53533##"&54>32&&('7#?<HH{--X=m'D]5"5'!9 9O0oi[ZBBZOyR) b!X/@)@M(@M(@ M@ M @?L)*&Q1 0  S 0)}((P#`#?##W((O..W @W?]?]99//]]32]]10+++++3##"&'732>55#"&5463232675#535&&#"55jw*L#=,' (WZ{iJU%( GG  b,Psn h # tgtr:= GPK>&*_$@ )$( P)O++4!X&J_ @ (,! P$-O++4->&._@ P O++47= \@4" ! Q!O@H   ?????9=///+]?]?]10>73#.'#75'77%# .11:8/ ',-||&^\&/2/684CHH?=7e+`8668Vy 49@!+F@`66!F 50J&J ???]10#"&5467&&54632326732>54.#"Z30*[_vimr1=  & '& & %3[lD, -O:"";N--O:"";NV&2?@&-S" "@"`"""4 4'S30W *W  ???]]103267#"&5467.54>32'32654&#"=  30*-I4!;R01Q; ,7-201-201  : %3'AV57[A##A[78K9.?NN??MMV&B@ 56 P7O++4V&C%P+44>&_@ '+P,O++4[&_@ &*P+O++41V&]_@ PO++4r"/`@;!"101( (( `p1/@H/@ H/ 0 !/ % ????++]q99//910#"&'663273#5>7#5332654.#"->%5-*@-  r^~/6^zHcIy*nxz6]LHqff=]tkCX5r"/~@ 01010 @ !  1(@3H(( `p1/@ H/ 0" / % ????+]q]+]]99//]10#"&'66323#5>7#5332654.#"->%5-*@-  g[~/6^zHcIy HNP$]G0PHE%]tkCX5 !0l@E . 0212 0 @  20''O'_'o''1! " ,??????]]q]99//]10%#"&54>32573#5>7#53"327&3JI+W  g[    t4Z@%HNP$]G0PHE%]S 1>0&>&* @ &$ P%O++4!X&J@ *(! P$)O++4k!s@/?@HR#@ HR   R  @H "    ???99//??+]]]+]]+]10".55##335332>553T-4RaaR`a 7 !>X61k}-<$%<-6X>!6[r :@$S` D 0 HJ??9]]10#66325>54&#"{#`9yp+NnC&?.92! usJy`Kt0AW<7C.>&1C@ PO++4?&QC@ PO++4 >'37%@ M M@1+@M@LM(`/?9 8"#9"##"89468. 8+#H?1157???9////]9=///99]]]10++]++!'##>7&54>323.74&#"3267'7g+(&!  &'*  y   6V~@!! !AT#967!!7699/K,>&D@  <3P+444 >&G@ RPO++4&@ RP% PQO++4>&  +) P+4&P+444 =&$@  POO+++44,&D@60P5O1O+++44 .&$@ % PO++4,&D@ =3P5O++4<=&(@  POO+++44&H@ -'P,O(O+++44?.&( @  PO++4&H@ 4*P,O++46=&,@  POO+++44$& @  P O O+++446.&,@ PO++4$& @ " P O++4=&2@ '!P&O"O+++44&R@ '! P&O"O+++44.&2@ .$P&O++4&R@ .$ P&O++4,=&5@ ,& P+O'O+++44O&U@  P O O+++446.&5@ 3) P+O++4Y&U@ " P O++4'=&8@ !P OO+++44;&X@ P O O+++44'.&8@ (P O++4;&X@ !P O++4"Zy0W#@. L"@ M "%cc@%%2-1 e@  " "*.g*g??99=//]]10++2>54&##532>54&#"'6632#"&'7w0R="ZL:. 8)B.#E*&T41T>$..<9;cH % /;$7'D9k (14a2L6.Pb8Id=kKZ&S @M M@ Mcc ($'eg#!g ??99=//10+++2'>54&##52654&#"'66.M9 )0388bL-UD)F3C>%&?4E'<*#BL3&+_@  PO++47=H@+Q  Q 0 W ???????]]]10376632#54&#"'77?{*4F*{*! &^\&e 9O0B:`8668-[y,@ QQ 0 W???]]106632#4.#"#-%i>=P/{$!({] #R@/39|@/R@0''d??! 0""W ?,,W }<6} ??]?]??]10%632#"&''7#".5463257"3267&&32654&#"p+!+'> S;d'';)PM/i &  $ v'2%8%/3Y8o#?V3s (3!7'  r#"" k-AZ @5M8S`++C/?S`##(C 0S B.SB/?J=3J ?]??]]]]10+"&5467.5467332654.'3'32>54.#"hw;/ u!!u /;w$$$$ c]@L !%%)=%!,88,!%=)%%! L@]c# ## #(4} M@M M @ M M,F@`6/?OF@ `  6 &0&@&&F 52F5#/?J/)J ??]?]]]]10++++2654&'3#".5467&&54732654&#""}#.84 :R33R:38."}"-38((7266P*#I&9Hh91Q9!:P/9hH9KG*P66?69??96?.Yk \M@/L""! HH ????9/////]]10++!#"&'73255!5>7#5!+H>7 #;, +32>I,Btia/W/D-V:G+_l~JfC[NM@" M    U U ?????99//10++3#"&'732655!5>7#5!4==ގ / 664]|?NU$zWK&PMFf .&$b@  PO++4,&Db@ 17P/O++4?Vk$i@C@ LO_"""?&D%  %"H@H H???9/]?]]]]]]10+#"'73254''667#!#3#!#+!!,-# ")$ w ' ?   kfffV/:O@-0S $@$$,<%:Sp; ;/:s%%5W +@((W ??]?9/?]]10#"'73254''667&&54>32!32674.#",!!,-# ")$]h&>P*hlL<%CB&) "#  ' ?  wg>^? 
{v .5i ##$>+7;$7M7@L3 M3 L1 M1 L-M-L+M+@L' M' L% M% L!M!M!M!@gL2_,o,,,,@,,= F`=o::8<  P&`&O&&<F<::95/////)#####J J???]]r?]]r?]]]]]]]]10++++++++++++++++++32>54.#"4632#"&#"&54632#"&54632%3#& '& &}vimrtjms"!!"""""")H55G))H55G)~G&R@%, P:O.O"O++++444>;?8` M1 M.@&M* L*@M) M)@M( L F@@:HA><@ .@F@/>>@H>==83.---;%%%J J???]q?]q?]+q+]10++++++++32>54.#"4632#"&#".#"'>323267'3#& '& &}vimrtjmsj  1  ")H55G))H55G):  '   [G&R@. P>O-O+++44.&2b@ "(P O++4&Rb@ "( P O++4>+/+M+@L' M' L% M% L!M!@ L F@, H1./0 P&&0F0..-)###J J???]]?]]+]10++++++++32>54.#"4632#"&#"&54632'3#& '& &}vimrtjms""""")H55G))H55G)~G&R@,- P.O"O+++44 &<@ PO++4X&\@ #$ P%O++4$[&H @' H `(&Q' ##H??9///]+107#536632#".'732654&#";k 1$UG)!I  \v5f $9'NLN "! [$00M.L M@ L 00@: Hy$$$$$2+++|2Oy1}... U (} ???9/////]]]+]10++++%632#"&''754&#"#663232654&#"p+!+'> S;u$# ' iY53B% v'2%8%03Z8B8 9O0#"" $[`*@`/$$$$$Q_ @_ @,*QP`pP`p+r''' ! ??3?9//////?3/222]]]]]]]10%6632#"&''7#53573#32654&#"  3(UG0HCDkkk{  {$9'NL0!H@vVfvf "!  .;[@;@$P$$=+/?y:3257632#"&72654.#""3267&&&8% .`.$8'(9    3 &C\54Z@%$AY45\D& URP2'-&2[!.:Z@;@4P44 32"327&2>54#"&'8$. `.%8&(9    3  &C[55Y@%$AX46[D& TRP2'&1 F@-   ! H????9/9329107##6737#'#'37&&&&'$,Z*{8&&I"}N46d $.<-T v NGC*S$/k M@8 M)(0@""@01%F 0(+J!J ????99//9]]]910++"&''7&&54>327&&'3267&#"(O?A*Kg=  Q$0?Y~ 7+ QV&jMxS+KJb c aF9V1R'#%.^@:  M)#/ *0/ `p_   0&S"/!W* W  ????]]]9/9910+74>337&&'3267#"&''7&&77' DhG H!U DM3#FG@}O#0 3ZC'A8 c   e HItK#7$0k 5@  QU U????]10!!#53533#3PP{bb  C@%@M Q    H?????9/9910+7#5'7#57#3D{^C::kP-R-f[$[DN@-=@M,5FS&FS P??E?418!@W+OW ?]?]99??]10+72654.'.54>32&&#"##3267#".'7-0#8.1L5.OH-/!!:/3R<&  7 #7-'(,#*+(]    1%5) h   !3%0"\!**(\ C[!=@"@ M"! # "U!  J???9/////10+327#".##5>7#5!8CD!4// 1 !4-*/9$ 664]|EW`( -+!d%+%K&PMFf7x#!@ F%Q$ g??10!#5467>54&#"'6632|4#& 7&*'" (,b05P6),$:G) b0>",S  '<%%@ S'n&W ??]10!#54>7>54&#"'>32|%*#M ' '28ch $q)! b JD'  r+8v س M @> M @ M/F(F`:8D D 9@,,H +    +4J#J ??9=//////]]]2]10+++%#"&'5#536632%3#32>54&#'2654.#"#>R0&S)>>!O&AR.+$?-BB  "0)&)   8K- O} /:,DN))O0#-'f)$k! M L@ L @ M @ M@ M@ M@ M@= L D #0  D   " r J ???9/3322]]]2qq]210+++++++++".55#53533533#'2655#6G*++{{..,I5& !>X6DbbD6X>!k=N;327#3267#"&''7&&77"&#"3&&'7&?P*G69 %BY3#D=; "   >^? E@nU  V l KFtvb$'KR =*k:@D @r H J ??9/3]2103##"&'732655#535#5!HH1N99M/5*3ww?xbG-P<# b3E@bf1V'w@'@M@M@M MMM@$MT"(DD(X%sH J??22?10+++++++%#"&'732655#535#5!3##"&546321C'0R&&G"9OO. -- .9M/h%2SUfS$**$%**Y y0*@'F2F1"J  J,J???1046323267#".55#"&732>54.#"ladj  " )4 .dj}  6-"d,?)-O:"";N--O:"";N[,*@'Q. S-#W J*W ???1074>32327#"&55#".73267&&#"7P4-b  ?B/,A-}&&" -.6[@$ B#gEQ #@[4AG Qr%N@. @ M D F`  '"D&H## J??9/32?]2210+2#&&'##5#53>4&#"326]q,3#" 4#{HH))'_-$ 0+rch6X?EE7#5!#".'73254&##&'$ z ")+!:*~p0/*P#pC@(+-*fK+25-<%ca a a5/i@    ?102#&&#"#54>5'Z['5%4!&&!4%O@ ??10'''>l6zgl6z+pC+pV>#+@$$ !?]10'7#".54>324&#"326R#    8  9/K! !! !m  >@%o0 JJPJJ ??]]]]]10#"&54632#"&54632%3#"!!"""""D~G n @ J  J ?10#"&54632'3#3""""D~GJ-@J  JJ J???]10'4632#"&'4632#"&k9x"!!""!!"*J7@!JJ JJ ???]]10'7#"&54632#"&54632tx8jr!!!!""""ى*J JM@"!H JJJJ ???]+10+#"&54632#"&54632%77!!!!"""",UW-Gk4AA4ro > M@ M /?  ?]10++#".#"'>323267'3#  1  b  '   [Gi=?10'7i>>Zw}=?10''7}&\^&9669_w}=@ ??10'77&^\&`8668M> C@*JOOJ@  J J?]?]?]q]]104632#"&'4632#"&7'5"!!"""""V-^P5EM> K@2 J//?OOJ@JJ  ?]?]?]]]q10#"&54632#"&54632''7"!!"""""_]-U(E5PM> j@JJ  O _   O_J@P`JJ  ?]?]?]?]q]]]]10#"&54632#"&54632%77"!!"""""%1/'V:.##.N?>&(C@  PO++4?,&(@  PO O+++44k#Z@9@ M@ M@ M0   F D!"$ JJ !OH?]?9/?]]]]10+++#632#"'7232654&#"##5-K6'@- #%9.  
{Pkf1I0/J4d&/8+f$>&:@ ; P O++4y"3@ $FF#WJJ ??9/10"&54>32&&#"3#3267(*Kg=!5( A*2*MH0?Y MxS+ b$<,oPU a(y66k,6,&,@ PO O+++44*k-k+R@3@(P((- /?@Ho  ,}++ s #} ????9/]+q]]10%#"&'#'667>55332'32>54&#-54&#-&.@ PO++4.>&C@   PO++49&@ &.P!O++4-[k >@& D   D 0P P H??3??]]]10!#5#3332r{{k k$6kDF@ HD @ H J OHJ ??]9/++1032#"&'!2654&##.8[@#}v#\-_6=@A*"v0M8gf af^.36*6r%$k 5@D  @H HH??2+]107!#3!5}gffaff[k@ M M M@ M@M@M@M@M@DP@5 H 0 @  O/?OH H  H????2?]]]+]]10++++++++73>55!3#5##3$ F#rr f-u}z0<?LTOE?k( k-d@: $*-%.%%/.$** ?????9//////?9///9992221053>73#.'##>7&&'p  `  g>-)#g ` g#)*Ak&A;9954&##O<<23'$ ,'d39R5,'3;:cM0k-) ")/&2 9Gbs+&&# c0=*IT4"D6"c  *..k<@ M D   D@ H  ????99+]10+33>73#.o-58co9a,k#_hg,gl.8&@  PO++4-k.k)@ D 0  D   H????]107>7665!### F{Z1J6] !)Bǀ:zrgO2k0-k+y2-k4@D 0Dp@ HH???+]]10###{{kk6r3y&k7k E@'@M@M!"!" "@"`" !H ???]9=/9910++#"&'73267&&'3>7'#!-18!5$ ,Fp#=&kKudZ/;N. h,8uUFN,TJ> @  M MMM@9M M@P R@ P  s s?22?22]]2222]10++++++4753#5&74&'66``b!.-"!-,"LAAGF@][\=@Z^ k;-[k _@)@ L8 M@ M@ LD 0 @  D@ Ho  H??2??]+]]10++++%3#5!333#r{|fk-kF@  D 0Dp@ HJ@  ??9/]?+]]]31032673#5#".553 $ '{{,,I5{&0 -4YEk 9@!o  O_o H??3??]]q]10!!33333:aR`Rak[kY@:/?O   p  o 0   H??23??]q]]]qq]10%#5!33333`aGaG`fkk7@F@ `  DJJ H??9/]2]105332#"&'232654&#1P9 ";P."O&{+230f1L66P4 .67-"kj@, H/?R@ H@ P 1   R  @H  J J ??9/??+]]]]2+]+10%#"&'332'32654&##3L/@$="` a\&"--?``54&#Յt%T-{1*I8ij `f $9/*y$9@ F  &%J J ??99//]210"&'73267#53.#"'>32:S@0GN(/*A *7 =cG'%Gf a \Io0<# b )QyPJwT.y.e@ HR%@ HR  0R@H/*J H  J ????+]22+]+]10#".'##33>3232>54.#"#6#>aa?$2!%6#   6`}IAnTkLe;I}`>U44T>?T44Ur$:@ D&F D % H"J??9/?22]102#5##>7&&54>335&&#"'(,+{@#= !$%3+!>V88? 1<r9o<EE?X64M2-1.,D(.`@ M M M M M*S @ H 0 S/%W-W0Pp ?]??]+10+++++632#"&54>766732>54&#",[N732&B1!7K*td$FjE9! %'"G 9:9U832 ,&*KJH!& ) :21++123J/  h`5T[#/#0 &ASY(@ Q00U ??]]]10##!|fpz]@ M Q@) H /? /?OU  U  U ????2]]+10+73>5!3#5##3%1#rrf$\bc+솆EFC H+s@A@PR))$# #)  )#,-) +  + $  ??????99=//99999/////993]210>73#&&'#5#>7.'353+ g !#<g*!`"(g "" g `2443575A oNMn%CB=!74344130Q@/(SS2$1@W?%O%%%%s" "0O--W ?]?99=//]]]106632#"&'732654.##532654.#"@"N3'F5 "'&!7J)2_$S5,-jj&*K  4�7+(7"e ] ?<@ R 0Rp@ H   ????99+]]10336673#?n N8co)')@J* #BDK,?&@ PO++4?Z@ QQ@ H R0Q@ H   ??9=/99??+]+]106673#.'#3#I,//96- &,.||!*b)8:6AFF?=7!@ QS ???]10'667>55!# -C0#" 4{p+WRH7"f"?FG6*p L@E M@ M  0?  ???9/////?9=///]]9999]10+++>73##'#>7 f qKZJq uss=L:|{t1; 9@ QQ@ H U  ????+2]q310!#5##3353{{{{ֶR?B@Q0Q@ H0U ?]??+]]]]]10###|{*p?[S,F&+@ Q@PU  ???]]10###5!ϗ|ppfX\[ 't@/' "0"@""""R  ? O   0@R@H(I' I I I?????]+]]]]]1036654&'#5.54>757*%$+`$,,$`/C+*C1`&A00A'xO==LJEFH'=S0-N>(9T89T9 [?z 5@ Q Q 0P`p  U????]10%3#5!333#r{n|fp+,@ Q S0 W  ??9/?]10326753#5#".55! ||*''E4֕*'A2  S@_o p/?O_o@Mp   U????]+]]]]10!!335333LaI`Iap z @M L @I L / ? O o  _ p_o 0p  U U???]q]]]]q]]10+++!!3353333#a>a>a#`p K@1@ MS @`" Q!WO_oU @  W?]?]9/]10+%4&#"326'632#"&'#53Y+$   -|(!%@.4H(7V"P˘!l)=*/=$cf"#]@ 0@y@ H`p@%@H$ @W  !W ??9/]??+]]+]106632#"&';#'4&#"326/%-;$>!```   !/'>+0=$ *#~"6!?S@ H# Qp@ H"W! W??9/+]+10%2654.#"'6632#"&'3 &+$% N/'H8!lZ7#53&&#"'6632&C^80T0B#2+R3%6,R&9_D&<]?! e  #]2* d !?[*m@$ ;K/R@ `  @ H ,&&&@ H&RR+W!W  ????]+]+]_]]10]336632#"&'##%2>54.#"a;FK*:#%:*JE;a3  ֵ[f"?[:8[@$l[[ 9--9 8-.9 "@@$"Q$Sh  Qo#U"" W ???9/]22]]107#667&&54632#55&&#"33.9,!mh43/{ (6( E*P-:f#;)NU 8!&HC@ )'P(O++4&H@(:P2O&O+++44[)i@C@ L"Q@ M@ H+  (Q@ M_*)W%W  s?????2]+3/+]]+10+3#53573#6632#"&'7326554&#"P77|&1A'ZV 1%#S:OSB 9O0]Yg+$B:Y&@ PO++4' P@1 `"@"@S!@W_oU O  W ?]?9/]]]]]1074>32&&#"3#3267#".' DhG+F$3(HL QIDM3JiC 3ZC' c 2*]-1 e &BZ6V$L$& @ ( P O O+++441VM+P@1)@L_%%%,}"" U W)} ???9/]]]]10+6632#"&'#'>5534&#"326. 
??*8:3<<Q  &TI,;$ yOb:a-C/#'&.%q@3O_/?Oo/?O @ H&}   #} ????9/+q]q]]q]106632#"&'5##33534&#"326. ??*8:QWWQXQ  &TI,;$ ֺ#'&?& @ PO++4?&C@ PO++4X&\@ (0 P#O++4?z H@ Q0   QOQ@ H  U????+]]]]10!#5#3332r{|p*,<@@&-F #@#`#p##>F7Fp=2W :J(W ??9/]]1074>7&&54>322667#"#".%4.'326 5+$)?,($! #R&  -!;F9Q12Q; :$#5**38:79)3%[ "'gH7Y>!;VA*# $((DCB Q@1S0@ Q !@W @ HU W??+9/]]]10%#"&'#53533#32232>54&#wi#Q,CC{+ !;-ij f99f_f $9/)P@S 0@$Q* @ M  W!!W  ????9/]+]]1053573#6632#"&'2654.#"C{($A2aR9c1! z\\K&?.SK l ly 4bL@4L%F@`60F5((-((- J  J??9////]]]]10++4632#"&"6632327.2>7#".#"vimrtjms$  %#     #6x.?%  &B0P)8!  #;*#4I@(M'S @`6-S 50$$0*W W??9////]10+%#".54>32"3267#".7267&&#"6632 :Q22Q; !;R01Q; ..(/t -,&0  8[B$$B[87[A##A[F6A6.  "3<2+   t#H@' M@ M   % $J ??9/?9///99910++#.'3>76632&#"  0+" <0-#5 3U^vUE1047^TP+R@G! 5@ M "!  ???99//999910+#&&'3>7>32&&#""Ah'Q%   -!+ S_BY&VWR""523 , a.W9*f @; M ,   +,&/?O 0P+%"??????]?]]299//]10+33>733'667#".'73267.o.69b$;54.#Յt%B.55|GGyz*"/hf b::bCa$' 'v@<Q @ `  )$Q/(@MoO!_!o!!J@ H @J?]?9/+]2]q+2]q]10#6632#"&'#53532654.#"N+%D5eW:e355|Q!% ! 4S1&?.SK RS55 l6r$[@7 %&"F`& 0D%J   OJ?]?99//999]]9102'###>"3263'7654&v# 9S<1,{-., ' 2 'R# ?ran8P_.gckC/<"7*?["M@-S@`$#!$ #$Q# W W???99/999]10#"&'#66324&#"3'7690{"c0mt##3S63 '*R(  l qCg W-J?Q J/F$ 0D @H H H??2+107353#3!5}rfvaffYD1@@PQU ???]]]10##353|spn$kB@"   Q @H H H?22?]22222331075#53!#3#3!5}YYg솆fbfbff /@Q s OH ??]222310#3##5#535!tt|GGfpzSS-Yk"L@/S  $!QO#@W"@PUH??]?9/]]2]103!!632#"&'7232654.#"-""6Z?#}  MM$5! kf DjIeOa4F*?Z"H@, 0W $"Q @ H#WOH! W???]9/+]]10#632#"&'732654&#"#!%$1M62O7 5.6= " {YpW 733#5#.'##>7&&'p  `  g>-8W ` g#)*Ak&A;99733#5#&&'#5#667.'353+ g8&"/W*!`"(g9'" g `2448f<#L' oNMn%>54&##532654&#"'6632!!,-# ")$-`*) ")/&2 9GbO<<23'$ ,'d39R5,'3;+J8 ' ? c  *.e+&&# c0=*IT4=3&3VFa@7*Q;>QAH H3&GF2O//J6$'>'>6@P`J??]]?99=//]?]10#"'73254''67&&'732654.##532654.#"'6632!!,-# ")$)KS5,-jj&*K"N3'F5 "'&*9! ' ?  e ] f  4�7+"2"-[kF@' Q  e???9??]99//10!.'#3>733#5L %-1{{*' $-5-O"+r#QMCkDDBDKN%+{B?zO@. @ L M  D   y????9?9/9910++!.'#366733#5? %%||;*,,B8r?=7ֺ*f*684V-k @ @ L@ L L L L @@ L@ M@ M !"?O O/ !  ????9]]33/]2910++++++++!.'#5##33536673t!D%rr%D=}%0(''0F>:$k6BGOR'!BMZ7 L L L L@4 M ` Q   ??9=/99??9///]9910+++++>73#&&'#5##3353 O*+% |<&E{{E/003p,@GH 73#.'##53U{22'"&/594* (/{::GV"9;A*#JKI"NW[*#KJEV"W@5" L  $ 0Q #   ! ????9?222]33]]10+3#>73#.'##5353CC+% ,38;9/ ',-|--|Ay120675BHG?=7A(k=@" DP H????9]229/910536673#.'##8C@2,# !${f@<9YPX\*#QMC@@# `Q H  ????929/]9910>73#.'##53 &+.40) #%{M23/684AKK?=7pf-[kJ@   Q Q 0@Q@ H U U?????+2]]10!##33533#5:{{|#rk;zj M M M @, M Q  Q 0p U   ????99//?]2]210++++!5##33533#54~{{~{$rֶ-k   M M M@ M  0  D / ?  D  @ H    H??/99//2???+]q2qq2210++++qq####335Y{f{{fkfk; N@? Q  Q@P@ H U ????99//+]q2210]##5##3353M|c{{cpֶ$[k&\@;P_@ H(@ H"#P&`&p&&'$""! /??99//?]+2+]]106632#"&'7232>54.#"###' JP'>.   aB`kAhI(\+G5:J*k$[&[@P?_(@ H"#&@H&'$!"" ??99//??++2]]106632#"&'7232>54.#"###' JP'>.   aB`ִlw5U32&&'.54>32&&#">54.#"" '<*&7$'<)9)Cf 4O6 CeE'!-.?&     6Ki160WA' 5E&)NC3 #eXC5Tn>BwY5 d#;N4!*0%*8|/< M@O M@M@M@M@M,/>00@8P88'>c=,:J"33"@M@MJ /J??+2+9///?2]9/10++++++&&'.54>32&&#"&54>32'>54#"P{#3N3=[<#<%'5 60<2&!1!;& *@8+@R/.XD) e(2f#/D-"3$70&  0(Vy,V[k 1@D   U U??]]310#3#5##5ۣDsMkfaf&z -@ D H ???22210#3#5##5!ϗXrbppf k<[:@Q  /???9///9/99993210#5&&'3>7&^/{.W%    ց^^~'YWP PWX( kMMMM @:MD D_o_o  ??9////]]9?9=/////]910+++++>733##5#53&&'3  %).x~||u4P%8PFNP 2][[/RR^bH[u@ @ M@ L@M@ L@. 
L   DD   }?????99=/////9910+++++3##5#53&&'3>7 M'Ml{oR&H    lURRVj'YWP PWX( [kK@)    ???9??99=//9910!&&'#>73733#5R51 $+0Td{">,r3|>;3LX]+$8w5zO@*    U????9?99=//99210!#&&'#>7'3733#g3*} $(+Z\}20`3P (K0=?>䓓 J$^rH@ DD@ HD 0  HH   r?2???]+]10#333#5!#5Er|#{FrnsnzH@. @ L @ L y yy r   ???2?]]10++#53#333#5!YGGj#iy]]p-[kFQ@ H Q @ H S r???9/?++210!#5#".55332733#g-!,G3{ !|#r4YE&0 -+zr@ @ L/@?MM M@ M/Q_Q0   S r ????9/]]]]210]++++]+!#5#".55332>7533#U-*D.{*|#r'A0"kG@)D 0  D J???9/]]]10%#5.553536673#5D)D1{ D  {{ `i5V?:7nr1-o@*@Q      0Q@ H WO     ???9///]+]]]q]]10%#5&&5535326753#5#ESX{0E ||!JKJZ/ KN*6k<@D 0@ D @ H J   ???9/+]10%4.#"#36632#U $ '{{,,I5{%1 k4YD?K y(/|.@( L*@M*` L/@ L)@ L)R(1@# HR0/R0@//H O,,J  J ???]9/]+]10+++++3267#".'&&54676632'4&#"3?0 ")4L3HBT UI3A% d$ dZ V -Ol? Q7 '}6Se.&`XLZJ %,@7) M'@ M0 M+&K&&&&S##[$D.,S  $  @H -,sM))W   "W??]_]9/+q]]]]10+++"&'&&54676632#32674&#"Q\fI7\X8QT#+1"$ lkI?   \^{v  ) ` #3580 y+2i@A@ M,@ M?,O,_,,S(4 2022S 3 R3 2U /0//W# W ????]9/]]+10+3267#5.'&&54676632'4&#"3?0 !`"2!HBT UI3A% d$ dZ V u 5J[3 Q7 '}6Se.&`XLZJ (/r@I/)?))S_o#@ H#1S/S 0'R0"W&@//U&O,_,o,,W   (??]]?9/]]+]]]10&&'&&54676632#3267#4&#" ;AI7\X8QT#+%a^"$fUI?   \^{v ) ` x35806k, 9&@ 3;%P.O++4&@ 19#P,O++4-Yk%T@0!F' &'DO&@P`J J??9///??]]9/9910"#36673#"&'7232654. {{2b'-9A"[krv  BC$5k:{97665!3'667##$##:55!3'667# -C0#" 4$>3J" %p+WRH7"f"?FG6K-.7p-[kR@0D Dp@ HH@ J???9/]?+]]]]310#"&'73265##335ZU 1#{{k]Yg' k;[L@ Q0Q@ HO U  W???9/?]]+]]10%#"&'732655##3353ZV 1#{{{]Yg' ֶ-Wkh@)@ L@ L @ L @ L D 0 D@ HH@   ???]??+]310++++3'667###335#:3J" %{{{mK-.7ֶ-[kK@-D 0 D 0  J ???99//?]]]10]]%5#".55332673##L,,I5{ $ '{Lsfi4YE&0 - +zP L @% L@H  Q Q0W   ???9/]+10++326753##535#".55! |brX*''E4֕*4'A2Wk#@^L L0Mo@ H% @ H %$%$% ####$ H$ ?????9///9+]q/9=/9999+/]+]10+++>733'667###>7 e $;733'667###>7 n %<4I# EY:q HSX((YSH`[L-.7L :|{t16k -@ Q0 H H??]10353#5!#36fffaf 9&$@ $ PO++4,&D@ 4<P/O++4 ,&$@+ P#OO+++44,&D@ 1CP;O/O+++44 k?9&(@  P O++4&H @  +3P&O++4y#M@0# S @%Sp$OWO#_#o##U  ?  W?]?9/]]]]10%#"&5467!4&#"'66323267&=P*il0K<%D H-<_C$ #/->^? {v  .4h =Zm$;',&@ *<P 4O (O+++44&i@ &8P0O$O+++44 ,&@0B%P:O.O+++44&@.@#P8O,O+++44,&@5G#P?O3O+++443&@3E P=O1O+++44k[.&@  PO++4?&@ PO++4.,&@$ POO+++44?&@$POO+++44,&2@"4P,O O+++44&R@"4 P,O O+++44y !5@F@`#F"HJ J??9/]104632#"&"3.2>7#vimrtjms$  $#  $6x-=$$>-P,=$$=,!A@(S @`#S  "sOW W??]9/]]10%#".54>32267#"3&& :Q22Q; !;R01Q; (/-,(/-8[B$$B[87[A##A[60065..5,&x@$6P.O"O+++44&y@$6 P.O"O+++44*,&@ '9P1O%O+++44&@%7P/O#O+++44&@ !"P#O++4X&\@ #$ P%O++4,&@#5P-O!O+++44X&\ @%7 P/O#O+++44?&e$@'#%P&O"O+++44X&\e$@*%' P(O$O+++44-,&@, P $O O+++44+&@ + P#OO+++44$zk J@/@M 0@_ ? Q  r r??2]]]]+107!#3#5#5}grffafYz -@Q /?O H H??]10%#5#!#-rbfff",&@ 2P *O O+++44"&@&8P0O$O+++44y %@6?| @`'  90 p  @[H |P ` p    /Oo@`&#/_ U  ????99?]??]q]]]q]q+qr^]^]]qr10^]3#3373#4632#"&74&#"326cRBcR03311330  ekGbPLOOLLPPL0%%01%%9?10!! 9\y5s#@@ L@L@LF_$$7/F- 6#@  J)s330s-33 $) J ??99//3232]]10+++654&#"'66323#3!3267#"&55#53667#5<7%#F$W9ag1h<9)%T " !-:%rj#M6 (  _bXY"Y ,%_ `W Y!Ykz @ M M @ M (M M M@'MD J   ??92]]2222]10+++++++7#5'75'75#5!#78P&v|I&oK$o£P%DE%Q8a"R3F#N4ffb%Mk w@S  O     ` / ?   0@ QP` U U  ????]]]q]]]]]]qqqq10###=!ۣ|fYfFffr%s@C#F`'D &O_oJ HO  J???99//]q323]2]2222232]10]7#5#535#535>32##3"32654&{CCCC*+)lwxn"Ȗ '300XXXcHbVabWGc-+)"`@  ??10".'732676*\""\*6!0##0  k$6r%$k k?k(.k=-k+y#>F @ H 0 F$J J??9/]]+103#'32654&#"4>32#".)0@?10@?1t 441*+(275#F3769??8Kf5648>;fQ;><k7 k< k;k@p@M M M @ M @ M@ M_/?OR`_o@ HPR_ @  R 0@R 0@ H ??]+q]]2/3/33/]]+]]]qq]10]++++++%2>553#5&&553353* `\U`U\` ` !=0փxx0=!  
y'S%@ L @+ LF ``?)F#/!!(%H" HJ???]]]]]10++4>323#56654.#"#53&&)BU-,UC)$*Q"5 + + 1&T*%bKjCCjKB~3232>54&##532>54&#"#*D6#9H%,{5G)"A3"(% 24  ! # +J\<=R2 8Q4+@B'-@b!229[i M M @ M M@(M M M M  Q QQ ???9=/910++++++++.'36673 ,56"2#<"g< W6`IISi0I(*:P;0@* M@M+S0@!`!!<S5S;0 8W&W ??9]]10++]74>7&&54>32&&#"#".%4.'326 5+$*@+5H"?&  -!;F9Q12Q; :$#5**38:79)4'\  "'gH7Y>!;VA*# $((DCB@0@ * M MLL@6M00002 %S(S"1_o0U % %-OW- @W?]?]99=//]]]10+++++&&#"33#"3267#".5467&&54>32L*%ji.,4S%d2)J7!&'! 5F&3N"c ] e"7(+70#&4  6V$>S@ H&S@ H%U??++]10'6654&'.54>7#5!g88*B-/DLmYU=5=-@*%%@ %$ 0D/8j_OfQNas=43!+?[2@ Q QP@ H 0W ?]??+]106632#4&#"#?!a98J+{.{ 9O0RB: 5@S@`SUW W??9/]10#"&546322>7#"3&&tffuufft#  #0.,UV3G++G3iWWi$-@ @ Q  U W??]10%#".55#533267&C0A'$. 5L1f*'?K M@$ M` Q   ??9??2]10++>73#.'#3%# .11:8/ ',-||/2/684CHH?=7$2@Q&%&$Q%$ W???99103>7.#"'6632#.'-12  &@M81' @}uh,*4 c==B9$QQN!$QSN;[Y5V8g @ MS5 H5:$@ H:S+S @ H 9&U0 8# # 8W??99=//99+++10+6654&'&&5467&&54632&&#"33#":A6ZRC2.%ng6P:) &jd3((,C-" ^JBPK/O``    f(!#  ,"%@ R??[8S @ H Q@ HW W???++10"&'#4632'32654&#",{damd0H !+0%* . cws6ZA#y IHEC2;-V%K@#S 0@ 0  'P'S@ H & W ??99+]]]107.54>32&&#"'6654&+F2%Ed?+G"4!HR332>54&'#"4Q7/M7!=W5! )  p#O)+R?&#@Z7:Y=f#/+P(5/$$@ @ HQ@H WU ??++]10#5!#32>7#".5g)  "P,)C0pff4( i.O;3,@ QQ 0  W???]]10"&53326553h^|++}] hp >66>ph[$u@Q/?@HR`&$@P`R%& 0R%@  U$OU ???]2?]2]]9/]3]2]+]1032>54&'#5.54632#, `,$`/C+qmsr+D0x@G)6"#9)FC(?Q/szzs/Q?([[]@?/?R! 0R @PR  W  ????2?]2]2]]]10%2>553#5.55333) `-B*`*B-a `R !=0B^>  >^B0=! 0s@7 0@R00000P0'00R/// &0&@&&&R"@H1#" +W0W ????+]]]]]]q]]10%32654&'7#"&'#".5467326553+#S! 3&!-1&2 -,R# `78>169%CHJ"+N<#$##$#N]## "" ## ""5(&@ '2 P 6O++4(&@ )6 P 'O++4(&@+( P :O )O+++44(&@ -( P +O )O+++44(&@+' P :O (O+++44(&@-' P +O (O+++44(7&@ 2< P NO 'O+++44(7&@ 2< P =O 'O+++44&$5 5" P+4&$  & P+4&$ P+44&$ȴ P+446&$ P+44)&$ P+44w&$  ", P+44x&$  ", P+44@&@  1>,)#P+44&MM.)#P+44&}},(#P+44&uu.(#P+44&3=#P+44&3=#P+44(&@ *( P )O++4(&@ )' P (O++4@&@  42!P,3O++4@&@  31!P,2O++4?[&@ PO++4?[&@ PO++4$&@  P O++4$&@  P O++4&R@ #! P"O++4&R@ " P!O++43&@ PO++43&@ PO++4&@ 42P 3O++4&@ 31P 2O++4(V&&B< P+44(V&&B< P+44(V&&@ +( P+444(V&&-( P+444(V&&@ +' P+444(V&&@ -' P+444(V7&&2< P+444(V7&&2< P+444777777.7.7?V&&ϴ/P+44?V&&ϴ/#P+44?V&&@  3P+444?V&&3P+444?V&&@ 3P+444?V&&@  3P+444?V7&&G)P+444?V7&&G)P+444Q7]77G7U7Y7S7r7rV&& 1FP+44V&& 3FP+44V&&@ 52P+444V&&72P+444V&&@ 51P+444V&&@ 71P+444V7&&685#3P+44&ME:5#3P+44&}t84#3P+44&um:4#3P+44&?I#3P+44&?I#3P+44(&a@ +5 P 'O++4(&@ '( P )O++4(V&& *( P+44(V& 2, P+4(V&& )' P+44(&@ 5+ P -O++4(V&& 5+ P+44 5&$a@ % PO++4 &$@  PO++4gk&$C ۴ P+4k&$  P+4 k I@,  @ HR W s ????9/9??+9107##673#.53%3ee65i(2 `L&-ϲ *8#: #"IVP  ?103#"&5473 #8=_4 ;.0I  ?10667&&54632 $'B3.!!#!(P"i@   ??102#&&#"#54>5'Z['5$5!&&!5$^7 -K@--"#. ."""(  ?]]?]]?]]210#"&54632#"&54632'.#"#>32$""$"##"T  E'11'M!! !! A%5##5%?V&&#P+44?V& P+4?V&&#P+44?[&@ "PO++4?V&&3P+449k&(C  P+4Bk&(  P+4'k&+C  P+41k&+  P+4k q@ R@ HR  @ HR W  U ????9/??+]2q2+]q]10###335.539af``f(2 `kk*8#: #@!@  ?/10'667&&54632Hl; " ?0+!!#!)O"N!@  ?10'7667&&54632~;l! "!@/+!!#!)O"n7'6@'""  '  /]/]]210.#"#>32667&&54632A   E'11'   ""%5##5%    !$&a@ " P O++4$&@  P O++4&!@ P .O "O O++++444&!@ P -O "O O++++444$&@ " P O++4$7&@ P "O O++4+44465&,a@ P O++46&,@  PO++4%k&,C  P+4k&,  P+4O@   ~~??10'&&546329l;.0> " !+ R(!#!!H@   ~~??10'7&&54632;l0?!" !+ R(!#!!n7'+@'$!  ( ?]10.#"#>32.54632A   E'11'h#"   %5##5%!    
3&a@  PO++43&@ PO++4&&@P,O OO++++444&&@P+O OO++++444?[&@ ) P-O++4?[&@ - PO++43&@  PO++437&@P OO++4+444 5&<a@ #PO++4 &<@ PO++47k&<C P+4'k&< P+4W&3 ) P+4& )@ ~ ???10#"&54632#"&54632''(''(#(''(N>a]## "" ## ""]&o~?10'.uG`V&&DBP+44V& <6P+4V&&$ #CAP+44&@ ?5P 7O++4V&&OEP+44y&2C| #!P+4Dy&2 " P+4y&Cb n+)#P+43y& *(#P+4 y'373 L%M%@ L$@M$@ L @M M M M @ M @MM M M@ M3333 3333/H/@f H/R4- ----@M@ MO_R @0 P  /  !!! !!%R%$4/ 3W( %U" U W?????]]]]]]]]++]_]]++]]q10+++++++++++++++46323#5>54.#"#53&&.53TIHT"@  B!(2 `bB=]W5DO+&E45F'*OB4W]=*8#: #o~?10'7o.e`GuI ~?10&&54632 3B'$ "P(!#!!V&$', P+44V&$', P+44V&$''0 P+444V&$'60 P+444V&$'I0 P+444V&$'C0 P+444\V&$'j"D P+444]V&$'j"D P+444ZV&+'$m !P+44fV&+'$s!P+44V&+'P$%P+444V&+'^$%P+444V&+'b$%P+444V&+'V$%P+444V&+'$;9P+444V&+'$;9P+444kV&'$v(=#P+44GV&'$d*=#P+44~V&'>$,A#P+444V&'M$.A#P+444V&'}$&,A#P+444V&'u$.A#P+444V&'$@3U#P+444V&'$?3U#P+444 Vk&$ " P+4-Vk&+$ $P+4 Vy&$ $3-#P+4.k#@ @ L@ L L M@1 M%$% %# S!#$"@ H"U #OoU?]22+2??229/9910+++++&&##5!#3##.'53267#5 :.nsE:hJ84.444?@D'YY !XDD =?<?;6Z#X94+;Nam)@ no32"32654&766'&&'467'7&&727&&''&&5&#"6"'3274676667'7676&'&5F((F44F((F5    $  $K+%  %+  (F44F((F55F  u  V )  ( _(%    %(  [ #'+/37@3 M2 M+ M* M' M& M# M"@ M! M  M@ M M M&6Ff= &6Ff= &f&f=  >  A&%"!  x I i i6f 6 f  9  9=11i1y111@ H 11/4;H/((^(n(M(>((@N(+H((i(y(((@ H( I($4;H$$^$n$M$.$>$ $$$$$i$y$$$@ H $$6@ 4;H6652@ H2'2722+@-1H1+ +++++@ H+ +0++'-1H'@(,H'@ H' '0''   "/ ;^[Ly  ; ? o =O   { L   9 i   /_ ;{L8 f3`=PSDv 0`=ArD@>f7  0 = !   T   F   9@ 4EH",585@;1;Ha5@5P5!5155555555555c5s5T5C555@H355 555"@H"""""X33,,@@ &HK,[,<,,@H,,,,HZ,I,,@ H,2EH11&40;H4,/H4@3#+H|44k4\4K44HZ44@ H4#+#;##H#W))+-$'H-#H-H-@H-@ H--'- -@2EH$$& @P`;`!Arc$DfG6  <wC i3/?<O `@*IL\l )i AL\  /O_o;Oo.Ll| +Ii8) >xK f  3  `=AScDe@9' APA`p ?r2_]_q_qqrrrr^]2^]]qqqqrr^]?^]]2^]]qqrrr^]2^]]]]]]qqqrrrr^]9/]3^]]]qqqrr^]3^]]]]qq+rrrr^]]^]]2^]]qqrrr^]]2^]]]]]qqqqqqrrrr^]]]?^]2+2^]+++++?]2+q2+]+qqqq+++9/3+3+]]+]+qq+q]2]]+2_]]_]+_]_qqqqqqqrrrrr+22+22r22]]]]qqqrr^]2^]]]]qqqrr^]2^]]]qqqqr^]^]]2^]]qqqrrr^]]2^]]]qqqqrr^]2^]2^]]]qqqqrr^]]_^]22]+++2]+_qrrr+2]+]+2]+]]qrrrrr+2++]]+rrrr2+2]+]]10^]]]]]]+^]]]]]^]]^]]^]]^]^]^]^]+++++++++++++335!5!35!3335!5!35!3|444555k**yhk**L**yhk**)@( M(@M'@M L MM@M@ M@ MMM MسMMMس M M@1 L'!'! !'+*D$ $0$$ ??99//99]]999/////99910++++++++++++++++++7774>54.#"6322654&#" #,!   .   i y'oj&t"">@ EI?1033!Pr">@ EI?1033!P>4PVr@ EI?10!##"PrP4V">E??10#3"PP"rI?105!"PPV">@ EI??310##533"PPPVr2@@P `p EI??]2]q10###5PrP4P">1@@` EI?2]]q105333P"P4PV>(@EI?]?]]31033##PP>4P4V> @@(@ 0p  E0  I?2]2?]22]q10#5333##PP"P4P4V>V@7  ` p   `p E E II?]2]2?]q]q105333###5PPrP|PPP|PV>J@.E ?  E0 I I????]]3]2]10##53;3##PPPPPP4P4V"r@ EI?10!##"PrV"> \@>_oE _oE @M_I_  0  P`I?]]]q?]]q+qq1033!!##P"P|4P4|V> 8@ E E II??]]]]10!33##!PP"r> @@)  @ P  ` p  0E II?2]]q105333!5P rP|PPPP"> <@%@ MPp E II?]]qq+1033!!!P"|4PP> 8@!p p_0E II?]qq]q10!33!!P"rPV @@)  @ P  ` p   0E I I?]]q10###=!P"P|PPPPV" 4@Pp? E II?]]]q10!##5!!"P""4|PV 8@"Pp p0E II?]q]q10%##!%!!P""̠PVr>@EE????10#;#PPPPPV> v@N0Ep   E0Ep   EI I II????]]q]]]q]1033###35#333##PPP P|4P4|Vr> E@)  E@MEE I I????]]+]103#33#3##"PPނPP>l|4P4|V> c@@  E  E  @ME@M0 II ????]]]+]+]]]]10#3#33##3PP"PPҪ4> ?@$ E?E0 I II??]]10!5533#!#33 PP"PP|4r> :@" E? E I IP??]]?]]1033#!3!P"P|4P> @@'?  E0 E I IP??]]?]]]10#33!3!PP"rlV c@AE?   E0@MI I I?]?]]]+]]]]105!##3!3##P PrPP4|Vr r@ @ME @#M  E0 @MI @M  I_0?]]?]+]+]]]+]+103##5!#!PrP"4|V d@FpE  0E?   IpI0?]?]]]]]]]]]10%##35!#!PPrPl@ II ?1055!%5! 
PPPPVr> '@ E E I????210##533#3PPPPPV> '@ E E I????21033##3#"PPPP>4P4"> E@- @H E@HE0`p I?]33?]++10!53333 PPPrPP44"r> ;@%E E I??]]??]]]1033#3#P"PPr"> ;@%EE I?]]??]?]]10#33#3PPP"4PVr @@'E E0   I?23??]]]]105!####PPP"PP44Vrr C@*E?  E I?]???]]]_]]103##%3#P"PPrPVr C@*E$  E0 I??]??]_]]]]10##3#3PPP"4V>??/10! >.n> #'+/37;?CGKOSW[_cgkosw{3#3#73#3#3#3#73#3#3#3#73#3#3#3#73#3#3#3#73#3#3#3#73#3#3#73#3#3#73#3#3#73#3#3#73#3#3#73#3#3#73#3#3#73#3#73#''N''N''N''''N''N''N''''N''N''N''''N''N''N''''N''N''N''''N''N''N''''''''N''N ''N''N ''N''N ''N''N ''N''N ''N''N '' >'''u'''''''u'''''''u'''('''u'''''''u'''''''u''''''''''u'u'''u'u'''u'v'''u'u'''u'u'''u'u'''Un> #'+/37;?CGKOSW[_cgkosw{ #'+/37;?CGKOS3#73#3#73#73#73#3#73#73#73#3#73#3#73#73#73#73#73#3#73#73#73#73#73#3#73#73#73#73#73#3#73#73#73#73#73#3#73#73#73#73#73#3#3#73#73#73#73#73#73#3#73#73#73#73#73#73#3#73#73#73#73#73#73#3#73#73#73#73#73#73#3#73#73#73#73#73#73#3#73#73#73#73#73#73#''N'''''N'''''N'''''N'''''N'''''N''z''N''N''N''N''N''z''N''N''N''N''N''z''N''N''N''N''N''z''N''N''N''N''N''z''N''N''N''N''N''' ,''N''N''N''N''N''N ,''N''N''N''N''N''N ,''N''N''N''N''N''N ,''N''N''N''N''N''N ,''N''N''N''N''N''N ,''N''N''N''N''N''N >'''''''u'''''''u'''''''u'''''''''''u'''''''''''v'''''''''''u'''''''''''u''''''''''''u'''''''''''''u'''''''''''''v'''''''''''''u'''''''''''''u'''''''''''''u'''''''''''''IV> #'+/37;?CGKOSW[_cgkosw{ ;35#35#35#35#735#35#35#35#735#35#35#35#35#35#35#35#735#35#35#35#735#35#35#35#35#35#35#35#735#35#35#35#735#35#35#35#35#35#35#35#735#35#35#35#735#35#35#35#35#35#35#35#735#35#35#35#735#35#35#35#35#35#35#35#735#35#35#35#735#35#35#35##5##5##5##5##5##5##'''N''N''N''N''N''N''N''N''N''N''N''z''N''N''N''N''N''N''N''N''N''N''N''z''N''N''N''N''N''N''N''N''N''N''N''z''N''N''N''N''N''N''N''N''N''N''N''z''N''N''N''N''N''N''N''N''N''N''N''z''N''N''N''N''N''N''N''N''N''N''N''G '''''''''''''''u'''''''u'''''''u'''u'''u'''''''u'''''''u'''u'''v(((''''v(((''''v(((v'''u'''''''u'''''''u'''u'''u'''''''u'''''''u'''u'''u'''''''u'''''''u'''V>W@6E?E 0   I ???22]22?]2]2]2]2103#####53333rPPPPPPrP44P4V>1@E I I ??2222223310###535#5333#PP"P|PPP|PPnI ITXu @    ITu     :    + I   4 4    Copyright 2011 Canonical Ltd. Licensed under the Ubuntu Font Licence 1.0Ubuntu MonoBoldUbuntu Mono Bold Version 0.80Ubuntu Mono BoldVersion 0.80UbuntuMono-BoldUbuntu and Canonical are registered trademarks of Canonical Ltd.Dalton Maag Ltdhttp://www.daltonmaag.com/Copyright 2011 Canonical Ltd. 
Licensed under the Ubuntu Font Licence 1.0Ubuntu MonoBoldUbuntu Mono Bold Version 0.80Ubuntu Mono BoldVersion 0.80UbuntuMono-BoldUbuntu and Canonical are registered trademarks of Canonical Ltd.Dalton Maag Ltdhttp://www.daltonmaag.com/  !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghjikmlnoqprsutvwxzy{}|~      !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~      !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~      !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~      !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ !"#     .nullEurouni00A0uni00ADmacronperiodcenteredAmacronamacronAbreveabreveAogonekaogonek Ccircumflex ccircumflex Cdotaccent cdotaccentDcarondcaronDcroatdcroatEmacronemacronEbreveebreve Edotaccent edotaccentEogonekeogonekEcaronecaron Gcircumflex gcircumflex Gdotaccent gdotaccent Gcommaaccent gcommaaccent Hcircumflex hcircumflexHbarhbarItildeitildeImacronimacronIbreveibreveIogonekiogonek Idotaccenti.loclIJij Jcircumflex jcircumflex Kcommaaccent kcommaaccentkgreenlandic.case kgreenlandicLacutelacute Lcommaaccent lcommaaccentLcaronlcaronLdotldotNacutenacute Ncommaaccent ncommaaccentNcaronncaronnapostrophe.case napostropheEngengOmacronomacronObreveobreve Ohungarumlaut ohungarumlautRacuteracute Rcommaaccent rcommaaccentRcaronrcaronSacutesacute Scircumflex scircumflexuni0162uni0163TcarontcaronTbartbarUtildeutildeUmacronumacronUbreveubreveUringuring Uhungarumlaut uhungarumlautUogonekuogonek Wcircumflex wcircumflex Ycircumflex ycircumflexZacutezacute Zdotaccent zdotaccentlongs Scommaaccent scommaaccentuni021Auni021Bdotlessj apostropheuni02C9WgravewgraveWacutewacute Wdieresis wdieresisYgraveygrave zerosuperior foursuperior fivesuperior sixsuperior sevensuperior eightsuperior ninesuperior zeroinferior oneinferior twoinferior threeinferior fourinferior fiveinferior sixinferior seveninferior eightinferior nineinferior afii61289 estimatedonethird twothirds oneeighth threeeighths fiveeighths seveneighthsonefifth twofifths threefifths fourfifthsonesixth fivesixths oneseventh twosevenths threesevenths foursevenths fivesevenths sixseventhsoneninth twoninths fourninths fiveninths sevenninths eightninthsDeltauni2126uni2215uni2219f_if_l zero.supsone.supstwo.sups three.sups four.sups five.supssix.sups seven.sups eight.sups nine.sups zero.sinfone.sinftwo.sinf three.sinf four.sinf five.sinfsix.sinf seven.sinf eight.sinf nine.sinf caron.alt commaaccentrevcommaaccentcaron.alt.short Parenleft ParenrightHyphenSlashAt Bracketleft Backslash Bracketright Braceleft Braceright GuilsinglleftBulletEndashEmdashGuilsinglright Exclamdown GuillemotleftGuillemotright 
Questiondownuni0180uni0181uni0182uni0183uni0184uni0185uni0186uni0187uni0188uni0189uni018Auni018Buni018Cuni018Duni018Euni018Funi0190uni0191uni0193uni0194uni0195uni0196uni0197uni0198uni0199uni019Auni019Buni019Cuni019Duni019Euni019FOhornohornuni01A2uni01A3uni01A4uni01A5uni01A6uni01A7uni01A8uni01A9uni01AAuni01ABuni01ACuni01ADuni01AEUhornuhornuni01B1uni01B2uni01B3uni01B4uni01B5uni01B6uni01B7uni01B8uni01B9uni01BAuni01BBuni01BCuni01BDuni01BEuni01BFuni01C0uni01C1uni01C2uni01C3uni01C4uni01C5uni01C6uni01C7uni01C8uni01C9uni01CAuni01CBuni01CCuni01CDuni01CEuni01CFuni01D0uni01D1uni01D2uni01D3uni01D4uni01D5uni01D6uni01D7uni01D8uni01D9uni01DAuni01DBuni01DCuni01DDuni01DEuni01DFuni01E0uni01E1uni01E2uni01E3uni01E4uni01E5Gcarongcaronuni01E8uni01E9uni01EAuni01EBuni01ECuni01EDuni01EEuni01EFuni01F0uni01F1uni01F2uni01F3uni01F4uni01F5uni01F6uni01F7uni01F8uni01F9 Aringacute aringacuteAEacuteaeacute Oslashacute oslashacuteuni0200uni0201uni0202uni0203uni0204uni0205uni0206uni0207uni0208uni0209uni020Auni020Buni020Cuni020Duni020Euni020Funi0210uni0211uni0212uni0213uni0214uni0215uni0216uni0217uni021Cuni021Duni021Euni021Funi0220uni0221uni0222uni0223uni0224uni0225uni0226uni0227uni0228uni0229uni022Auni022Buni022Cuni022Duni022Euni022Funi0230uni0231uni0232uni0233uni0234uni0235uni0236uni0238uni0239uni023Auni023Buni023Cuni023Duni023Euni023Funi0240uni0241uni0242uni0243uni0244uni0245uni0246uni0247uni0248uni0249uni024Auni024Buni024Cuni024Duni024Euni024Funi0292breve_inverted double_grave ring_acutedieresis_macron dot_macrondieresis_gravedieresis_acutedieresis_breve tilde_macron acute.asccircumflex.asc caron.ascdieresis_grave.capdieresis_acute.capdieresis_breve.capuni0400 afii10023 afii10051 afii10052 afii10053 afii10054 afii10055 afii10056 afii10057 afii10058 afii10059 afii10060 afii10061uni040D afii10062 afii10145 afii10017 afii10018 afii10019 afii10020 afii10021 afii10022 afii10024 afii10025 afii10026 afii10027 afii10028 afii10029 afii10030 afii10031 afii10032 afii10033 afii10034 afii10035 afii10036 afii10037 afii10038 afii10039 afii10040 afii10041 afii10042 afii10043 afii10044 afii10045 afii10046 afii10047 afii10048 afii10049 afii10065 afii10066 afii10067 afii10068 afii10069 afii10070 afii10072 afii10073 afii10074 afii10075 afii10076 afii10077 afii10078 afii10079 afii10080 afii10081 afii10082 afii10083 afii10084 afii10085 afii10086 afii10087 afii10088 afii10089 afii10090 afii10091 afii10092 afii10093 afii10094 afii10095 afii10096 afii10097uni0450 afii10071 afii10099 afii10100 afii10101 afii10102 afii10103 afii10104 afii10105 afii10106 afii10107 afii10108 afii10109uni045D afii10110 afii10193afii10066.locluni0462uni0463uni0472uni0473uni0474uni0475uni048Auni048Buni048Cuni048Duni048Euni048F afii10050 afii10098uni0492uni0493uni0494uni0495uni0496uni0497uni0498uni0499uni049Auni049Buni049Cuni049Duni049Euni049Funi04A0uni04A1uni04A2uni04A3uni04A4uni04A5uni04A6uni04A7uni04A8uni04A9uni04AAuni04ABuni04ACuni04ADuni04AEuni04AFuni04B0uni04B1uni04B2uni04B3uni04B4uni04B5uni04B6uni04B7uni04B8uni04B9uni04BAuni04BBuni04BCuni04BDuni04BEuni04BFuni04C0uni04C1uni04C2uni04C3uni04C4uni04C5uni04C6uni04C7uni04C8uni04C9uni04CAuni04CBuni04CCuni04CDuni04CEuni04CFuni04D0uni04D1uni04D2uni04D3uni04D4uni04D5uni04D6uni04D7uni04D8uni04D9uni04DAuni04DBuni04DCuni04DDuni04DEuni04DFuni04E0uni04E1uni04E2uni04E3uni04E4uni04E5uni04E6uni04E7uni04E8uni04E9uni04EAuni04EBuni04ECuni04EDuni04EEuni04EFuni04F0uni04F1uni04F2uni04F3uni04F4uni04F5uni04F6uni04F7uni04F8uni04F9 afii61352 
afii00208uni20B4uni20AEtengeroublekratkaAlphaBetaGammauni0394EpsilonZetaEtaThetaIotaKappaLambdaMuNuXiOmicronPiRhoSigmaTauUpsilonPhiChiPsialphabetagammadeltaepsilonzetaetathetaiotakappalambdauni03BCnuxiomicronrhosigma1sigmatauupsilonphichipsiomega Alphatonos EpsilontonosEtatonos Iotatonos Iotadieresis Omicrontonos UpsilontonosUpsilondieresis Omegatonos alphatonos epsilontonosetatonos iotatonos iotadieresisiotadieresistonos omicrontonosupsilondieresis upsilontonosupsilondieresistonos omegatonostonos tonos.cap dieresistonosuni1F00uni1F01uni1F02uni1F03uni1F04uni1F05uni1F06uni1F07uni1F08uni1F09uni1F0Auni1F0Buni1F0Cuni1F0Duni1F0Euni1F0Funi1F10uni1F11uni1F12uni1F13uni1F14uni1F15uni1F18uni1F19uni1F1Auni1F1Buni1F1Cuni1F1Duni1F20uni1F21uni1F22uni1F23uni1F24uni1F25uni1F26uni1F27uni1F28uni1F29uni1F2Auni1F2Buni1F2Cuni1F2Duni1F2Euni1F2Funi1F30uni1F31uni1F32uni1F33uni1F34uni1F35uni1F36uni1F37uni1F38uni1F39uni1F3Auni1F3Buni1F3Cuni1F3Duni1F3Euni1F3Funi1F40uni1F41uni1F42uni1F43uni1F44uni1F45uni1F48uni1F49uni1F4Auni1F4Buni1F4Cuni1F4Duni1F50uni1F51uni1F52uni1F53uni1F54uni1F55uni1F56uni1F57uni1F59uni1F5Buni1F5Duni1F5Funi1F60uni1F61uni1F62uni1F63uni1F64uni1F65uni1F66uni1F67uni1F68uni1F69uni1F6Auni1F6Buni1F6Cuni1F6Duni1F6Euni1F6Funi1F70uni1F71uni1F72uni1F73uni1F74uni1F75uni1F76uni1F77uni1F78uni1F79uni1F7Auni1F7Buni1F7Cuni1F7Duni1F80uni1F81uni1F82uni1F83uni1F84uni1F85uni1F86uni1F87uni1F88uni1F89uni1F8Auni1F8Buni1F8Cuni1F8Duni1F8Euni1F8Funi1F90uni1F91uni1F92uni1F93uni1F94uni1F95uni1F96uni1F97uni1F98uni1F99uni1F9Auni1F9Buni1F9Cuni1F9Duni1F9Euni1F9Funi1FA0uni1FA1uni1FA2uni1FA3uni1FA4uni1FA5uni1FA6uni1FA7uni1FA8uni1FA9uni1FAAuni1FABuni1FACuni1FADuni1FAEuni1FAFuni1FB0uni1FB1uni1FB2uni1FB3uni1FB4uni1FB6uni1FB7uni1FB8uni1FB9uni1FBAuni1FBBuni1FBCuni1FBDuni1FBEuni1FBFuni1FC0uni1FC1uni1FC2uni1FC3uni1FC4uni1FC6uni1FC7uni1FC8uni1FC9uni1FCAuni1FCBuni1FCCuni1FCDuni1FCEuni1FCFuni1FD0uni1FD1uni1FD2uni1FD3uni1FD6uni1FD7uni1FD8uni1FD9uni1FDAuni1FDBuni1FDDuni1FDEuni1FDFuni1FE0uni1FE1uni1FE2uni1FE3uni1FE4uni1FE5uni1FE6uni1FE7uni1FE8uni1FE9uni1FEAuni1FEBuni1FECuni1FEDuni1FEEuni1FEFuni1FF2uni1FF3uni1FF4uni1FF6uni1FF7uni1FF8uni1FF9uni1FFAuni1FFBuni1FFCuni1FFDuni1FFE uni1F88.alt uni1F89.alt uni1F8A.alt uni1F8B.alt uni1F8C.alt uni1F8D.alt uni1F8E.alt uni1F8F.alt uni1F98.alt uni1F99.alt uni1F9A.alt uni1F9B.alt uni1F9C.alt uni1F9D.alt uni1F9E.alt uni1F9F.alt uni1FA8.alt uni1FA9.alt uni1FAA.alt uni1FAB.alt uni1FAC.alt uni1FAD.alt uni1FAE.alt uni1FAF.alt uni1FBC.alt uni1FCC.alt uni1FFC.altuni20B9uniE0FFuniF000uniFFFDuniEFFD SF040000.001 SF020000.001 SF010000.001 SF110000.001 SF100000.001 SF090000.001 SF060000.001 SF070000.001 SF080000.001 SF050000.001 SF540000.001 SF530000.001 SF030000.001SF190000SF360000SF450000SF280000SF500000SF470000SF220000SF510000SF240000SF440000SF230000SF420000SF400000SF260000SF380000SF410000SF250000SF390000SF430000SF200000SF370000SF460000SF270000SF490000SF480000SF210000SF520000blockltshadeshadedkshadeSF530000SF540000SF040000SF020000SF010000SF110000SF100000SF090000SF060000SF070000SF080000SF050000SF030000 tcyrlgreklatnBGR .MKD HSRB b 0>KVa   *1?LWb  !+2@MXc ",3ANYd #4<BIOZe"AZE @CRT ZMOL tROM TRK $5;=CJP[f %-6DQ\g &.7ER]h'8FS^i (9GT_j )/:HU`klafrcafrcafrcafrcafrcafrcafrcafrcafrcafrcafrccasecasecasecasecasecase case(case0case8dnom@dnomFdnomLdnomRdnomXdnom^dnomddnomjdnompdnomvdnom|fracfracfracfracfracfracfracfracfracfracfracloclloclloclloclloclloclnumrnumrnumr numrnumrnumrnumr"numr(numr.numr4numr:ordn@saltFsaltPsinfZsinf`sinffsinflsinfrsinfxsinf~sinfsinfsinfsinfss01ss01ss02ss02ss02ss02ss02ss02ss02ss02ss02ss02 
ss02subssubs$subs*subs0subs6subsFNV^nnntnun~nnnnn $,nnunnn*2:BJRZbjrznnnnnnqnsnunyn{n}nnnnnnnnn*2:BJRZbjrznnnnnpnqnsntnzn{n}n~nnnnnnnn(RZbjrz "*2:BJRZbjrznnnnnnnnnnnpnqnrnsntnunxnynzn{n|n}n~nnnnnnnnnnnnnnnnn (08@HPX`hpxnnnnqnsnun{n}nnnnnnn*2:BJRZbjrznnnnnrnsntnun|n}n~nnnnnnnnn $,nnsn}nn &.6>FNV^nnntnun~nnnnn $,nnunnn(RZbjrz "*2:BJRZbjrznnnnnnnnnnnpnqnrnsntnunxnynzn{n|n}n~nnnnnnnnnnnnnnnnn*2:BJRZbjrznnnnnnqnsnunyn{n}nnnnnnnnn*2:BJRZbjrznnnnnpnqnsntnzn{n}n~nnnnnnnn (08@HPX`hpxnnnnqnsnun{n}nnnnnnn*2:BJRZbjrznnnnnrnsntnun|n}n~nnnnnnnnn $,nnsn}nn &.6>FNV^nnntnun~nnnnn $,nnunnn(RZbjrz "*2:BJRZbjrznnnnnnnnnnnpnqnrnsntnunxnynzn{n|n}n~nnnnnnnnnnnnnnnnn*2:BJRZbjrznnnnnnqnsnunyn{n}nnnnnnnnn*2:BJRZbjrznnnnnpnqnsntnzn{n}n~nnnnnnnn (08@HPX`hpxnnnnqnsnun{n}nnnnnnn*2:BJRZbjrznnnnnrnsntnun|n}n~nnnnnnnnn $,nnsn}nn &.6>FNV^nnntnun~nnnnn $,nnunnn(RZbjrz "*2:BJRZbjrznnnnnnnnnnnpnqnrnsntnunxnynzn{n|n}n~nnnnnnnnnnnnnnnnn*2:BJRZbjrznnnnnnqnsnunyn{n}nnnnnnnnn*2:BJRZbjrznnnnnpnqnsntnzn{n}n~nnnnnnnn (08@HPX`hpxnnnnqnsnun{n}nnnnnnn*2:BJRZbjrznnnnnrnsntnun|n}n~nnnnnnnnn $,nnsn}nn &.6>FNV^nnntnun~nnnnn $,nnunnnnn (opqrstuopqrstuopqrstuopqrstun~6-$%&'()*+,-./0123456789:;<=kmz   "$&(*,.02468:<>@BDFHJLNPRTVY[fhjl~ v n  xywvz{|}~Lv DR ou  o  o  pt w~ nnvou #>?@^`lrstx|X]DEFGHIJKLMNOPQRSTUVWXYZ[\]wy  !#%')+-/13579;=?ACEGIKMOQSUWZ\gikmSZcjsz0 *H 01 0 +0a +7S0Q0, +7<<<Obsolete>>>0!0 +;h,w5BR"0V0> 0  *H 01 0 UUS10UArizona10U Scottsdale1%0#U Starfield Technologies, Inc.1:08U 1http://certificates.starfieldtech.com/repository/1604U-Starfield Services Root Certificate Authority0 110503070000Z 160503070000Z01 0 UUS10UArizona10U Scottsdale1%0#U Starfield Technologies, Inc.1301U *http://certs.starfieldtech.com/repository/1/0-U&Starfield Services Timestamp Authority0"0  *H 0 򳑥I }-`Zc0xC;#(2&_2%蕅 >O ^>Q.;V A)+5yi]rE%Ĵ֬ea- vcLy< tfYP _Y$9ӉpUH {( ]P( */ӿJ^=zu_^̧@ցU]E'€t?>3 He27m UUO"/+0tC,K "@xQF0B0 U00U0U% 0 +0U# kro> را0U#0C̛u]/KQ0:+.0,0*+0http://ocsp.starfieldtech.com/09U2000.,*(http://crl.starfieldtech.com/sfsroot.crl0SU L0J0H `Hn0907++https://certs.starfieldtech.com/repository/0  *H S~tz+xY<%_B7[<a4jh"!ZHUMΫ4ƭ yڧW900Р*\%S-Lm{rRU>:EKd`5T%L[76J ioW"AcdǾwX6f8dR^/`ro6D"@AT3#l{X4~hTx RMKGlSL>3ĊsI"Ȝ|c000  *H 0c1 0 UUS1!0U The Go Daddy Group, Inc.110/U (Go Daddy Class 2 Certification Authority0 040629170620Z 340629170620Z0c1 0 UUS1!0U The Go Daddy Group, Inc.110/U (Go Daddy Class 2 Certification Authority0 0  *H  0ޝWI[_HgehWq^wIp=Vco?T"Tزu=Kw>x k/j+ň~ĻE'o7X&-r6N?e*n] :-؎_=\e8E``tArbbo_BQe#jxMZ@^s wyg ݠXD{ >b(_ASX~8tit00UİґLqa=ݨj0U#0İґLqa=ݨjge0c1 0 UUS1!0U The Go Daddy Group, Inc.110/U (Go Daddy Class 2 Certification Authority0 U00  *H 2K>ơw3\= ni04cr8(1zT1Xb۔EsE$Ղ#yiML3#An 剞;p~& T%ns! l l a+r9 ͗nN&s+L&qatJWuH.Qia@LĬC Օb ψ2 +E (*ZW7۽00Ơ0  *H 0c1 0 UUS1!0U The Go Daddy Group, Inc.110/U (Go Daddy Class 2 Certification Authority0 061116015437Z 261116015437Z01 0 UUS10UArizona10U Scottsdale10U GoDaddy.com, Inc.1301U *http://certificates.godaddy.com/repository100.U'Go Daddy Secure Certification Authority10U079692870"0  *H 0 -&L25_YZaY;pc=*3y:<0#0=Tߙ %!e)~5T29&UXמ* BΧ?Rifھ],fkQJ/Hǘuع)fm x|z%.enjDSp0Ü+X+=tJQL'Xk5ŝ1 6:%IgE96~7qt0? O20.0Ua2lE_vh0U#0İґLqa=ݨj0U003+'0%0#+0http://ocsp.godaddy.com0FU?0=0;975http://certificates.godaddy.com/repository/gdroot.crl0KU D0B0@U 0806+*http://certificates.godaddy.com/repository0U0  *H ҆gf :PrJtS7DIk3ٖV0<2!{ $F%#go]{z̟X*Ğ!ZFc/))r,)7'Oh! 
coq-8.15.0/doc/tools/coqrst/notations/__init__.py   (empty file)

coq-8.15.0/doc/tools/coqrst/notations/fontsupport.py
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
##########################################################################
##         #   The Coq Proof Assistant / The Coq Development Team       ##
##  v      #         Copyright INRIA, CNRS and contributors             ##
## [...] = 191:
                fnt.removeGlyph(g)
    return fnt

def center_glyphs(src_font_path, dst_font_path, dst_name):
    fnt = trim_font(fontforge.open(src_font_path))
    size = max(g.width for g in fnt.glyphs())
    fnt.ascent, fnt.descent = size, 0
    for glyph in fnt.glyphs():
        scale_single_glyph(glyph, size, size)
    fnt.sfnt_names = []
    fnt.fontname = fnt.familyname = fnt.fullname = dst_name
    fnt.generate(dst_font_path)

if __name__ == '__main__':
    from os.path import dirname, join, abspath
    curdir = dirname(abspath(__file__))
    ubuntumono_path = join(curdir, "UbuntuMono-B.ttf")
    ubuntumono_mod_path = join(curdir, "CoqNotations.ttf")
    center_glyphs(ubuntumono_path, ubuntumono_mod_path, "CoqNotations")

coq-8.15.0/doc/tools/coqrst/notations/html.py
##########################################################################
##         #   The Coq Proof Assistant / The Coq Development Team       ##
##  v      #         Copyright INRIA, CNRS and contributors             ##
## [...]
        """Configure a coqtop instance (but don't start it yet).

        :param coqtop_bin: The path to coqtop; uses $COQBIN by default,
                           falling back to "coqtop"
        :param color: When True, tell coqtop to produce ANSI color codes (see
                      the ansicolors module)
        :param args: Additional arguments to coqtop.
""" self.coqtop_bin = coqtop_bin or os.path.join(os.getenv('COQBIN', ""), "coqtop") if not pexpect.utils.which(self.coqtop_bin): raise ValueError("coqtop binary not found: '{}'".format(self.coqtop_bin)) self.args = (args or []) + ["-q"] + ["-color", "on"] * color self.coqtop = None def __enter__(self): if self.coqtop: raise ValueError("This module isn't re-entrant") self.coqtop = pexpect.spawn(self.coqtop_bin, args=self.args, echo=False, encoding="utf-8") # Disable delays (http://pexpect.readthedocs.io/en/stable/commonissues.html?highlight=delaybeforesend) self.coqtop.delaybeforesend = 0 self.next_prompt() return self def __exit__(self, type, value, traceback): self.coqtop.kill(9) def next_prompt(self): """Wait for the next coqtop prompt, and return the output preceding it.""" self.coqtop.expect(CoqTop.COQTOP_PROMPT, timeout = 10) return self.coqtop.before def sendone(self, sentence): """Send a single sentence to coqtop. :sentence: One Coq sentence (otherwise, Coqtop will produce multiple prompts and we'll get confused) """ # Suppress newlines, but not spaces: they are significant in notations sentence = re.sub(r"[\r\n]+", " ", sentence).strip() try: self.coqtop.sendline(sentence) output = self.next_prompt() except Exception as err: raise CoqTopError(err, sentence, self.coqtop.before) return output def send_initial_options(self): """Options to send when starting the toplevel and after a Reset Initial.""" self.sendone('Set Coqtop Exit On Error.') self.sendone('Set Warnings "+default".') def sendmany(*sentences): """A small demo: send each sentence in sentences and print the output""" with CoqTop() as coqtop: for sentence in sentences: print("=====================================") print(sentence) print("-------------------------------------") response = coqtop.sendone(sentence) print(response) def main(): """Run a simple performance test and demo `sendmany`""" with CoqTop() as coqtop: for _ in range(200): print(repr(coqtop.sendone("Check nat."))) sendmany("Goal False -> True.", "Proof.", "intros H.", "Check H.", "Chchc.", "apply I.", "Qed.") if __name__ == '__main__': main() coq-8.15.0/doc/tools/docgram/000077500000000000000000000000001417001151100156775ustar00rootroot00000000000000coq-8.15.0/doc/tools/docgram/README.md000066400000000000000000000304741417001151100171660ustar00rootroot00000000000000# Grammar extraction tool for documentation `doc_grammar` extracts Coq's grammar from `.mlg` files, edits it and inserts it into `.rst` files. The tool inserts `prodn` directives for grammar productions. It also updates `tacn` and `cmd` directives when they can be unambiguously matched to productions of the grammar (in practice, that's probably almost always). `tacv` and `cmdv` directives are not updated because matching them appears to require human judgement. `doc_grammar` generates a few files that may be useful to developers and documentors. The mlg grammars present several challenges to generating an accurate grammar for documentation purposes: * The 30+ mlg files don't define an overall order in which nonterminals should appear in a complete grammar. * Even within a single mlg file, nonterminals and productions are often given in an order that's much different from what a reader of the documentation would expect. In a small number of cases, changing the order in the mlg would change how some inputs are parsed, in particular when the order determines how to distinguish otherwise ambiguous inputs. 
  Strictly speaking, that means our grammar is not a context free grammar, even though we gloss
  over that distinction in the documentation.

* For a few nonterminals, some productions are only available if certain plugins are activated
  (e.g. SSR).  Readers should be informed about these.
* Some limited parts of the grammar are defined in OCaml, including lookahead symbols like
  `test_bracket_ident` and references to nonterminals in other files using qualified names such
  as `Prim.ident`.  A few symbols are defined multiple times, such as `scope` and `orient`.

## What the tool does

1. The tool reads all the `mlg` files and generates `fullGrammar`, which includes all the grammar
   without the actions for each production or the OCaml code.  This file is provided as a
   convenience to make it easier to examine the (mostly) unprocessed grammar of the mlg files
   with less clutter.  This step includes two transformations that rename some nonterminal
   symbols:

   First, nonterminals that use levels (`"5" RIGHTA` below) are modified, for example:

   ```
   ltac_expr:
   [ "5" RIGHTA
     [ te = binder_tactic -> { te } ]
   [ "4" ...
   ```

   becomes

   ```
   tactic_expr5: [
   | binder_tactic
   | tactic_expr4
   ]
   ```

   Second, nonterminals that are local to an .mlg will be renamed, if necessary, to make them
   unique.  For example, `strategy_level` is defined as a local nonterminal in both `g_prim.mlg`
   and in `extraargs.mlg`.  The nonterminal defined in the former remains `strategy_level`
   because it happens to be processed before the latter, in which the nonterminal is renamed to
   `EXTRAARGS_strategy_level` to make the local symbol unique.  Nonterminals listed after
   `GLOBAL:` are global; otherwise they are local.  References to renamed symbols are updated
   with the modified names.

2. The tool applies grammar editing operations specified by `common.edit_mlg` to generate
   `editedGrammar`.

3. `orderedGrammar` gives the desired order for nonterminals and individual productions in the
   documented grammar.  Developers should edit this file only to reorder lines.  `doc_grammar`
   updates `orderedGrammar` so it has the same set of nonterminals and productions as
   `editedGrammar` while retaining the previous ordering.  Since the position of new or renamed
   nonterminals is unspecified, they tend to show up in the wrong place in `orderedGrammar`,
   therefore users should review the output and make appropriate adjustments to the order.  The
   update process removes manually-added comments from `orderedGrammar`, while
   automatically-generated comments will be regenerated.

4. The tool updates the `.rst` files.  Comments in the form `.. insertprodn` (naming the first
   and last nonterminal of a range) indicate inserting the productions for that range of
   nonterminals.  `.. cmd::` and `.. tacn::` directives are updated using prefixes in the form
   `[a-zA-Z0-9_ ]+` from the directive and the grammar.  If there is a unique match in the
   grammar, the directive is updated, if needed.  Multiple matches or no match gives an error
   message.

5. For reference, the tool generates `prodnGrammar`, which has the entire grammar in the form of
   `prodns`.

6. The tool generates `prodnCommands` (for commands) and `prodnTactics` (for tactics).  The
   former lists all commands that are under `command` in `orderedGrammar` and compares it to the
   `:cmd:` and `:cmdv:` given in the rst files.  The latter lists all tactics that are under
   `simple_tactic` in the grammar and compares it to the `:tacn:` and `:tacv:`.
The tags at the beginning of each line mean:

- (no tag) - the grammar and the rst match exactly and uniquely
- `-` - a grammar production that can't be matched to an rst file entry
- `+` - an rst entry that doesn't match a grammar production
- `v` - the rst entry is a `:cmdv:` or `:tacv:`
- `?` - the match between the grammar and the rst files is not unique

## How to use the tool

* `make doc_gram` updates `fullGrammar`.
* `make doc_gram_verify` verifies that `fullGrammar`, `orderedGrammar` and `*.rst` are consistent with the `.mlg` files. This is for use by CI.
* `make doc_gram_rsts` updates the `*Grammar` and `.rst` files.
* `make doc_gram_rsts DOCGRAMWARN=1` will additionally print warnings.

Changes to `fullGrammar`, `orderedGrammar` and the `.rsts` should be checked in to git. The `prodn*` and other `*Grammar` files should not.

### Command line arguments

The executable takes a list of `.mlg` and `.rst` files as arguments. The tool inserts the grammar into the `.rsts` as specified by comments in those files. The order of the `.mlg` files affects the order of nonterminals and productions in `fullGrammar`. The order doesn't matter for the `.rst` files.

Specifying the `-verify` command line argument avoids updating any of the files, but verifies that the current files are consistent. This setting is meant for use in CI; it will be up to each developer to include the changes to `*Grammar` and the `.rst` files in their PRs when they've changed the grammar.

Other command line arguments:

* `-check-tacs` reports on differences in tactics between the `rsts` and the grammar
* `-check-cmds` reports on differences in commands between the `rsts` and the grammar
* `-no-warn` suppresses printing of some warning messages
* `-no-update` puts updates to `fullGrammar` and `orderedGrammar` into new files named `*.new`, leaving the originals unmodified. For use in Dune.
* `-short` limits processing to updating/verifying only the `fullGrammar` file
* `-verbose` prints more messages about the grammar
* `-verify` described above

### Grammar editing scripts

The grammar editing script `common.edit_mlg` is similar in format to `.mlg` files but stripped of all OCaml features. This is an easy way to include productions to match or add without writing another parser. The `DOC_GRAMMAR` token at the beginning of each file signals the use of the streamlined syntax.

The edit file has a series of items in the form of productions. Items are applied in the order they appear. There are two types of editing operations:

* Global edits - edit rules that apply to the entire grammar in a single operation. These are identified by using specific reserved names as the non-terminal name.
* Local edits - edit rules that apply to the productions of a single non-terminal. The rule is a local edit if the non-terminal name isn't reserved. Individual productions within a local edit that begin with a different set of reserved names edit existing productions. For example `binders: [ | DELETE Pcoq.Constr.binders ]` deletes the production `binders: [ | Pcoq.Constr.binders]`

Productions that don't begin with a reserved name are added to the grammar, such as `empty: [ | ]`, which adds a new non-terminal `empty` with an empty production on the right-hand side.

Another example: `LEFTQMARK: [ | "?" ]` is a local edit that treats `LEFTQMARK` as the name of a non-terminal and adds a production for it. (We know that LEFTQMARK is a token but doc_grammar does not.)
`SPLICE: [ | LEFTQMARK ]` requests replacing all uses of `LEFTQMARK` anywhere in the grammar with its productions and removing the non-terminal. The combined effect of these two is to replace all uses of `LEFTQMARK` with `"?"`.

Here are the current operations. They are likely to be refined as we learn what operations are most useful while we update the mlg files and documentation:

### Global edits

`DELETE` - deletes the specified non-terminals anywhere in the grammar. Each should appear as a separate production. Useful for removing non-terminals that only do lookahead that shouldn't be in the documentation.

`RENAME` - each production specifies an (old name, new name) pair of non-terminals to rename.

`SPLICE` - requests replacing all uses of the nonterminals anywhere in the grammar with their productions and removing the non-terminal. Each should appear as a separate production. (Doesn't work recursively; splicing for both `A: [ | B ]` and `B: [ | C ]` must be done in separate SPLICE operations.)

`OPTINREF` - applies the local `OPTINREF` edit to every nonterminal

### Local edits

`DELETE <production>` - removes the specified production from the grammar

`EDIT <production>` - modifies the specified production using the following tags that appear in the specified production:

* `USE_NT <name>` LIST* - extracts LIST* as a new nonterminal with the specified new non-terminal name
* `ADD_OPT <symbol>` - looks for a production that matches the specified production **without** `<symbol>`. If found, both productions are replaced with a single production with `OPT <symbol>`

The current version handles a single USE_NT or ADD_OPT per EDIT. These symbols may appear in the middle of the production given in the EDIT.

`APPENDALL <symbols>` - inserts <symbols> at the end of every production in <nonterminal>.

`INSERTALL <symbols>` - inserts <symbols> at the beginning of every production in <nonterminal>.

`REPLACE` - (2 sequential productions) - removes `<old production>` and inserts `<new production>` in its place.

```
| REPLACE <old production>
| WITH <new production>
```

`COPYALL <new nonterminal>` - creates a new nonterminal `<new nonterminal>` and copies all the productions in the nonterminal to `<new nonterminal>`.

`MOVETO <target nonterminal>` - moves the production to `<target nonterminal>` and, if needed, creates a new production -> <target nonterminal>.

`MOVEALLBUT <target nonterminal>` - moves all the productions in the nonterminal to `<target nonterminal>` *except* for the productions following the `MOVEALLBUT` production in the edit script (terminated only by the closing `]`).

`OPTINREF` - verifies that <nonterminal> has an empty production. If so, it removes the empty production and replaces all references to <nonterminal> throughout the grammar with `OPT <nonterminal>`

`PRINT` - prints the nonterminal definition at that point in applying the edits. Most useful when the edits get a bit complicated to follow.

`(any other nonterminal name)` - adds a new production (and possibly a new nonterminal) to the grammar.

### `.rst` file updates

`doc_grammar` updates `.rst` files where it sees the following 3 lines

```
.. insertprodn <first nonterminal> <last nonterminal>

.. prodn::
```

The end of the existing `prodn` is recognized by a blank line.

### Tagging productions

`doc_grammar` tags the origin of productions from plugins that aren't automatically loaded. In grammar files, they appear as `(* XXX plugin *)`. In rsts, productions generated by `.. insertprodn` will include, where relevant, three spaces (as a delimiter) and a tag name after each production, which Sphinx will show on the far right-hand side of the production.

The origin of a production can be specified explicitly in `common.edit_mlg` with the `TAG name` appearing at the end of a production. `name` must be in quotes if it contains whitespace characters. Some edit operations preserve the tags, but others, such as `REPLACE ... WITH ...` do not.
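As a small illustration of an explicit tag (the nonterminal and production below are invented for this example and do not appear in the real `common.edit_mlg`), a local `REPLACE ... WITH ...` edit can re-attach the tag that the operation would otherwise drop:

```
my_tactic: [
| REPLACE "mytac" ident
| WITH "mytac" OPT ident TAG SSR
]
```

Here the rewritten production is marked as coming from the SSR plugin, so `.. insertprodn` will show "SSR" to the right of that production in the generated documentation.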
A mapping from filenames to tags (e.g. "g_ltac2.mlg" is "Ltac2") is hard-coded as is filtering to avoid showing tags for, say, Ltac2 productions from appearing on every production in that chapter. If desired, this mechanism could be extended to tag certain productions as deprecated, perhaps in conjunction with a coqpp change. coq-8.15.0/doc/tools/docgram/common.edit_mlg000066400000000000000000002457641417001151100207170ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* " LIST1 global OPT natural | WITH "Resolve" [ "->" | "<-" ] LIST1 global OPT natural | DELETE "Resolve" "<-" LIST1 global OPT natural | REPLACE "Variables" "Transparent" | WITH [ "Constants" | "Variables" ] [ "Transparent" | "Opaque" ] | DELETE "Variables" "Opaque" | DELETE "Constants" "Transparent" | DELETE "Constants" "Opaque" | REPLACE "Transparent" LIST1 global | WITH [ "Transparent" | "Opaque" ] LIST1 global | DELETE "Opaque" LIST1 global | REPLACE "Extern" natural OPT Constr.constr_pattern "=>" Pltac.tactic | WITH "Extern" natural OPT constr_pattern "=>" tactic | INSERTALL "Hint" | APPENDALL opt_hintbases ] (* todo: does ARGUMENT EXTEND make the symbol global? It is in both extraargs and extratactics *) strategy_level_or_var: [ | DELETE EXTRAARGS_strategy_level | strategy_level ] EXTRAARGS_natural: [ | DELETENT ] EXTRAARGS_lconstr: [ | DELETENT ] EXTRAARGS_strategy_level: [ | DELETENT ] binders: [ | DELETE Pcoq.Constr.binders ] G_TACTIC_in_clause: [ | in_clause | MOVEALLBUT in_clause | in_clause ] SPLICE: [ | G_TACTIC_in_clause ] RENAME: [ | G_LTAC2_delta_flag ltac2_delta_reductions | G_LTAC2_strategy_flag ltac2_reductions | G_LTAC2_binder ltac2_binder | G_LTAC2_branches ltac2_branches | G_LTAC2_let_clause ltac2_let_clause | G_LTAC2_tactic_atom ltac2_tactic_atom | G_LTAC2_rewriter ltac2_rewriter | G_LTAC2_constr_with_bindings ltac2_constr_with_bindings | G_LTAC2_match_rule ltac2_match_rule | G_LTAC2_match_pattern ltac2_match_pattern | G_LTAC2_intropatterns ltac2_intropatterns | G_LTAC2_simple_intropattern ltac2_simple_intropattern | G_LTAC2_simple_intropattern_closed ltac2_simple_intropattern_closed | G_LTAC2_or_and_intropattern ltac2_or_and_intropattern | G_LTAC2_equality_intropattern ltac2_equality_intropattern | G_LTAC2_naming_intropattern ltac2_naming_intropattern | G_LTAC2_destruction_arg ltac2_destruction_arg | G_LTAC2_with_bindings ltac2_with_bindings | G_LTAC2_bindings ltac2_bindings | G_LTAC2_simple_binding ltac2_simple_binding | G_LTAC2_in_clause ltac2_in_clause | G_LTAC2_occs ltac2_occs | G_LTAC2_occs_nums ltac2_occs_nums | G_LTAC2_concl_occ ltac2_concl_occ | G_LTAC2_hypident_occ ltac2_hypident_occ | G_LTAC2_hypident ltac2_hypident | G_LTAC2_induction_clause ltac2_induction_clause | G_LTAC2_as_or_and_ipat ltac2_as_or_and_ipat | G_LTAC2_eqn_ipat ltac2_eqn_ipat | G_LTAC2_conversion ltac2_conversion | G_LTAC2_oriented_rewriter ltac2_oriented_rewriter | G_LTAC2_for_each_goal ltac2_for_each_goal | G_LTAC2_tactic_then_last ltac2_tactic_then_last | G_LTAC2_as_name ltac2_as_name | G_LTAC2_as_ipat ltac2_as_ipat | G_LTAC2_by_tactic ltac2_by_tactic | G_LTAC2_match_list ltac2_match_list | G_SSRMATCHING_cpattern ssr_one_term_pattern ] (* Renames to eliminate qualified names. 
Put other renames at the end *) RENAME: [ (* map missing names for rhs *) | Constr.constr term | Constr.global global | Constr.lconstr lconstr | Constr.cpattern cpattern | G_vernac.query_command query_command | G_vernac.section_subset_expr section_var_expr | Prim.ident ident | Prim.reference reference | Prim.string string | Prim.integer integer | Prim.qualid qualid | Prim.natural natural | Pvernac.Vernac_.main_entry vernac_control | Tactic.tactic tactic | Pltac.ltac_expr ltac_expr5 (* SSR *) | Pcoq.Constr.constr term | Prim.identref ident (* | G_vernac.def_body def_body | Prim.by_notation by_notation | Prim.natural natural *) | Vernac.fix_definition fix_definition (* todo: hmm, rename adds 1 prodn to closed_binder?? *) | Constr.closed_binder closed_binder ] (* written in OCaml *) impl_ident_head: [ | "{" ident ] lpar_id_coloneq: [ | "(" ident; ":=" ] (* lookahead symbols *) DELETE: [ | check_for_coloneq | local_test_lpar_id_colon | lookup_at_as_comma | test_only_starredidentrefs | test_bracket_ident | test_hash_ident | test_id_colon | test_lpar_id_colon | test_lpar_id_coloneq (* todo: grammar seems incorrect, repeats the "(" IDENT ":=" *) | test_lpar_nat_coloneq | test_lpar_id_rpar | test_lpar_idnum_coloneq | test_show_goal | test_name_colon | test_pipe_closedcurly | ensure_fixannot | test_array_opening | test_array_closing | test_variance_ident (* SSR *) | ssr_null_entry | ssrtermkind (* todo: rename as "test..." *) | ssrdoarg (* todo: this and the next one should be removed from the grammar? *) | ssrseqdir | ssrindex | ssrintrosarg | ssrtclarg | term_annotation (* todo: what is this? *) | test_idcomma | test_ident_no_do | test_nohidden | test_not_ssrslashnum | test_ssr_rw_syntax | test_ssreqid | test_ssrfwdid | test_ssrseqvar | test_ssrslashnum00 | test_ssrslashnum01 | test_ssrslashnum10 | test_ssrslashnum11 (* unused *) | constr_comma_sequence' | auto_using' | constr_may_eval ] (* additional nts to be spliced *) tactic_then_last: [ | REPLACE "|" LIST0 ( OPT ltac_expr5 ) SEP "|" | WITH LIST0 ( "|" ( OPT ltac_expr5 ) ) ] goal_tactics: [ | LIST0 ( OPT ltac_expr5 ) SEP "|" ] for_each_goal: [ | DELETENT ] for_each_goal: [ | goal_tactics | OPT ( goal_tactics "|" ) OPT ltac_expr5 ".." OPT ( "|" goal_tactics ) ] ltac2_tactic_then_last: [ | REPLACE "|" LIST0 ( OPT ltac2_expr6 ) SEP "|" (* Ltac2 plugin *) | WITH LIST0 ( "|" OPT ltac2_expr6 ) TAG Ltac2 ] ltac2_goal_tactics: [ | LIST0 ( OPT ltac2_expr6 ) SEP "|" TAG Ltac2 ] ltac2_for_each_goal: [ | DELETENT ] ltac2_for_each_goal: [ | ltac2_goal_tactics TAG Ltac2 | OPT ( ltac2_goal_tactics "|" ) OPT ltac2_expr6 ".." OPT ( "|" ltac2_goal_tactics ) TAG Ltac2 ] reference: [ | DELETENT ] reference: [ | qualid ] fullyqualid: [ | DELETENT ] fullyqualid: [ | qualid ] field_ident: [ | "." ident ] qualid: [ | DELETENT ] qualid: [ | ident LIST0 field_ident ] field: [ | DELETENT ] fields: [ | DELETENT ] dirpath: [ | REPLACE ident LIST0 field | WITH LIST0 ( ident "." 
) ident ] let_type_cstr: [ | DELETE OPT [ ":" lconstr ] | type_cstr ] case_item: [ | REPLACE term100 OPT [ "as" name ] OPT [ "in" pattern200 ] | WITH term100 OPT ("as" name) OPT [ "in" pattern200 ] ] type: [ | term200 ] term_forall_or_fun: [ | "forall" open_binders "," type ] binder_constr: [ | DELETE "forall" open_binders "," term200 | MOVETO term_forall_or_fun "fun" open_binders "=>" term200 | MOVETO term_let "let" name binders let_type_cstr ":=" term200 "in" term200 (*| MOVETO term_let "let" ":" ssr_mpat ":=" lconstr "in" lconstr TAG SSR *) | DELETE "let" ":" ssr_mpat ":=" lconstr "in" lconstr TAG SSR (* todo: restore for ssr *) | REPLACE "let" ":" ssr_mpat "in" pattern200 ":=" lconstr ssr_rtype "in" lconstr (* ssr plugin *) | WITH "let" ":" ssr_mpat OPT ( "in" pattern200 ) ":=" lconstr ssr_rtype "in" lconstr TAG SSR | DELETE "let" ":" ssr_mpat ":=" lconstr ssr_rtype "in" lconstr (* SSR plugin *) | DELETE "let" ":" ssr_mpat OPT ( "in" pattern200 ) ":=" lconstr ssr_rtype "in" lconstr TAG SSR (* todo: restore for SSR *) (*| MOVETO term_let "let" ":" ssr_mpat OPT ( "in" pattern200 ) ":=" lconstr ssr_rtype "in" lconstr TAG SSR*) | MOVETO term_if "if" term200 as_return_type "then" term200 "else" term200 | REPLACE "if" term200 "is" ssr_dthen ssr_else | WITH "if" term200 [ "is" | "isn't" ] ssr_dthen ssr_else TAG SSR | DELETE "if" term200 "isn't" ssr_dthen ssr_else | DELETE "if" term200 [ "is" | "isn't" ] ssr_dthen ssr_else TAG SSR (* todo: restore as "MOVETO term_if" for SSR *) | MOVETO term_fix "let" "fix" fix_decl "in" term200 | MOVETO term_cofix "let" "cofix" cofix_body "in" term200 | MOVETO term_let "let" [ "(" LIST0 name SEP "," ")" | "()" ] as_return_type ":=" term200 "in" term200 | MOVETO term_let "let" "'" pattern200 ":=" term200 "in" term200 | MOVETO term_let "let" "'" pattern200 ":=" term200 case_type "in" term200 | MOVETO term_let "let" "'" pattern200 "in" pattern200 ":=" term200 case_type "in" term200 | MOVETO term_fix "fix" fix_decls | MOVETO term_cofix "cofix" cofix_decls ] term_let: [ | REPLACE "let" name binders let_type_cstr ":=" term200 "in" term200 | WITH "let" name let_type_cstr ":=" term200 "in" term200 | "let" name LIST1 binder let_type_cstr ":=" term200 "in" term200 (* Don't need to document that "( )" is equivalent to "()" *) | REPLACE "let" [ "(" LIST0 name SEP "," ")" | "()" ] as_return_type ":=" term200 "in" term200 | WITH "let" "(" LIST0 name SEP "," ")" as_return_type ":=" term200 "in" term200 | MOVETO destructuring_let "let" "(" LIST0 name SEP "," ")" as_return_type ":=" term200 "in" term200 | REPLACE "let" "'" pattern200 ":=" term200 "in" term200 | WITH "let" "'" pattern200 ":=" term200 OPT case_type "in" term200 | DELETE "let" "'" pattern200 ":=" term200 case_type "in" term200 | MOVETO destructuring_let "let" "'" pattern200 ":=" term200 OPT case_type "in" term200 | MOVETO destructuring_let "let" "'" pattern200 "in" pattern200 ":=" term200 case_type "in" term200 ] qualid_annotated: [ | global univ_annot ] atomic_constr: [ | qualid_annotated | MOVETO primitive_notations string | MOVETO term_evar "_" | REPLACE "?" "[" identref "]" | WITH "?[" identref "]" | MOVETO term_evar "?[" identref "]" | REPLACE "?" 
"[" pattern_ident "]" | WITH "?[" pattern_ident "]" | MOVETO term_evar "?[" pattern_ident "]" | MOVETO term_evar pattern_ident evar_instance ] ltac_expr0: [ | REPLACE "[" ">" for_each_goal "]" | WITH "[>" for_each_goal "]" | DELETE ssrparentacarg ] (* lexer token *) IDENT: [ | ident ] scope_key: [ | IDENT ] scope_name: [ | IDENT ] scope: [ | scope_name | scope_key ] scope_delimiter: [ | REPLACE "%" IDENT | WITH "%" scope_key ] term100: [ | REPLACE term99 "<:" term200 | WITH term99 "<:" type | MOVETO term_cast term99 "<:" type | REPLACE term99 "<<:" term200 | WITH term99 "<<:" type | MOVETO term_cast term99 "<<:" type | REPLACE term99 ":" term200 | WITH term99 ":" type | MOVETO term_cast term99 ":" type ] constr: [ | REPLACE "@" global univ_annot | WITH "@" qualid_annotated | MOVETO term_explicit "@" qualid_annotated ] term10: [ (* Separate this LIST0 in the nonempty and the empty case *) (* The empty case is covered by constr *) | REPLACE "@" global univ_annot LIST0 term9 | WITH "@" qualid_annotated LIST1 term9 | REPLACE term9 | WITH constr | MOVETO term_application term9 LIST1 arg | MOVETO term_application "@" qualid_annotated LIST1 term9 (* fixme: add in as a prodn somewhere *) | MOVETO dangling_pattern_extension_rule "@" pattern_ident LIST1 identref | DELETE dangling_pattern_extension_rule ] term9: [ (* @Zimmi48: Special token .. is for use in the Notation command. (see bug_3304.v) *) | DELETE ".." term0 ".." ] term1: [ | REPLACE term0 ".(" global univ_annot LIST0 arg ")" | WITH term0 ".(" global univ_annot LIST0 arg ")" (* huh? *) | REPLACE term0 "%" IDENT | WITH term0 "%" scope_key | MOVETO term_scope term0 "%" scope_key | MOVETO term_projection term0 ".(" global univ_annot LIST0 arg ")" | MOVETO term_projection term0 ".(" "@" global univ_annot LIST0 ( term9 ) ")" ] term0: [ | DELETE ident univ_annot | DELETE ident Prim.fields univ_annot (* @Zimmi48: This rule is a hack, according to Hugo, and should not be shown in the manual. *) | DELETE "{" binder_constr "}" | REPLACE "{|" record_declaration bar_cbrace | WITH "{|" LIST0 field_def SEP ";" OPT ";" bar_cbrace | MOVETO primitive_notations NUMBER | MOVETO term_record "{|" LIST0 field_def SEP ";" OPT ";" bar_cbrace | MOVETO term_generalizing "`{" term200 "}" | MOVETO term_generalizing "`(" term200 ")" | MOVETO term_ltac "ltac" ":" "(" ltac_expr5 ")" | REPLACE "[" "|" array_elems "|" lconstr type_cstr "|" "]" univ_annot | WITH "[|" array_elems "|" lconstr type_cstr "|]" univ_annot ] fix_decls: [ | DELETE fix_decl | REPLACE fix_decl "with" LIST1 fix_decl SEP "with" "for" identref | WITH fix_decl OPT ( LIST1 ("with" fix_decl) "for" identref ) ] cofix_decls: [ | DELETE cofix_body | REPLACE cofix_body "with" LIST1 cofix_body SEP "with" "for" identref | WITH cofix_body OPT ( LIST1 ( "with" cofix_body ) "for" identref ) ] fields_def: [ | REPLACE field_def ";" fields_def | WITH LIST1 field_def SEP ";" | DELETE field_def ] binders_fixannot: [ | DELETE binder binders_fixannot | DELETE fixannot | DELETE (* empty *) | LIST0 binder OPT fixannot ] binder: [ | DELETE name ] open_binders: [ | REPLACE name LIST0 name ":" lconstr | WITH LIST1 name ":" type (* @Zimmi48: Special token .. is for use in the Notation command. (see bug_3304.v) *) | DELETE name ".." 
name | REPLACE name LIST0 name binders | WITH LIST1 binder | DELETE closed_binder binders ] closed_binder: [ | name | REPLACE "(" name LIST1 name ":" lconstr ")" | WITH "(" LIST1 name ":" type ")" | DELETE "(" name ":" lconstr ")" | DELETE "(" name ":=" lconstr ")" | REPLACE "(" name ":" lconstr ":=" lconstr ")" | WITH "(" name type_cstr ":=" lconstr ")" | DELETE "{" name "}" | DELETE "{" name LIST1 name "}" | REPLACE "{" name LIST1 name ":" lconstr "}" | WITH "{" LIST1 name type_cstr "}" | DELETE "{" name ":" lconstr "}" | MOVETO implicit_binders "{" LIST1 name type_cstr "}" | DELETE "[" name "]" | DELETE "[" name LIST1 name "]" | REPLACE "[" name LIST1 name ":" lconstr "]" | WITH "[" LIST1 name type_cstr "]" | DELETE "[" name ":" lconstr "]" | MOVETO implicit_binders "[" LIST1 name type_cstr "]" | REPLACE "(" Prim.name ":" lconstr "|" lconstr ")" | WITH "(" Prim.name ":" type "|" lconstr ")" | MOVETO generalizing_binder "`(" LIST1 typeclass_constraint SEP "," ")" | MOVETO generalizing_binder "`{" LIST1 typeclass_constraint SEP "," "}" | MOVETO generalizing_binder "`[" LIST1 typeclass_constraint SEP "," "]" | DELETE [ "of" | "&" ] term99 (* todo: remove for SSR *) ] name_colon: [ | name ":" ] typeclass_constraint: [ | EDIT ADD_OPT "!" term200 | REPLACE "{" name "}" ":" [ "!" | ] term200 | WITH "{" name "}" ":" OPT "!" term200 | REPLACE name ":" [ "!" | ] term200 | WITH name ":" OPT "!" term200 ] (* ?? From the grammar, Prim.name seems to be only "_" but ident is also accepted "*) Prim.name: [ | REPLACE "_" | WITH name ] oriented_rewriter: [ | REPLACE orient_rw rewriter | WITH orient rewriter ] DELETE: [ | orient_rw ] pattern10: [ | REPLACE pattern1 LIST1 pattern1 | WITH pattern1 LIST0 pattern1 | DELETE pattern1 ] pattern1: [ | REPLACE pattern0 "%" IDENT | WITH pattern0 "%" scope_key ] pattern0: [ | REPLACE "(" pattern200 ")" | WITH "(" LIST1 pattern200 SEP "|" ")" | DELETE "(" pattern200 "|" LIST1 pattern200 SEP "|" ")" | REPLACE "{|" record_patterns bar_cbrace | WITH "{|" LIST0 record_pattern bar_cbrace ] DELETE: [ | record_patterns ] eqn: [ | REPLACE LIST1 mult_pattern SEP "|" "=>" lconstr | WITH LIST1 [ LIST1 pattern100 SEP "," ] SEP "|" "=>" lconstr ] (* No constructor syntax, OPT [ "|" binders ] is not supported for Record *) record_definition: [ | opt_coercion ident_decl binders OPT [ ":" sort ] OPT ( ":=" OPT [ identref ] "{" record_fields "}" ) ] (* No mutual recursion, no inductive classes, type must be a sort *) (* constructor is optional but "Class record_definition" covers that case *) singleton_class_definition: [ | opt_coercion ident_decl binders OPT [ ":" sort ] ":=" constructor ] (* No record syntax, opt_coercion not supported for Variant, := ... 
required *) variant_definition: [ | ident_decl binders OPT [ "|" binders ] OPT [ ":" type ] ":=" OPT "|" LIST1 constructor SEP "|" decl_notations ] gallina: [ | REPLACE thm_token ident_decl binders ":" lconstr LIST0 [ "with" ident_decl binders ":" lconstr ] | WITH thm_token ident_decl binders ":" type LIST0 [ "with" ident_decl binders ":" type ] | DELETE assumptions_token inline assum_list | REPLACE finite_token LIST1 inductive_definition SEP "with" | WITH "Inductive" inductive_definition LIST0 ( "with" inductive_definition ) | "CoInductive" inductive_definition LIST0 ( "with" inductive_definition ) | "Variant" variant_definition LIST0 ( "with" variant_definition ) | [ "Record" | "Structure" ] record_definition LIST0 ( "with" record_definition ) | "Class" record_definition | "Class" singleton_class_definition | REPLACE "Fixpoint" LIST1 fix_definition SEP "with" | WITH "Fixpoint" fix_definition LIST0 ( "with" fix_definition ) | REPLACE "Let" "Fixpoint" LIST1 fix_definition SEP "with" | WITH "Let" "Fixpoint" fix_definition LIST0 ( "with" fix_definition ) | REPLACE "CoFixpoint" LIST1 cofix_definition SEP "with" | WITH "CoFixpoint" cofix_definition LIST0 ( "with" cofix_definition ) | REPLACE "Let" "CoFixpoint" LIST1 cofix_definition SEP "with" | WITH "Let" "CoFixpoint" cofix_definition LIST0 ( "with" cofix_definition ) | REPLACE "Scheme" LIST1 scheme SEP "with" | WITH "Scheme" scheme LIST0 ( "with" scheme ) ] finite_token: [ | DELETENT ] record_fields: [ | REPLACE record_field ";" record_fields | WITH LIST0 record_field SEP ";" OPT ";" | DELETE record_field | DELETE (* empty *) ] assumptions_token: [ | DELETENT ] inline: [ | REPLACE "Inline" "(" natural ")" | WITH "Inline" OPT ( "(" natural ")" ) | DELETE "Inline" ] univ_decl: [ | REPLACE "@{" LIST0 identref [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | bar_cbrace ] ] | WITH "@{" LIST0 identref OPT "+" OPT [ "|" LIST0 univ_constraint SEP "," OPT "+" ] "}" ] cumul_univ_decl: [ | REPLACE "@{" LIST0 variance_identref [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | bar_cbrace ] ] | WITH "@{" LIST0 variance_identref OPT "+" OPT [ "|" LIST0 univ_constraint SEP "," OPT "+" ] "}" ] of_type: [ | DELETENT ] of_type: [ | [ ":" | ":>" ] type ] def_body: [ | DELETE binders ":=" reduce lconstr | REPLACE binders ":" lconstr ":=" reduce lconstr | WITH LIST0 binder OPT (":" type) ":=" reduce lconstr | REPLACE binders ":" lconstr | WITH LIST0 binder ":" type ] delta_flag: [ | REPLACE "-" "[" LIST1 smart_global "]" | WITH OPT "-" "[" LIST1 smart_global "]" | DELETE "[" LIST1 smart_global "]" | OPTINREF ] ltac2_delta_reductions: [ | EDIT ADD_OPT "-" "[" refglobals "]" (* Ltac2 plugin *) ] ltac2_branches: [ | EDIT ADD_OPT "|" LIST1 branch SEP "|" (* Ltac2 plugin *) ] strategy_flag: [ | REPLACE OPT delta_flag | WITH delta_flag (*| REPLACE LIST1 red_flags | WITH LIST1 red_flag*) | (* empty *) ] filtered_import: [ | REPLACE global "(" LIST1 one_import_filter_name SEP "," ")" | WITH global OPT [ "(" LIST1 one_import_filter_name SEP "," ")" ] | DELETE global ] is_module_expr: [ | REPLACE ":=" module_expr_inl LIST0 ext_module_expr | WITH ":=" LIST1 module_expr_inl SEP "<+" ] is_module_type: [ | REPLACE ":=" module_type_inl LIST0 ext_module_type | WITH ":=" LIST1 module_type_inl SEP "<+" ] gallina_ext: [ | REPLACE "Arguments" smart_global LIST0 arg_specs OPT [ "," LIST1 [ LIST0 implicits_alt ] SEP "," ] OPT [ ":" LIST1 args_modifier SEP "," ] | WITH "Arguments" smart_global LIST0 arg_specs LIST0 [ "," LIST0 implicits_alt ] 
OPT [ ":" LIST1 args_modifier SEP "," ] | REPLACE "Implicit" "Type" reserv_list | WITH "Implicit" [ "Type" | "Types" ] reserv_list | DELETE "Implicit" "Types" reserv_list (* Per @Zimmi48, the global (qualid) must be a simple identifier if def_body is present Note that smart_global is "qualid | by_notation" and that ident_decl is "ident OPT univ_decl"; move *) | REPLACE "Canonical" OPT "Structure" global OPT [ OPT univ_decl def_body ] | WITH "Canonical" OPT "Structure" ident_decl def_body | REPLACE "Canonical" OPT "Structure" by_notation | WITH "Canonical" OPT "Structure" smart_global | DELETE "Coercion" global ":" class_rawexpr ">->" class_rawexpr | REPLACE "Coercion" by_notation ":" class_rawexpr ">->" class_rawexpr | WITH "Coercion" smart_global ":" class_rawexpr ">->" class_rawexpr (* semantically restricted per https://github.com/coq/coq/pull/12936#discussion_r492705820 *) | REPLACE "Coercion" global OPT univ_decl def_body | WITH "Coercion" ident OPT univ_decl def_body | REPLACE "Include" "Type" module_type_inl LIST0 ext_module_type | WITH "Include" "Type" LIST1 module_type_inl SEP "<+" | REPLACE "Generalizable" [ "All" "Variables" | "No" "Variables" | [ "Variable" | "Variables" ] LIST1 identref ] | WITH "Generalizable" [ [ "Variable" | "Variables" ] LIST1 identref | "All" "Variables" | "No" "Variables" ] (* don't show Export for Set, Unset *) | REPLACE "Export" "Set" setting_name option_setting | WITH "Set" setting_name option_setting | REPLACE "Export" "Unset" setting_name | WITH "Unset" setting_name | REPLACE "Instance" instance_name ":" term200 hint_info [ ":=" "{" record_declaration "}" | ":=" lconstr | ] | WITH "Instance" instance_name ":" type hint_info OPT [ ":=" "{" record_declaration "}" | ":=" lconstr ] | DELETE "Require" export_token LIST1 global | REPLACE "From" global "Require" export_token LIST1 global | WITH OPT [ "From" dirpath ] "Require" export_token LIST1 global ] (* lexer stuff *) LEFTQMARK: [ | "?" ] digit: [ | "0" ".." "9" ] decnat: [ | digit LIST0 [ digit | "_" ] ] hexdigit: [ | [ "0" ".." "9" | "a" ".." "f" | "A" ".." "F" ] ] hexnat: [ | [ "0x" | "0X" ] hexdigit LIST0 [ hexdigit | "_" ] ] bignat: [ | REPLACE NUMBER | WITH [ decnat | hexnat ] ] integer: [ | REPLACE bigint | WITH OPT "-" natural ] number: [ | OPT "-" decnat OPT ( "." LIST1 [ digit | "_" ] ) OPT ( [ "e" | "E" ] OPT [ "+" | "-" ] decnat ) | OPT "-" hexnat OPT ( "." LIST1 [ hexdigit | "_" ] ) OPT ( [ "p" | "P" ] OPT [ "+" | "-" ] decnat ) ] bigint: [ | DELETE bignat | REPLACE test_minus_nat "-" bignat | WITH OPT "-" bignat ] first_letter: [ | [ "a" ".." "z" | "A" ".." "Z" | "_" | unicode_letter ] ] subsequent_letter: [ | [ first_letter | digit | "'" | unicode_id_part ] ] ident: [ | DELETE IDENT | DELETE IDENT (* 2nd copy from SSR *) | first_letter LIST0 subsequent_letter ] NUMBER: [ | number ] (* todo: QUOTATION only used in a test suite .mlg files, is it documented/useful? *) string: [ | DELETENT ] STRING: [ | string ] (* todo: is "bigint" useful?? 
*) (* todo: "check_int" in g_prim.mlg should be "check_num" *) (* added productions *) command_entry: [ | noedit_mode ] DELETE: [ | tactic_then_locality ] ltac_expr5: [ (* make these look consistent with use of binder_tactic in other ltac_expr* *) | DELETE binder_tactic | DELETE ltac_expr4 | [ ltac_expr4 | binder_tactic ] ] ltac_constructs: [ (* repeated in main ltac grammar - need to create a COPY edit *) | ltac_expr3 ";" [ ltac_expr3 | binder_tactic ] | ltac_expr3 ";" "[" for_each_goal "]" | ltac_expr1 "+" [ ltac_expr2 | binder_tactic ] | ltac_expr1 "||" [ ltac_expr2 | binder_tactic ] (* | qualid LIST0 tactic_value add later due renaming tactic_value *) | "[>" for_each_goal "]" | toplevel_selector ltac_expr5 ] ltac_expr4: [ | REPLACE ltac_expr3 ";" for_each_goal "]" | WITH ltac_expr3 ";" "[" for_each_goal "]" | REPLACE ltac_expr3 ";" binder_tactic | WITH ltac_expr3 ";" [ ltac_expr3 | binder_tactic ] | DELETE ltac_expr3 ";" ltac_expr3 | MOVETO simple_tactic ltac_expr5 ";" "first" ssr_first_else TAG SSR | MOVETO simple_tactic ltac_expr5 ";" "first" ssrseqarg TAG SSR | MOVETO simple_tactic ltac_expr5 ";" "last" ssrseqarg TAG SSR | DELETE simple_tactic ] l3_tactic: [ ] ltac_expr3: [ | DELETE "abstract" ltac_expr2 | REPLACE "abstract" ltac_expr2 "using" ident | WITH "abstract" ltac_expr2 OPT ( "using" ident ) | l3_tactic (* | EDIT "do" ADD_OPT nat_or_var ssrmmod ssrdotac ssrclauses TAG SSR *) | DELETE "do" ssrmmod ssrdotac ssrclauses (* SSR plugin *) | DELETE "do" ssrortacarg ssrclauses (* SSR plugin *) | DELETE "do" nat_or_var ssrmmod ssrdotac ssrclauses (* SSR plugin *) | MOVEALLBUT ltac_builtins | l3_tactic | ltac_expr2 ] l2_tactic: [ ] ltac_expr2: [ | REPLACE ltac_expr1 "+" binder_tactic | WITH ltac_expr1 "+" [ ltac_expr2 | binder_tactic ] | DELETE ltac_expr1 "+" ltac_expr2 | REPLACE ltac_expr1 "||" binder_tactic | WITH ltac_expr1 "||" [ ltac_expr2 | binder_tactic ] | DELETE ltac_expr1 "||" ltac_expr2 | MOVETO ltac_builtins "tryif" ltac_expr5 "then" ltac_expr5 "else" ltac_expr2 | l2_tactic | DELETE ltac_builtins ] l1_tactic: [ ] ltac_expr1: [ | EDIT match_key ADD_OPT "reverse" "goal" "with" match_context_list "end" | MOVETO simple_tactic match_key OPT "reverse" "goal" "with" match_context_list "end" | MOVETO simple_tactic match_key ltac_expr5 "with" match_list "end" | REPLACE failkw [ nat_or_var | ] LIST0 message_token | WITH failkw OPT nat_or_var LIST0 message_token | REPLACE reference LIST0 tactic_arg | WITH reference LIST1 tactic_arg | l1_tactic | DELETE simple_tactic | MOVEALLBUT ltac_builtins | l1_tactic | tactic_value | reference LIST1 tactic_arg | ltac_expr0 ] (* split match_context_rule *) goal_pattern: [ | LIST0 match_hyp SEP "," "|-" match_pattern | "[" LIST0 match_hyp SEP "," "|-" match_pattern "]" | "_" ] match_context_rule: [ | DELETE LIST0 match_hyp SEP "," "|-" match_pattern "=>" ltac_expr5 | DELETE "[" LIST0 match_hyp SEP "," "|-" match_pattern "]" "=>" ltac_expr5 | DELETE "_" "=>" ltac_expr5 | goal_pattern "=>" ltac_expr5 ] match_context_list: [ | EDIT ADD_OPT "|" LIST1 match_context_rule SEP "|" ] match_list: [ | EDIT ADD_OPT "|" LIST1 match_rule SEP "|" ] match_rule: [ (* redundant; match_pattern -> term -> _ *) | DELETE "_" "=>" ltac_expr5 ] selector: [ | REPLACE range_selector_or_nth (* depends on whether range_selector_or_nth is deleted first *) | WITH LIST1 range_selector SEP "," ] range_selector_or_nth: [ | DELETENT ] firstorder_rhs: [ | firstorder_using | "with" LIST1 preident | firstorder_using "with" LIST1 preident ] where: [ | "at" "top" | "at" "bottom" | 
"after" ident | "before" ident ] simple_occurrences: [ (* placeholder (yuck) *) ] simple_tactic: [ | REPLACE "eauto" OPT nat_or_var OPT nat_or_var auto_using hintbases | WITH "eauto" OPT nat_or_var auto_using hintbases | REPLACE "debug" "eauto" OPT nat_or_var OPT nat_or_var auto_using hintbases | WITH "debug" "eauto" OPT nat_or_var auto_using hintbases | REPLACE "info_eauto" OPT nat_or_var OPT nat_or_var auto_using hintbases | WITH "info_eauto" OPT nat_or_var auto_using hintbases | DELETE "autorewrite" "with" LIST1 preident clause | DELETE "autorewrite" "with" LIST1 preident clause "using" tactic | DELETE "autorewrite" "*" "with" LIST1 preident clause | REPLACE "autorewrite" "*" "with" LIST1 preident clause "using" tactic | WITH "autorewrite" OPT "*" "with" LIST1 preident clause OPT ( "using" tactic ) | REPLACE "autounfold" hintbases clause_dft_concl | WITH "autounfold" hintbases OPT simple_occurrences | REPLACE "red" clause_dft_concl | WITH "red" simple_occurrences | REPLACE "simpl" OPT delta_flag OPT ref_or_pattern_occ clause_dft_concl | WITH "simpl" OPT delta_flag OPT ref_or_pattern_occ simple_occurrences | REPLACE "hnf" clause_dft_concl | WITH "hnf" simple_occurrences | REPLACE "cbv" strategy_flag clause_dft_concl | WITH "cbv" strategy_flag simple_occurrences | REPLACE "compute" OPT delta_flag clause_dft_concl | WITH "compute" OPT delta_flag simple_occurrences | REPLACE "lazy" strategy_flag clause_dft_concl | WITH "lazy" strategy_flag simple_occurrences | REPLACE "cbn" strategy_flag clause_dft_concl | WITH "cbn" strategy_flag simple_occurrences | REPLACE "fold" LIST1 constr clause_dft_concl | WITH "fold" LIST1 constr simple_occurrences | DELETE "cofix" ident | REPLACE "cofix" ident "with" LIST1 cofixdecl | WITH "cofix" ident OPT ( "with" LIST1 cofixdecl ) | DELETE "constructor" | DELETE "constructor" nat_or_var | REPLACE "constructor" nat_or_var "with" bindings | WITH "constructor" OPT nat_or_var OPT ( "with" bindings ) | DELETE "econstructor" | DELETE "econstructor" nat_or_var | REPLACE "econstructor" nat_or_var "with" bindings | WITH "econstructor" OPT ( nat_or_var OPT ( "with" bindings ) ) | DELETE "dependent" [ "simple" "inversion" | "inversion" | "inversion_clear" ] quantified_hypothesis as_or_and_ipat OPT [ "with" constr ] | "dependent" "inversion" quantified_hypothesis as_or_and_ipat OPT [ "with" constr ] | "dependent" "simple" "inversion" quantified_hypothesis as_or_and_ipat OPT [ "with" constr ] | "dependent" "inversion_clear" quantified_hypothesis as_or_and_ipat OPT [ "with" constr ] | DELETE "dependent" "rewrite" orient constr | REPLACE "dependent" "rewrite" orient constr "in" hyp | WITH "dependent" "rewrite" orient constr OPT ( "in" hyp ) | "firstorder" OPT tactic firstorder_rhs | DELETE "firstorder" OPT tactic firstorder_using | DELETE "firstorder" OPT tactic "with" LIST1 preident | DELETE "firstorder" OPT tactic firstorder_using "with" LIST1 preident | DELETE "fix" ident natural | REPLACE "fix" ident natural "with" LIST1 fixdecl | WITH "fix" ident natural OPT ( "with" LIST1 fixdecl ) | DELETE "generalize" constr | REPLACE "generalize" constr LIST1 constr | WITH "generalize" constr OPT ( LIST1 constr ) | EDIT "simplify_eq" ADD_OPT destruction_arg | EDIT "esimplify_eq" ADD_OPT destruction_arg | EDIT "discriminate" ADD_OPT destruction_arg | EDIT "ediscriminate" ADD_OPT destruction_arg | DELETE "injection" | DELETE "injection" destruction_arg | DELETE "injection" "as" LIST0 simple_intropattern | REPLACE "injection" destruction_arg "as" LIST0 simple_intropattern | WITH 
"injection" OPT destruction_arg OPT ( "as" LIST0 simple_intropattern ) | DELETE "einjection" | DELETE "einjection" destruction_arg | DELETE "einjection" "as" LIST0 simple_intropattern | REPLACE "einjection" destruction_arg "as" LIST0 simple_intropattern | WITH "einjection" OPT destruction_arg OPT ( "as" LIST0 simple_intropattern ) | EDIT "simple" "injection" ADD_OPT destruction_arg | DELETE "intro" (* todo: change the mlg to simplify! *) | DELETE "intro" ident | DELETE "intro" ident "at" "top" | DELETE "intro" ident "at" "bottom" | DELETE "intro" ident "after" hyp | DELETE "intro" ident "before" hyp | DELETE "intro" "at" "top" | DELETE "intro" "at" "bottom" | DELETE "intro" "after" hyp | DELETE "intro" "before" hyp | "intro" OPT ident OPT where | DELETE "intros" | REPLACE "intros" ne_intropatterns | WITH "intros" intropatterns | DELETE "eintros" | REPLACE "eintros" ne_intropatterns | WITH "eintros" intropatterns | DELETE "move" hyp "at" "top" | DELETE "move" hyp "at" "bottom" | DELETE "move" hyp "after" hyp | DELETE "move" hyp "before" hyp | "move" ident OPT where | DELETE "replace" "->" uconstr clause | DELETE "replace" "<-" uconstr clause | DELETE "replace" uconstr clause | "replace" orient uconstr clause | REPLACE "rewrite" "*" orient uconstr "in" hyp "at" occurrences by_arg_tac | WITH "rewrite" "*" orient uconstr OPT ( "in" hyp ) OPT ( "at" occurrences ) by_arg_tac | DELETE "rewrite" "*" orient uconstr "in" hyp by_arg_tac | DELETE "rewrite" "*" orient uconstr "at" occurrences by_arg_tac | DELETE "rewrite" "*" orient uconstr by_arg_tac | DELETE "setoid_rewrite" orient glob_constr_with_bindings | DELETE "setoid_rewrite" orient glob_constr_with_bindings "in" hyp | DELETE "setoid_rewrite" orient glob_constr_with_bindings "at" occurrences | REPLACE "setoid_rewrite" orient glob_constr_with_bindings "at" occurrences "in" hyp | WITH "setoid_rewrite" orient glob_constr_with_bindings OPT ( "at" occurrences ) OPT ( "in" hyp ) | REPLACE "stepl" constr "by" tactic | WITH "stepl" constr OPT ( "by" tactic ) | DELETE "stepl" constr | REPLACE "stepr" constr "by" tactic | WITH "stepr" constr OPT ( "by" tactic ) | DELETE "stepr" constr | DELETE "unify" constr constr | REPLACE "unify" constr constr "with" preident | WITH "unify" constr constr OPT ( "with" preident ) | DELETE "cutrewrite" orient constr | REPLACE "cutrewrite" orient constr "in" hyp | WITH "cutrewrite" orient constr OPT ( "in" hyp ) | DELETE "destauto" | REPLACE "destauto" "in" hyp | WITH "destauto" OPT ( "in" hyp ) | REPLACE "autounfold_one" hintbases "in" hyp | WITH "autounfold_one" hintbases OPT ( "in" hyp ) | DELETE "autounfold_one" hintbases | REPLACE "rewrite_db" preident "in" hyp | WITH "rewrite_db" preident OPT ( "in" hyp ) | DELETE "rewrite_db" preident | DELETE "setoid_symmetry" | REPLACE "setoid_symmetry" "in" hyp | WITH "setoid_symmetry" OPT ( "in" hyp ) | REPLACE "rewrite_strat" rewstrategy "in" hyp | WITH "rewrite_strat" rewstrategy OPT ( "in" hyp ) | DELETE "rewrite_strat" rewstrategy | REPLACE "protect_fv" string "in" ident | WITH "protect_fv" string OPT ( "in" ident ) | DELETE "protect_fv" string | DELETE "symmetry" | REPLACE "symmetry" "in" in_clause | WITH "symmetry" OPT simple_occurrences | DELETE "split" | REPLACE "split" "with" bindings | WITH "split" OPT ( "with" bindings ) | DELETE "esplit" | REPLACE "esplit" "with" bindings | WITH "esplit" OPT ( "with" bindings ) | DELETE "specialize" constr_with_bindings | REPLACE "specialize" constr_with_bindings "as" simple_intropattern | WITH "specialize" constr_with_bindings OPT 
( "as" simple_intropattern ) | DELETE "exists" | REPLACE "exists" LIST1 bindings SEP "," | WITH "exists" LIST0 bindings SEP "," | DELETE "eexists" | REPLACE "eexists" LIST1 bindings SEP "," | WITH "eexists" LIST0 bindings SEP "," | DELETE "left" | REPLACE "left" "with" bindings | WITH "left" OPT ( "with" bindings ) | DELETE "eleft" | REPLACE "eleft" "with" bindings | WITH "eleft" OPT ( "with" bindings ) | DELETE "right" | REPLACE "right" "with" bindings | WITH "right" OPT ( "with" bindings ) | DELETE "eright" | REPLACE "eright" "with" bindings | WITH "eright" OPT ( "with" bindings ) | DELETE "finish_timing" OPT string | REPLACE "finish_timing" "(" string ")" OPT string | WITH "finish_timing" OPT ( "(" string ")" ) OPT string | REPLACE "hresolve_core" "(" ident ":=" constr ")" "at" nat_or_var "in" constr | WITH "hresolve_core" "(" ident ":=" constr ")" OPT ( "at" nat_or_var ) "in" constr | DELETE "hresolve_core" "(" ident ":=" constr ")" "in" constr | EDIT "psatz_R" ADD_OPT nat_or_var tactic | EDIT "psatz_Q" ADD_OPT nat_or_var tactic | EDIT "psatz_Z" ADD_OPT nat_or_var tactic | REPLACE "subst" LIST1 hyp | WITH "subst" LIST0 hyp | DELETE "subst" | DELETE "congruence" OPT natural | REPLACE "congruence" OPT natural "with" LIST1 constr | WITH "congruence" OPT natural OPT ( "with" LIST1 constr ) | DELETE "simple" "congruence" OPT natural | REPLACE "simple" "congruence" OPT natural "with" LIST1 constr | WITH "simple" "congruence" OPT natural OPT ( "with" LIST1 constr ) | DELETE "show" "ltac" "profile" | REPLACE "show" "ltac" "profile" "cutoff" integer | WITH "show" "ltac" "profile" OPT [ "cutoff" integer | string ] | DELETE "show" "ltac" "profile" string (* perversely, the mlg uses "tactic3" instead of "ltac_expr3" *) | DELETE "transparent_abstract" tactic3 | REPLACE "transparent_abstract" tactic3 "using" ident | WITH "transparent_abstract" ltac_expr3 OPT ( "using" ident ) | "typeclasses" "eauto" OPT [ "bfs" | "dfs" | "best_effort" ] OPT nat_or_var OPT ( "with" LIST1 preident ) | DELETE "typeclasses" "eauto" "dfs" OPT nat_or_var "with" LIST1 preident | DELETE "typeclasses" "eauto" "dfs" OPT nat_or_var | DELETE "typeclasses" "eauto" "bfs" OPT nat_or_var "with" LIST1 preident | DELETE "typeclasses" "eauto" "bfs" OPT nat_or_var | DELETE "typeclasses" "eauto" OPT nat_or_var "with" LIST1 preident | DELETE "typeclasses" "eauto" OPT nat_or_var (* in Tactic Notation: *) | "setoid_replace" constr "with" constr OPT ( "using" "relation" constr ) OPT ( "in" hyp ) OPT ( "at" LIST1 int_or_var ) OPT ( "by" ltac_expr3 ) | REPLACE "apply" ssrapplyarg (* SSR plugin *) | WITH "apply" OPT ssrapplyarg TAG SSR | DELETE "apply" | REPLACE "elim" ssrarg ssrclauses (* SSR plugin *) | WITH "elim" OPT ( ssrarg ssrclauses ) TAG SSR | DELETE "elim" (* SSR plugin *) | REPLACE "case" ssrcasearg ssrclauses (* SSR plugin *) | WITH "case" OPT ( ssrcasearg ssrclauses ) TAG SSR | DELETE "case" (* SSR plugin *) | REPLACE "under" ssrrwarg ssrintros_ne "do" ssrhint3arg (* SSR plugin *) | WITH "under" ssrrwarg OPT ssrintros_ne OPT ( "do" ssrhint3arg ) TAG SSR | DELETE "under" ssrrwarg (* SSR plugin *) | DELETE "under" ssrrwarg ssrintros_ne (* SSR plugin *) | DELETE "under" ssrrwarg "do" ssrhint3arg (* SSR plugin *) | REPLACE "move" ssrmovearg ssrrpat (* SSR plugin *) | WITH "move" OPT ( OPT ssrmovearg ssrrpat ) TAG SSR | DELETE "move" ssrrpat (* SSR plugin *) | DELETE "move" (* SSR plugin *) | REPLACE "suff" "have" ssrhpats_nobs ssrhavefwd (* SSR plugin *) | WITH [ "suff" | "suffices" ] OPT ( "have" ssrhpats_nobs ) ssrhavefwd TAG SSR | 
DELETE "suffices" "have" ssrhpats_nobs ssrhavefwd (* SSR plugin *) | REPLACE "suff" ssrsufffwd (* SSR plugin *) | WITH [ "suff" | "suffices" ] ssrsufffwd TAG SSR | DELETE "suffices" ssrsufffwd (* SSR plugin *) | REPLACE "have" "suff" ssrhpats_nobs ssrhavefwd (* SSR plugin *) | WITH "have" [ "suff" | "suffices" ] ssrhpats_nobs ssrhavefwd TAG SSR | DELETE "have" "suffices" ssrhpats_nobs ssrhavefwd (* SSR plugin *) | REPLACE "gen" "have" ssrclear ssr_idcomma ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | WITH [ "gen" | "generally" ] "have" ssrclear ssr_idcomma ssrhpats_nobs ssrwlogfwd ssrhint TAG SSR | DELETE "generally" "have" ssrclear ssr_idcomma ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | REPLACE "wlog" "suff" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | WITH [ "wlog" | "without loss" ] OPT [ "suff" | "suffices" ] ssrhpats_nobs ssrwlogfwd ssrhint TAG SSR | DELETE "wlog" "suffices" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | DELETE "wlog" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | DELETE "without" "loss" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | DELETE "without" "loss" "suff" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | DELETE "without" "loss" "suffices" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) ] (* todo: don't use DELETENT for this *) ne_intropatterns: [ | DELETENT ] or_and_intropattern: [ | REPLACE "[" LIST1 intropatterns SEP "|" "]" | WITH "[" LIST0 (LIST0 intropattern) SEP "|" "]" | DELETE "()" | DELETE "(" simple_intropattern ")" | REPLACE "(" simple_intropattern "," LIST1 simple_intropattern SEP "," ")" | WITH "(" LIST0 simple_intropattern SEP "," ")" ] bar_cbrace: [ | REPLACE "|" "}" | WITH "|}" ] printable: [ | REPLACE "Scope" IDENT | WITH "Scope" scope_name | REPLACE "Visibility" OPT IDENT | WITH "Visibility" OPT scope_name | REPLACE [ "Sorted" | ] "Universes" OPT printunivs_subgraph OPT ne_string | WITH OPT "Sorted" "Universes" OPT printunivs_subgraph OPT ne_string | DELETE "Term" smart_global OPT univ_name_list (* readded in commands *) | REPLACE "Hint" | WITH "Hint" OPT [ "*" | smart_global ] | DELETE "Hint" smart_global | DELETE "Hint" "*" | INSERTALL "Print" ] add_zify: [ | [ "InjTyp" | "BinOp" | "UnOp" | "CstOp" | "BinRel" | "UnOpSpec" | "BinOpSpec" ] TAG Micromega | [ "PropOp" | "PropBinOp" | "PropUOp" | "Saturate" ]TAG Micromega ] show_zify: [ | [ "InjTyp" | "BinOp" | "UnOp" | "CstOp" | "BinRel" | "UnOpSpec" | "BinOpSpec" | "Spec" ] TAG Micromega ] scheme_kind: [ | DELETE "Induction" "for" smart_global "Sort" sort_family | DELETE "Minimality" "for" smart_global "Sort" sort_family | DELETE "Elimination" "for" smart_global "Sort" sort_family | DELETE "Case" "for" smart_global "Sort" sort_family | [ "Induction" | "Minimality" | "Elimination" | "Case" ] "for" smart_global "Sort" sort_family ] command: [ | REPLACE "Print" printable | WITH printable | REPLACE "Hint" hint opt_hintbases | WITH hint | "SubClass" ident_decl def_body | REPLACE "Ltac" LIST1 ltac_tacdef_body SEP "with" | WITH "Ltac" ltac_tacdef_body LIST0 ( "with" ltac_tacdef_body ) | REPLACE "Function" LIST1 function_fix_definition SEP "with" (* funind plugin *) | WITH "Function" function_fix_definition LIST0 ( "with" function_fix_definition ) (* funind plugin *) | REPLACE "Functional" "Scheme" LIST1 fun_scheme_arg SEP "with" (* funind plugin *) | WITH "Functional" "Scheme" fun_scheme_arg LIST0 ( "with" fun_scheme_arg ) (* funind plugin *) | DELETE "Cd" | REPLACE "Cd" ne_string | WITH "Cd" OPT ne_string | DELETE "Back" | REPLACE "Back" natural | WITH "Back" 
OPT natural | REPLACE "Load" [ "Verbose" | ] [ ne_string | IDENT ] | WITH "Load" OPT "Verbose" [ ne_string | IDENT ] | DELETE "Unset" setting_name | REPLACE "Set" setting_name option_setting | WITH OPT "Export" "Set" setting_name (* set flag *) | REPLACE "Test" setting_name "for" LIST1 table_value | WITH "Test" setting_name OPT ( "for" LIST1 table_value ) | DELETE "Test" setting_name (* hide the fact that table names are limited to 2 IDENTs *) | REPLACE "Add" IDENT IDENT LIST1 table_value | WITH "Add" setting_name LIST1 table_value | DELETE "Add" IDENT LIST1 table_value | DELETE "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "as" identref | DELETE "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "as" identref | DELETE "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | DELETE "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | DELETE "Add" "Parametric" "Relation" binders ":" constr constr "symmetry" "proved" "by" constr "as" identref | DELETE "Add" "Parametric" "Relation" binders ":" constr constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | DELETE "Add" "Parametric" "Relation" binders ":" constr constr "transitivity" "proved" "by" constr "as" identref | DELETE "Add" "Parametric" "Relation" binders ":" constr constr "as" identref | "Add" "Parametric" "Relation" binders ":" constr constr OPT ( "reflexivity" "proved" "by" constr ) OPT ( "symmetry" "proved" "by" constr ) OPT ("transitivity" "proved" "by" constr ) "as" identref | DELETE "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "as" identref | DELETE "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "as" identref | DELETE "Add" "Relation" constr constr "as" identref | DELETE "Add" "Relation" constr constr "symmetry" "proved" "by" constr "as" identref | DELETE "Add" "Relation" constr constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | DELETE "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | DELETE "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | DELETE "Add" "Relation" constr constr "transitivity" "proved" "by" constr "as" identref | "Add" "Relation" constr constr OPT ( "reflexivity" "proved" "by" constr ) OPT ( "symmetry" "proved" "by" constr ) OPT ( "transitivity" "proved" "by" constr ) "as" identref | REPLACE "Admit" "Obligations" "of" identref | WITH "Admit" "Obligations" OPT ( "of" identref ) | DELETE "Admit" "Obligations" | REPLACE "Create" "HintDb" IDENT; [ "discriminated" | ] | WITH "Create" "HintDb" IDENT; OPT "discriminated" | DELETE "Debug" "On" | REPLACE "Debug" "Off" | WITH "Debug" [ "On" | "Off" ] | EDIT "Defined" ADD_OPT identref | REPLACE "Derive" "Inversion" ident "with" constr "Sort" sort_family | WITH "Derive" "Inversion" ident "with" constr OPT ( "Sort" sort_family ) | DELETE "Derive" "Inversion" ident "with" constr | REPLACE "Derive" "Inversion_clear" ident "with" constr "Sort" sort_family | WITH "Derive" "Inversion_clear" ident "with" constr OPT ( "Sort" sort_family ) | DELETE 
"Derive" "Inversion_clear" ident "with" constr | EDIT "Focus" ADD_OPT natural | DELETE "Hint" "Rewrite" orient LIST1 constr ":" LIST0 preident | REPLACE "Hint" "Rewrite" orient LIST1 constr "using" tactic ":" LIST0 preident | WITH "Hint" "Rewrite" orient LIST1 constr OPT ( "using" tactic ) OPT ( ":" LIST0 preident ) | DELETE "Hint" "Rewrite" orient LIST1 constr | DELETE "Hint" "Rewrite" orient LIST1 constr "using" tactic | REPLACE "Next" "Obligation" "of" identref withtac | WITH "Next" "Obligation" OPT ( "of" identref ) withtac | DELETE "Next" "Obligation" withtac | REPLACE "Obligation" natural "of" identref ":" lglob withtac | WITH "Obligation" natural OPT ( "of" identref ) OPT ( ":" type withtac ) | DELETE "Obligation" natural "of" identref withtac | DELETE "Obligation" natural ":" lglob withtac | DELETE "Obligation" natural withtac | REPLACE "Obligations" "of" identref | WITH "Obligations" OPT ( "of" identref ) | DELETE "Obligations" | REPLACE "Preterm" "of" identref | WITH "Preterm" OPT ( "of" identref ) | DELETE "Preterm" | REPLACE "Proof" "using" section_var_expr "with" Pltac.tactic | WITH "Proof" "using" section_subset_expr OPT [ "with" ltac_expr5 ] | DELETE "Proof" "using" section_var_expr (* hide the fact that table names are limited to 2 IDENTs *) | REPLACE "Remove" IDENT IDENT LIST1 table_value | WITH "Remove" setting_name LIST1 table_value | DELETE "Remove" IDENT LIST1 table_value | DELETE "Show" | DELETE "Show" natural | DELETE "Show" ident | "Show" OPT [ ident | natural ] | DELETE "Show" "Ltac" "Profile" | REPLACE "Show" "Ltac" "Profile" "CutOff" integer | WITH "Show" "Ltac" "Profile" OPT [ "CutOff" integer | string ] | DELETE "Show" "Ltac" "Profile" string | DELETE "Show" "Proof" (* combined with Show Proof Diffs in vernac_toplevel *) | REPLACE "Solve" "All" "Obligations" "with" tactic | WITH "Solve" "All" "Obligations" OPT ( "with" tactic ) | DELETE "Solve" "All" "Obligations" | DELETE "Solve" "Obligations" "of" identref "with" tactic | DELETE "Solve" "Obligations" "of" identref | DELETE "Solve" "Obligations" "with" tactic | DELETE "Solve" "Obligations" | "Solve" "Obligations" OPT ( "of" identref ) OPT ( "with" tactic ) | REPLACE "Solve" "Obligation" natural "of" identref "with" tactic | WITH "Solve" "Obligation" natural OPT ( "of" identref ) "with" tactic | DELETE "Solve" "Obligation" natural "with" tactic | DELETE "Undo" | DELETE "Undo" natural | REPLACE "Undo" "To" natural | WITH "Undo" OPT ( OPT "To" natural ) | DELETE "Abort" | DELETE "Abort" "All" | REPLACE "Abort" identref | WITH "Abort" OPT [ "All" | identref ] (* show the locate options as separate commands *) | DELETE "Locate" locatable | locatable | REPLACE "Print" smart_global OPT univ_name_list | WITH "Print" OPT "Term" smart_global OPT univ_name_list | REPLACE "Declare" "Scope" IDENT | WITH "Declare" "Scope" scope_name (* odd that these are in command while other notation-related ones are in syntax *) | REPLACE "Number" "Notation" reference reference reference OPT number_options ":" preident | WITH "Number" "Notation" reference reference reference OPT number_options ":" scope_name | REPLACE "String" "Notation" reference reference reference OPT string_option ":" preident | WITH "String" "Notation" reference reference reference OPT string_option ":" scope_name | DELETE "Ltac2" ltac2_entry (* was split up *) | DELETE "Add" "Zify" "InjTyp" reference (* micromega plugin *) | DELETE "Add" "Zify" "BinOp" reference (* micromega plugin *) | DELETE "Add" "Zify" "UnOp" reference (* micromega plugin *) | DELETE "Add" 
"Zify" "CstOp" reference (* micromega plugin *) | DELETE "Add" "Zify" "BinRel" reference (* micromega plugin *) | DELETE "Add" "Zify" "PropOp" reference (* micromega plugin *) | DELETE "Add" "Zify" "PropBinOp" reference (* micromega plugin *) | DELETE "Add" "Zify" "PropUOp" reference (* micromega plugin *) | DELETE "Add" "Zify" "BinOpSpec" reference (* micromega plugin *) | DELETE "Add" "Zify" "UnOpSpec" reference (* micromega plugin *) | DELETE "Add" "Zify" "Saturate" reference (* micromega plugin *) | "Add" "Zify" add_zify reference TAG Micromega | DELETE "Show" "Zify" "InjTyp" (* micromega plugin *) | DELETE "Show" "Zify" "BinOp" (* micromega plugin *) | DELETE "Show" "Zify" "UnOp" (* micromega plugin *) | DELETE "Show" "Zify" "CstOp" (* micromega plugin *) | DELETE "Show" "Zify" "BinRel" (* micromega plugin *) | DELETE "Show" "Zify" "UnOpSpec" (* micromega plugin *) | DELETE "Show" "Zify" "BinOpSpec" (* micromega plugin *) (* keep this one | "Show" "Zify" "Spec" (* micromega plugin *)*) | "Show" "Zify" show_zify TAG Micromega | REPLACE "Goal" lconstr | WITH "Goal" type ] syntax: [ | REPLACE "Open" "Scope" IDENT | WITH "Open" "Scope" scope | REPLACE "Close" "Scope" IDENT | WITH "Close" "Scope" scope | REPLACE "Delimit" "Scope" IDENT; "with" IDENT | WITH "Delimit" "Scope" scope_name; "with" scope_key | REPLACE "Undelimit" "Scope" IDENT | WITH "Undelimit" "Scope" scope_name | REPLACE "Bind" "Scope" IDENT; "with" LIST1 class_rawexpr | WITH "Bind" "Scope" scope_name; "with" LIST1 class_rawexpr | REPLACE "Infix" ne_lstring ":=" constr syntax_modifiers OPT [ ":" IDENT ] | WITH "Infix" ne_lstring ":=" constr syntax_modifiers OPT [ ":" scope_name ] | REPLACE "Notation" lstring ":=" constr syntax_modifiers OPT [ ":" IDENT ] | WITH "Notation" lstring ":=" constr syntax_modifiers OPT [ ":" scope_name ] ] syntax_modifier: [ | DELETE "in" "custom" IDENT | REPLACE "in" "custom" IDENT; "at" "level" natural | WITH "in" "custom" IDENT OPT ( "at" "level" natural ) | DELETE IDENT; "in" "scope" IDENT | REPLACE IDENT; "," LIST1 IDENT SEP "," [ "at" level | "in" "scope" IDENT ] | WITH LIST1 IDENT SEP "," [ "at" level | "in" "scope" IDENT ] ] explicit_subentry: [ | REPLACE "strict" "pattern" "at" "level" natural | WITH "strict" "pattern" OPT ( "at" "level" natural ) | DELETE "strict" "pattern" | DELETE "pattern" | REPLACE "pattern" "at" "level" natural | WITH "pattern" OPT ( "at" "level" natural ) | DELETE "constr" (* covered by another prod *) ] binder_tactic: [ | REPLACE "let" [ "rec" | ] LIST1 let_clause SEP "with" "in" ltac_expr5 | WITH "let" OPT "rec" let_clause LIST0 ( "with" let_clause ) "in" ltac_expr5 | MOVEALLBUT ltac_builtins ] field_body: [ | REPLACE binders of_type lconstr | WITH binders of_type | REPLACE binders of_type lconstr ":=" lconstr | WITH binders of_type ":=" lconstr ] assumpt: [ | REPLACE LIST1 ident_decl of_type lconstr | WITH LIST1 ident_decl of_type ] constructor_type: [ | REPLACE binders [ of_type lconstr | ] | WITH binders OPT of_type ] (* todo: is this really correct? Search for "Pvernac.register_proof_mode" *) (* consider tactic_command vs tac2mode *) vernac_aux: [ | tactic_mode "." 
] def_token: [ | DELETE "SubClass" (* document separately from Definition and Example *) ] assumption_token: [ | REPLACE "Axiom" | WITH [ "Axiom" | "Axioms" ] | REPLACE "Conjecture" | WITH [ "Conjecture" | "Conjectures" ] | REPLACE "Hypothesis" | WITH [ "Hypothesis" | "Hypotheses" ] | REPLACE "Parameter" | WITH [ "Parameter" | "Parameters" ] | REPLACE "Variable" | WITH [ "Variable" | "Variables" ] ] attributes: [ | LIST0 ( "#[" LIST0 attribute SEP "," "]" ) LIST0 legacy_attr ] legacy_attr: [ | REPLACE "Local" | WITH [ "Local" | "Global" ] | DELETE "Global" | REPLACE "Polymorphic" | WITH [ "Polymorphic" | "Monomorphic" ] | DELETE "Monomorphic" | REPLACE "Cumulative" | WITH [ "Cumulative" | "NonCumulative" ] | DELETE "NonCumulative" ] sentence: [ ] (* productions defined below *) fix_definition: [ | REPLACE ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notations | WITH ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notations ] cofix_definition: [ | REPLACE ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notations | WITH ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notations ] type_cstr: [ | REPLACE ":" lconstr | WITH ":" type ] inductive_definition: [ | REPLACE opt_coercion cumul_ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] opt_constructors_or_fields decl_notations | WITH opt_coercion cumul_ident_decl binders OPT [ "|" binders ] OPT [ ":" type ] opt_constructors_or_fields decl_notations ] (* note that constructor -> identref constructor_type *) constructors_or_record: [ | DELETE "|" LIST1 constructor SEP "|" | REPLACE identref constructor_type "|" LIST1 constructor SEP "|" | WITH OPT "|" LIST1 constructor SEP "|" | DELETE identref constructor_type | REPLACE identref "{" record_fields "}" | WITH OPT identref "{" record_fields "}" | DELETE "{" record_fields "}" ] record_binder: [ | REPLACE name field_body | WITH name OPT field_body | DELETE name ] query_command: [ | REPLACE "Eval" red_expr "in" lconstr "." | WITH "Eval" red_expr "in" lconstr | REPLACE "Compute" lconstr "." | WITH "Compute" lconstr | REPLACE "Check" lconstr "." | WITH "Check" lconstr | REPLACE "About" smart_global OPT univ_name_list "." | WITH "About" smart_global OPT univ_name_list | REPLACE "SearchPattern" constr_pattern in_or_out_modules "." | WITH "SearchPattern" constr_pattern in_or_out_modules | REPLACE "SearchRewrite" constr_pattern in_or_out_modules "." | WITH "SearchRewrite" constr_pattern in_or_out_modules | REPLACE "Search" search_query search_queries "." | WITH "Search" search_queries ] vernac_toplevel: [ (* note these commands can't be referenced by vernac_control commands *) | REPLACE "Drop" "." | WITH "Drop" | REPLACE "Quit" "." | WITH "Quit" | REPLACE "BackTo" natural "." | WITH "BackTo" natural | REPLACE "Show" "Goal" natural "at" natural "." | WITH "Show" "Goal" natural "at" natural | REPLACE "Show" "Proof" "Diffs" OPT "removed" "." | WITH "Show" "Proof" OPT ( "Diffs" OPT "removed" ) | DELETE vernac_control ] vernac_control: [ (* replacing vernac_control with command is cheating a little; they can't refer to the vernac_toplevel commands. 
cover this in the descriptions of these commands *) | REPLACE "Time" vernac_control | WITH "Time" sentence | REPLACE "Redirect" ne_string vernac_control | WITH "Redirect" ne_string sentence | REPLACE "Timeout" natural vernac_control | WITH "Timeout" natural sentence | REPLACE "Fail" vernac_control | WITH "Fail" sentence | REPLACE "Succeed" vernac_control | WITH "Succeed" sentence | DELETE decorated_vernac ] of_module_type: [ | (* empty *) ] rewriter: [ | DELETE "!" constr_with_bindings_arg | DELETE [ "?" | LEFTQMARK ] constr_with_bindings_arg | DELETE natural "!" constr_with_bindings_arg | DELETE natural [ "?" | LEFTQMARK ] constr_with_bindings_arg | DELETE natural constr_with_bindings_arg | DELETE constr_with_bindings_arg | OPT natural OPT [ "?" | "!" ] constr_with_bindings_arg ] ltac2_rewriter: [ | DELETE "!" ltac2_constr_with_bindings (* Ltac2 plugin *) | DELETE [ "?" | LEFTQMARK ] ltac2_constr_with_bindings | DELETE lnatural "!" ltac2_constr_with_bindings (* Ltac2 plugin *) | DELETE lnatural [ "?" | LEFTQMARK ] ltac2_constr_with_bindings | DELETE lnatural ltac2_constr_with_bindings (* Ltac2 plugin *) | DELETE ltac2_constr_with_bindings (* Ltac2 plugin *) | OPT natural OPT [ "?" | "!" ] ltac2_constr_with_bindings ] ltac2_expr0: [ | DELETE "(" ")" ] tac2type_body: [ | REPLACE ":=" tac2typ_knd (* Ltac2 plugin *) | WITH [ ":=" | "::=" ] tac2typ_knd TAG Ltac2 | DELETE "::=" tac2typ_knd (* Ltac2 plugin *) ] record_declaration: [ | DELETE fields_def | LIST0 field_def ] fields_def: [ | DELETENT ] constr_body: [ | DELETE ":=" lconstr | REPLACE ":" lconstr ":=" lconstr | WITH OPT ( ":" type ) ":=" lconstr ] scheme: [ | DELETE scheme_kind | REPLACE identref ":=" scheme_kind | WITH OPT ( identref ":=" ) scheme_kind ] simple_reserv: [ | REPLACE LIST1 identref ":" lconstr | WITH LIST1 identref ":" type ] in_clause: [ | DELETE in_clause' | REPLACE LIST1 hypident_occ SEP "," "|-" concl_occ | WITH LIST1 hypident_occ SEP "," OPT ( "|-" concl_occ ) | DELETE LIST1 hypident_occ SEP "," | REPLACE "*" occs | WITH concl_occ (* todo: perhaps concl_occ should be "*" | "at" occs_nums *) ] ltac2_in_clause: [ | REPLACE LIST0 ltac2_hypident_occ SEP "," "|-" ltac2_concl_occ (* Ltac2 plugin *) | WITH LIST0 ltac2_hypident_occ SEP "," OPT ( "|-" ltac2_concl_occ ) TAG Ltac2 | DELETE LIST0 ltac2_hypident_occ SEP "," (* Ltac2 plugin *) ] decl_notations: [ | REPLACE "where" LIST1 decl_notation SEP decl_sep | WITH "where" decl_notation LIST0 (decl_sep decl_notation ) ] module_expr: [ | REPLACE module_expr_atom | WITH LIST1 module_expr_atom | DELETE module_expr module_expr_atom ] locatable: [ | INSERTALL "Locate" ] ne_in_or_out_modules: [ | REPLACE "inside" LIST1 global | WITH [ "inside" | "in" | "outside" ] LIST1 global | DELETE "in" LIST1 global | DELETE "outside" LIST1 global ] search_queries: [ | DELETE ne_in_or_out_modules | REPLACE search_query search_queries | WITH LIST1 ( search_query ) OPT ne_in_or_out_modules | DELETE (* empty *) ] positive_search_mark: [ | OPTINREF ] SPLICE: [ | positive_search_mark ] search_query: [ | REPLACE OPT "-" search_item | WITH search_item | "-" search_query | REPLACE OPT "-" "[" LIST1 ( LIST1 search_query ) SEP "|" "]" | WITH "[" LIST1 ( LIST1 search_query ) SEP "|" "]" ] search_item: [ | REPLACE search_where ":" ne_string OPT scope_delimiter | WITH OPT ( search_where ":" ) ne_string OPT scope_delimiter | DELETE ne_string OPT scope_delimiter | REPLACE search_where ":" constr_pattern | WITH OPT ( search_where ":" ) constr_pattern | DELETE constr_pattern ] by_notation: [ | REPLACE ne_string
OPT [ "%" IDENT ] | WITH ne_string OPT [ "%" scope_key ] ] decl_notation: [ | REPLACE ne_lstring ":=" constr syntax_modifiers OPT [ ":" IDENT ] | WITH ne_lstring ":=" constr syntax_modifiers OPT [ ":" scope_name ] ] ltac_production_item: [ | REPLACE ident "(" ident OPT ltac_production_sep ")" | WITH ident OPT ( "(" ident OPT ltac_production_sep ")" ) | DELETE ident ] input_fun: [ | DELETE ident | DELETE "_" | name ] let_clause: [ | DELETE identref ":=" ltac_expr5 | REPLACE "_" ":=" ltac_expr5 | WITH name ":=" ltac_expr5 ] tactic_mode: [ (* todo: make sure to document this production! *) (* deleting to allow splicing query_command into command *) | DELETE OPT toplevel_selector query_command | DELETE OPT ltac_selector OPT ltac_info tactic ltac_use_default | DELETE "par" ":" OPT ltac_info tactic ltac_use_default (* Ignore attributes (none apply) and "...". *) | ltac_info tactic | MOVETO command ltac_info tactic | DELETE command | REPLACE OPT toplevel_selector "{" (* semantically restricted *) | WITH OPT ( [ natural | "[" ident "]" ] ":" ) "{" | MOVETO simple_tactic OPT ( [ natural | "[" ident "]" ] ":" ) "{" | DELETE simple_tactic ] tactic_mode: [ | DELETENT ] ltac2_scope: [ | REPLACE syn_node (* Ltac2 plugin *) | WITH name TAG Ltac2 | REPLACE syn_node "(" LIST1 ltac2_scope SEP "," ")" (* Ltac2 plugin *) | WITH name "(" LIST1 ltac2_scope SEP "," ")" TAG Ltac2 ] syn_node: [ | DELETENT ] RENAME: [ | toplevel_selector toplevel_selector_temp ] toplevel_selector: [ | selector | "all" | "!" (* par is accepted even though it's not in the .mlg *) | "par" ] toplevel_selector_temp: [ | DELETE selector ":" | DELETE "all" ":" | DELETE "!" ":" | toplevel_selector ":" ] (* not included in insertprodn; defined in rst with :production: *) control_command: [ ] (* move all commands under "command" *) DELETE: [ | vernac ] vernac_aux: [ | DELETE gallina "." | DELETE gallina_ext "." | DELETE syntax "." | DELETE command_entry ] command: [ | gallina | gallina_ext | syntax | query_command | vernac_control | vernac_toplevel | command_entry ] SPLICE: [ | query_command ] query_command: [ ] (* re-add as a placeholder *) sentence: [ | OPT attributes command "." | OPT attributes OPT ( natural ":" ) query_command "." | OPT attributes OPT ( toplevel_selector ":" ) ltac_expr5 [ "." | "..." ] | control_command ] document: [ | LIST0 sentence ] (* add in ltac and Tactic Notation tactics that appear in the doc: *) ltac_defined_tactics: [ | "case_eq" constr | "classical_left" | "classical_right" | "contradict" ident | "discrR" | "easy" | "exfalso" | "inversion_sigma" OPT ( ident OPT ( "as" simple_intropattern ) ) | "lia" | "lra" | "nia" | "now_show" constr | "nra" | "over" TAG SSR | "split_Rabs" | "split_Rmult" | "tauto" | "time_constr" ltac_expr5 | "zify" ] (* todo: need careful review; assume that "[" ... 
"]" are literals *) tactic_notation_tactics: [ | "assert_fails" ltac_expr3 | "assert_succeeds" ltac_expr3 | "dependent" "destruction" ident OPT ( "generalizing" LIST1 hyp ) OPT ( "using" constr ) | "dependent" "induction" ident OPT ( [ "generalizing" | "in" ] LIST1 hyp ) OPT ( "using" constr ) | "dintuition" OPT ltac_expr5 | "dtauto" | "field" OPT ( "[" LIST1 constr "]" ) | "field_simplify" OPT ( "[" LIST1 constr "]" ) LIST1 constr OPT ( "in" ident ) | "field_simplify_eq" OPT ( "[" LIST1 constr "]" ) OPT ( "in" ident ) | "intuition" OPT ltac_expr5 (* todo: Not too keen on things like "with_power_flags" in tauto.ml, not easy to follow *) | "now" ltac_expr5 | "nsatz" OPT ( "with" "radicalmax" ":=" constr "strategy" ":=" constr "parameters" ":=" constr "variables" ":=" constr ) | "psatz" constr OPT nat_or_var | "ring" OPT ( "[" LIST1 constr "]" ) | "ring_simplify" OPT ( "[" LIST1 constr "]" ) LIST1 constr OPT ( "in" ident ) (* todo: ident was "hyp", worth keeping? *) ] (* defined in OCaml outside of mlgs *) tactic_value: [ | "uconstr" ":" "(" term200 ")" | MOVEALLBUT simple_tactic ] nonterminal: [ ] value_tactic: [ ] syn_value: [ | IDENT; ":" "(" nonterminal ")" ] tactic_value: [ | [ value_tactic | syn_value ] ] (* defined in Ltac2/Notations.v *) ltac2_match_key: [ | "lazy_match!" | "match!" | "multi_match!" ] ltac2_constructs: [ | ltac2_match_key ltac2_expr6 "with" ltac2_match_list "end" | ltac2_match_key OPT "reverse" "goal" "with" goal_match_list "end" ] simple_tactic: [ | ltac_builtins | ltac_constructs | ltac2_constructs | ltac_defined_tactics | tactic_notation_tactics ] tacdef_body: [ | REPLACE global LIST1 input_fun ltac_def_kind ltac_expr5 | WITH global LIST0 input_fun ltac_def_kind ltac_expr5 | DELETE global ltac_def_kind ltac_expr5 ] tac2def_typ: [ | REPLACE "Type" rec_flag LIST1 tac2typ_def SEP "with" (* Ltac2 plugin *) | WITH "Type" rec_flag tac2typ_def LIST0 ( "with" tac2typ_def ) TAG Ltac2 ] tac2def_val: [ | REPLACE mut_flag rec_flag LIST1 tac2def_body SEP "with" (* Ltac2 plugin *) | WITH mut_flag rec_flag tac2def_body LIST0 ( "with" tac2def_body ) TAG Ltac1 ] tac2alg_constructors: [ | REPLACE "|" LIST1 tac2alg_constructor SEP "|" (* Ltac2 plugin *) | WITH OPT "|" LIST1 tac2alg_constructor SEP "|" TAG Ltac2 | DELETE LIST0 tac2alg_constructor SEP "|" (* Ltac2 plugin *) | (* empty *) | OPTINREF ] SPLICE: [ | def_token | extended_def_token ] logical_kind: [ | DELETE thm_token | DELETE assumption_token | [ thm_token | assumption_token ] | DELETE "Definition" | DELETE "Example" | DELETE "Context" | DELETE "Primitive" (* SubClass was deleted from def_token *) | [ "Definition" | "Example" | "Context" | "Primitive" ] | DELETE "Coercion" | DELETE "Instance" | DELETE "Scheme" | DELETE "Canonical" | [ "Coercion" | "Instance" | "Scheme" | "Canonical" | "SubClass" ] | DELETE "Field" | DELETE "Method" | [ "Field" | "Method" ] ] (* ltac2 *) DELETE: [ | test_ltac1_env ] rec_flag: [ | OPTINREF ] ltac2_orient: [ | DELETENT ] ltac2_orient: [ | orient ] SPLICE: [ | ltac2_orient ] ltac2_expr0: [ (* | DELETE "(" ")" (* covered by "()" prodn *) | REPLACE "{" [ | LIST1 tac2rec_fieldexpr OPT ";" ] "}" | WITH "{" OPT ( LIST1 tac2rec_fieldexpr OPT ";" ) "}" *) ] (* todo: should | tac2pat1 "," LIST0 tac2pat1 SEP "," use LIST1? 
*) SPLICE: [ | ltac2_expr4 ] ltac2_expr3: [ | REPLACE ltac2_expr2 "," LIST1 ltac2_expr2 SEP "," (* Ltac2 plugin *) | WITH LIST1 ltac2_expr2 SEP "," TAG Ltac2 | DELETE ltac2_expr2 (* Ltac2 plugin *) ] tac2rec_fieldexprs: [ | DELETE tac2rec_fieldexpr ";" tac2rec_fieldexprs | DELETE tac2rec_fieldexpr ";" | DELETE tac2rec_fieldexpr | LIST1 tac2rec_fieldexpr OPT ";" ] tac2rec_fields: [ | DELETE tac2rec_field ";" tac2rec_fields | DELETE tac2rec_field ";" | DELETE tac2rec_field | LIST1 tac2rec_field SEP ";" OPT ";" TAG Ltac2 ] int_or_var: [ | REPLACE integer | WITH [ integer | identref ] | DELETE identref ] nat_or_var: [ | REPLACE natural | WITH [ natural | identref ] | DELETE identref ] ltac2_occs_nums: [ | DELETE LIST1 nat_or_anti (* Ltac2 plugin *) | REPLACE "-" nat_or_anti LIST0 nat_or_anti (* Ltac2 plugin *) | WITH OPT "-" LIST1 nat_or_anti TAG Ltac2 ] ltac2_entry: [ | REPLACE tac2def_typ (* Ltac2 plugin *) | WITH "Ltac2" tac2def_typ | REPLACE tac2def_syn (* Ltac2 plugin *) | WITH "Ltac2" tac2def_syn | REPLACE tac2def_mut (* Ltac2 plugin *) | WITH "Ltac2" tac2def_mut | REPLACE tac2def_val (* Ltac2 plugin *) | WITH "Ltac2" tac2def_val | REPLACE tac2def_ext (* Ltac2 plugin *) | WITH "Ltac2" tac2def_ext | "Ltac2" "Notation" [ string | lident ] ":=" ltac2_expr6 TAG Ltac2 (* variant *) | MOVEALLBUT command (* todo: MOVEALLBUT should ignore tag on "but" prodns *) ] ltac2_match_list: [ | EDIT ADD_OPT "|" LIST1 ltac2_match_rule SEP "|" (* Ltac2 plugin *) ] ltac2_or_and_intropattern: [ | DELETE "(" ltac2_simple_intropattern ")" (* Ltac2 plugin *) | REPLACE "(" ltac2_simple_intropattern "," LIST1 ltac2_simple_intropattern SEP "," ")" (* Ltac2 plugin *) | WITH "(" LIST1 ltac2_simple_intropattern SEP "," ")" TAG Ltac2 | REPLACE "(" ltac2_simple_intropattern "&" LIST1 ltac2_simple_intropattern SEP "&" ")" (* Ltac2 plugin *) | WITH "(" LIST1 ltac2_simple_intropattern SEP "&" ")" TAG Ltac2 ] SPLICE: [ | tac2def_val | tac2def_typ | tac2def_ext | tac2def_syn | tac2def_mut | rec_flag | locident | tac2alg_constructors | ltac2_binder | branch | anti ] ltac2_expr5: [ | REPLACE "let" OPT "rec" LIST1 ltac2_let_clause SEP "with" "in" ltac2_expr6 (* Ltac2 plugin *) | WITH "let" OPT "rec" ltac2_let_clause LIST0 ( "with" ltac2_let_clause ) "in" ltac2_expr6 TAG Ltac2 | MOVETO simple_tactic "match" ltac2_expr5 "with" ltac2_branches "end" (* Ltac2 plugin *) | MOVETO simple_tactic "if" ltac2_expr5 "then" ltac2_expr5 "else" ltac2_expr5 (* Ltac2 plugin *) | DELETE simple_tactic ] goal_match_list: [ | EDIT ADD_OPT "|" LIST1 gmatch_rule SEP "|" (* Ltac2 plugin *) ] ltac2_quotations: [ ] ltac2_tactic_atom: [ | MOVETO ltac2_quotations "constr" ":" "(" lconstr ")" (* Ltac2 plugin *) | MOVETO ltac2_quotations "open_constr" ":" "(" lconstr ")" (* Ltac2 plugin *) | MOVETO ltac2_quotations "ident" ":" "(" lident ")" (* Ltac2 plugin *) | MOVETO ltac2_quotations "pat" ":" "(" cpattern ")" (* Ltac2 plugin *) | MOVETO ltac2_quotations "reference" ":" "(" globref ")" (* Ltac2 plugin *) | MOVETO ltac2_quotations "ltac1" ":" "(" ltac1_expr_in_env ")" (* Ltac2 plugin *) | MOVETO ltac2_quotations "ltac1val" ":" "(" ltac1_expr_in_env ")" (* Ltac2 plugin *) ] (* non-Ltac2 "clause" is really clause_dft_concl + there is an ltac2 "clause" *) ltac2_clause: [ ] clause: [ | MOVEALLBUT ltac2_clause ] clause: [ | clause_dft_concl ] q_clause: [ | REPLACE clause | WITH ltac2_clause TAG Ltac2 ] ltac2_induction_clause: [ | REPLACE ltac2_destruction_arg ltac2_as_or_and_ipat ltac2_eqn_ipat OPT clause (* Ltac2 plugin *) | WITH ltac2_destruction_arg 
ltac2_as_or_and_ipat ltac2_eqn_ipat OPT ltac2_clause TAG Ltac2 ] starredidentref: [ | EDIT identref ADD_OPT "*" | EDIT "Type" ADD_OPT "*" | "All" ] ssexpr0: [ | DELETE "(" LIST0 starredidentref ")" | DELETE "(" LIST0 starredidentref ")" "*" | DELETE "(" ssexpr35 ")" | DELETE "(" ssexpr35 ")" "*" | "(" section_subset_expr ")" OPT "*" ] ssexpr35: [ | EDIT ADD_OPT "-" ssexpr50 ] simple_binding: [ | REPLACE "(" identref ":=" lconstr ")" | WITH "(" [ ident | natural ] ":=" lconstr ")" | DELETE "(" natural ":=" lconstr ")" ] subprf: [ | MOVEALLBUT simple_tactic | "{" (* should be removed. See https://github.com/coq/coq/issues/12004 *) ] ltac2_expr: [ | DELETE _ltac2_expr ] opt_clause: [ | DELETE "in" in_clause | DELETE "at" occs_nums | DELETE | clause_dft_concl ] fixdecl: [ | REPLACE "(" ident LIST0 simple_binder struct_annot ":" lconstr ")" | WITH "(" ident LIST0 simple_binder struct_annot ":" type ")" ] cofixdecl: [ | REPLACE "(" ident LIST0 simple_binder ":" lconstr ")" | WITH "(" ident LIST0 simple_binder ":" type ")" ] ssrfwdview: [ | REPLACE "/" ast_closure_term ssrfwdview (* SSR plugin *) | WITH LIST1 ( "/" ast_closure_term ) TAG SSR | DELETE "/" ast_closure_term (* SSR plugin *) ] ssrfwd: [ | REPLACE ":" ast_closure_lterm ":=" ast_closure_lterm (* SSR plugin *) | WITH OPT ( ":" ast_closure_lterm ) ":=" ast_closure_lterm TAG SSR | DELETE ":=" ast_closure_lterm (* SSR plugin *) ] ssrsetfwd: [ | REPLACE ":" ast_closure_lterm ":=" "{" ssrocc "}" cpattern (* SSR plugin *) | WITH OPT ( ":" ast_closure_lterm ) ":=" [ "{" ssrocc "}" cpattern | lcpattern ] TAG SSR | DELETE ":" ast_closure_lterm ":=" lcpattern (* SSR plugin *) | DELETE ":=" "{" ssrocc "}" cpattern (* SSR plugin *) | DELETE ":=" lcpattern (* SSR plugin *) ] (* per @gares *) ssrdgens: [ | REPLACE ":" ssrgen ssrdgens_tl (* SSR plugin *) | WITH ":" ssrgen OPT ( "/" ssrgen ) TAG SSR ] ssrdgens_tl: [ | DELETENT ] ssrgen: [ | REPLACE ssrdocc cpattern (* SSR plugin *) | WITH cpattern LIST0 [ LIST1 ident | cpattern ] TAG SSR | DELETE cpattern (* SSR plugin *) ] OPTINREF: [ ] ssrortacs: [ | EDIT ssrtacarg "|" ADD_OPT ssrortacs (* ssr plugin *) | EDIT "|" ADD_OPT ssrortacs (* ssr plugin *) | EDIT ADD_OPT ssrtacarg "|" OPT ssrortacs ] ssrocc: [ | REPLACE natural LIST0 natural (* SSR plugin *) | WITH [ natural | "+" | "-" ] LIST0 natural TAG SSR | DELETE "-" LIST0 natural (* SSR plugin *) | DELETE "+" LIST0 natural (* SSR plugin *) ] ssripat: [ | DELETE ssrdocc "->" (* SSR plugin *) | DELETE ssrdocc "<-" (* SSR plugin *) | REPLACE ssrdocc (* SSR plugin *) | WITH ssrdocc OPT [ "->" | "<-" ] TAG SSR | DELETE "->" (* SSR plugin *) | DELETE "<-" (* SSR plugin *) | DELETE "-/" "=" (* SSR plugin *) | DELETE "-/" "/" (* SSR plugin *) | DELETE "-/" integer "/" (* SSR plugin *) | DELETE "-/" integer "/=" (* SSR plugin *) | REPLACE "-/" integer "/" integer "=" (* SSR plugin *) | WITH "-/" integer [ "/=" | "/" | "/" integer "=" ] TAG SSR | DELETE "-/" "/=" (* SSR plugin *) | DELETE "-//" "=" (* SSR plugin *) | DELETE "[" ":" LIST0 ident "]" (* SSR plugin *) ] ssrsimpl_ne: [ | DELETE "/" natural "/" "=" (* SSR plugin *) (* parsed but not supported per @gares *) | DELETE "/" natural "/" (* SSR plugin *) | DELETE "/" natural "=" (* SSR plugin *) | DELETE "//" natural "=" (* SSR plugin *) ] hat: [ | DELETE "^" "~" ident (* SSR plugin *) | DELETE "^" "~" natural (* SSR plugin *) ] ssriorpat: [ | ssripats OPT ( [ "|" | "|-" ] ssriorpat ) TAG SSR | DELETE OPT ssripats "|" ssriorpat (* SSR plugin *) | DELETE OPT ssripats "|-" ">" ssriorpat (* SSR plugin *) | 
DELETE OPT ssripats "|-" ssriorpat (* SSR plugin *) (* "|->" | "||" | "|||" | "||||" are parsing hacks *) | DELETE OPT ssripats "|->" ssriorpat (* SSR plugin *) | DELETE OPT ssripats "||" ssriorpat (* SSR plugin *) | DELETE OPT ssripats "|||" ssriorpat (* SSR plugin *) | DELETE OPT ssripats "||||" ssriorpat (* SSR plugin *) | DELETE OPT ssripats (* SSR plugin *) ] ssrbinder: [ | REPLACE "(" ssrbvar LIST1 ssrbvar ":" lconstr ")" (* SSR plugin *) | WITH "(" LIST1 ssrbvar ":" lconstr ")" TAG SSR | REPLACE "(" ssrbvar ":" lconstr ":=" lconstr ")" (* SSR plugin *) | WITH "(" ssrbvar OPT ( ":" lconstr ) OPT ( ":=" lconstr ) ")" TAG SSR | DELETE "(" ssrbvar ")" (* SSR plugin *) | DELETE "(" ssrbvar ":" lconstr ")" (* SSR plugin *) | DELETE "(" ssrbvar ":=" lconstr ")" (* SSR plugin *) ] ssrhavefwd: [ | REPLACE ":" ast_closure_lterm ":=" ast_closure_lterm (* SSR plugin *) | WITH ":" ast_closure_lterm ":=" OPT ast_closure_lterm TAG SSR | DELETE ":" ast_closure_lterm ":=" (* SSR plugin *) | DELETE ":=" ast_closure_lterm (* SSR plugin *) ] ssrmult_ne: [ | EDIT ADD_OPT natural ssrmmod TAG SSR ] rpattern: [ | REPLACE lconstr "in" lconstr "in" lconstr (* SSR plugin *) | WITH OPT ( OPT ( OPT ( OPT lconstr "in" ) lconstr ) "in" ) lconstr TAG SSR | DELETE lconstr (* SSR plugin *) | DELETE "in" lconstr (* SSR plugin *) | DELETE lconstr "in" lconstr (* SSR plugin *) | DELETE "in" lconstr "in" lconstr (* SSR plugin *) ] ssrrule_ne: [ | DELETE ssrsimpl_ne (* SSR plugin *) | REPLACE [ "/" ssrterm | ssrterm | ssrsimpl_ne ] (* SSR plugin *) | WITH [ OPT "/" ssrterm | ssrsimpl_ne ] TAG SSR ] ssrunlockarg: [ | REPLACE "{" ssrocc "}" ssrterm (* SSR plugin *) | WITH OPT ( "{" ssrocc "}" ) ssrterm TAG SSR | DELETE ssrterm (* SSR plugin *) ] ssrclauses: [ | REPLACE "in" ssrclausehyps "|-" "*" (* SSR plugin *) | WITH "in" ssrclausehyps OPT "|-" OPT "*" TAG SSR | DELETE "in" ssrclausehyps "|-" (* SSR plugin *) | DELETE "in" ssrclausehyps "*" (* SSR plugin *) | DELETE "in" ssrclausehyps (* SSR plugin *) | REPLACE "in" "|-" "*" (* SSR plugin *) | WITH "in" [ "*" | "*" "|-" | "|-" "*" ] TAG SSR | DELETE "in" "*" (* SSR plugin *) | DELETE "in" "*" "|-" (* SSR plugin *) ] ssrclausehyps: [ | REPLACE ssrwgen "," ssrclausehyps (* SSR plugin *) | WITH ssrwgen LIST0 ( OPT "," ssrwgen ) TAG SSR | DELETE ssrwgen ssrclausehyps (* SSR plugin *) | DELETE ssrwgen (* SSR plugin *) ] ssrwgen: [ | DELETE ssrhoi_hyp (* SSR plugin *) | REPLACE "@" ssrhoi_hyp (* SSR plugin *) | WITH OPT "@" ssrhoi_hyp TAG SSR | REPLACE "(" ssrhoi_id ":=" lcpattern ")" (* SSR plugin *) | WITH "(" ssrhoi_id OPT ( ":=" lcpattern ) ")" TAG SSR | DELETE "(" ssrhoi_id ")" (* SSR plugin *) | DELETE "(" "@" ssrhoi_id ":=" lcpattern ")" (* SSR plugin *) ] ssrcongrarg: [ | REPLACE natural constr ssrdgens (* SSR plugin *) | WITH OPT natural constr OPT ssrdgens TAG SSR | DELETE natural constr (* SSR plugin *) | DELETE constr ssrdgens (* SSR plugin *) | DELETE constr (* SSR plugin *) ] ssrviewpos: [ | DELETE "for" "apply" "/" "/" (* SSR plugin *) ] ssrhintref: [ | REPLACE constr "|" natural (* SSR plugin *) | WITH constr OPT ( "|" natural ) TAG SSR | DELETE constr (* SSR plugin *) ] ssrmmod: [ | DELETE LEFTQMARK (* SSR plugin *) (* duplicate *) ] clear_switch: [ | "{" LIST0 ident "}" ] ssrrwocc: [ | REPLACE "{" LIST0 ssrhyp "}" (* SSR plugin *) | WITH clear_switch ] ssrrwarg: [ | EDIT "{" ADD_OPT ssrocc "}" OPT ssrpattern_squarep ssrrule_ne TAG SSR | REPLACE "{" LIST1 ssrhyp "}" ssrpattern_ne_squarep ssrrule_ne (* SSR plugin *) | WITH OPT ( OPT ( "{" LIST1 ssrhyp "}" ) 
ssrpattern_ne_squarep ) ssrrule_ne TAG SSR | DELETE ssrpattern_ne_squarep ssrrule_ne (* SSR plugin *) | DELETE ssrrule_ne (* SSR plugin *) ] ssrpattern_squarep: [ (* fix inconsistency *) | REPLACE "[" rpattern "]" (* SSR plugin *) | WITH ssrpattern_ne_squarep TAG SSR ] ssripats_ne: [ | REPLACE ssripat OPT ssripats (* SSR plugin *) | WITH LIST1 ssripat TAG SSR ] ssripats: [ (* fix inconsistency *) | REPLACE ssripat OPT ssripats (* SSR plugin *) | WITH ssripats_ne TAG SSR ] lcpattern: [ (* per @gares *) | DELETE "Qed" lconstr ] ssrapplyarg: [ | EDIT ADD_OPT ssrbwdview ":" ssragen OPT ssragens OPT ssrintros TAG SSR ] constr_with_bindings_arg: [ | EDIT ADD_OPT ">" constr_with_bindings ] destruction_arg: [ | DELETE constr_with_bindings ] ssrhintarg: [ | EDIT "[" ADD_OPT ssrortacs "]" TAG SSR ] ssrhint3arg: [ | EDIT "[" ADD_OPT ssrortacs "]" TAG SSR ] ssr_first: [ | DELETE ssr_first ssrintros_ne (* SSR plugin *) | REPLACE "[" LIST0 ltac_expr5 SEP "|" "]" (* SSR plugin *) | WITH "[" LIST0 ltac_expr5 SEP "|" "]" LIST0 ssrintros_ne TAG SSR ] ssr_first_else: [ | EDIT ssr_first ADD_OPT ssrorelse TAG SSR ] ssrseqarg: [ | EDIT ADD_OPT ssrseqidx ssrswap TAG SSR ] ssr_dpat: [ | REPLACE ssr_mpat "in" pattern200 ssr_rtype (* SSR plugin *) | WITH ssr_mpat OPT ( OPT ( "in" pattern200 ) ssr_rtype ) TAG SSR | DELETE ssr_mpat ssr_rtype (* SSR plugin *) | DELETE ssr_mpat (* SSR plugin *) ] ssr_one_term_pattern: [ | DELETE "Qed" constr ] ssrarg: [ | EDIT ADD_OPT ssrfwdview OPT ssreqid ssrdgens OPT ssrintros (* SSR plugin *) ] ssragen: [ | REPLACE "{" LIST1 ssrhyp "}" ssrterm (* SSR plugin *) | WITH OPT ( "{" LIST1 ssrhyp "}" ) ssrterm TAG SSR | DELETE ssrterm (* SSR plugin *) ] ssrbinder: [ | REPLACE [ "of" | "&" ] term99 (* SSR plugin *) | WITH "of" term99 TAG SSR | "&" term99 TAG SSR ] firstorder_rhs: [ | DELETE OPT firstorder_using | DELETE "with" LIST1 preident | REPLACE OPT firstorder_using "with" LIST1 preident | WITH OPT firstorder_using OPT ( "with" LIST1 preident ) ] attribute: [ | DELETE "using" OPT attr_value ] hypident: [ (* todo: restore for SSR *) | DELETE "(" "type" "of" ident ")" (* SSR plugin *) | DELETE "(" "value" "of" ident ")" (* SSR plugin *) ] ref_or_pattern_occ: [ | DELETE smart_global OPT occs | DELETE constr OPT occs | unfold_occ | pattern_occ ] clause_dft_concl: [ (* omit an OPT since clause_dft_concl is always OPT *) | REPLACE OPT occs | WITH occs ] simple_occurrences: [ | clause_dft_concl (* semantically restricted: no "at" clause *) ] occs_nums: [ | EDIT ADD_OPT "-" LIST1 nat_or_var ] variance_identref: [ | EDIT ADD_OPT variance identref ] conversion: [ | DELETE constr | DELETE constr "with" constr | REPLACE constr "at" occs_nums "with" constr | WITH OPT ( constr OPT ( "at" occs_nums ) "with" ) constr ] induction_principle: [ | eliminator opt_clause ] induction_clause: [ | REPLACE destruction_arg OPT as_or_and_ipat OPT eqn_ipat opt_clause | WITH destruction_arg OPT as_or_and_ipat OPT eqn_ipat opt_clause ] induction_clause_list: [ | DELETE LIST1 induction_clause SEP "," OPT eliminator opt_clause | LIST1 induction_clause SEP "," OPT induction_principle ] (* see https://github.com/coq/coq/pull/14179#discussion_r654000296 *) as_or_and_ipat: [ | DELETE "as" or_and_intropattern_loc | DELETE "as" equality_intropattern | "as" or_and_intropattern ] SPLICE: [ | clause | noedit_mode | match_list | match_context_list | IDENT | LEFTQMARK | NUMBER | STRING | hyp | identref | pattern_ident | constr_eval (* splices as multiple prods *) | tactic_then_last (* todo: dependency on c.edit_mlg edit?? 
really useful? *) | ltac2_tactic_then_last | Prim.name | ltac_selector | Constr.ident | attribute_list | term99 | term90 | term9 | term8 | pattern200 | pattern99 | pattern90 | ne_lstring | ne_string | lstring | fullyqualid | global | reference | bar_cbrace | lconstr (* SSR *) | ast_closure_term | ast_closure_lterm | ident_no_do | ssrterm | ssrtacarg | ssrtac3arg | ssrhyp | ssrhoi_hyp | ssrhoi_id | ssrhpats | ssrhpats_nobs | ssrfwdid | ssrmovearg | ssrcasearg | ssrrwargs | ssrviewposspc | ssrpatternarg | ssr_elsepat | ssr_mpat | ssrunlockargs | ssrcofixfwd | ssrfixfwd | ssrhavefwdwbinders | ssrparentacarg | ssrposefwd | ssrstruct | ssrrpat | ssrhint | ssrpattern_squarep | ssrhintref | ssrexactarg | ssrclear | ssrmult | ssripats | ssrintros | ssrrule | ssrpattern_squarep | ssrcongrarg | ssrdotac | ssrunlockarg | ssrortacarg | ssrsetfwd | ssr_idcomma | ssr_dthen | ssr_else | ssr_rtype | ssreqid | preident | lpar_id_coloneq | binders | check_module_types | decl_sep | function_fix_definition (* loses funind annotation *) | glob | glob_constr_with_bindings | id_or_meta | lglob | ltac_tacdef_body | mode | mult_pattern | open_constr | record_declaration | tactic | uconstr | impl_ident_head | branches | check_module_type | decorated_vernac | ext_module_expr | ext_module_type | test | binder_constr | atomic_constr | let_type_cstr | name_colon | closed_binder | binders_fixannot | as_return_type | case_type | universe_increment | type_cstr | record_pattern | evar_instance | fix_decls | cofix_decls | assum_list | assum_coe | inline | occs | ltac_info | field_mods | ltac_production_sep | ltac_tactic_level | printunivs_subgraph | ring_mods | eliminator (* todo: splice or not? *) | quoted_attributes (* todo: splice or not? *) | printable | hint | record_fields | constructor_type | record_binder | at_level_opt | table_value | in_or_out_modules | option_setting | orient | with_bindings | by_arg_tac | by_tactic | quantified_hypothesis | in_hyp_list | rename | export_token | reserv_tuple | inst | opt_coercion | opt_constructors_or_fields | is_module_type | is_module_expr | module_expr | mlname | withtac | debug | eauto_search_strategy | constr_body | reference_or_constr | opt_hintbases | hints_path_atom | opthints | scheme | fresh_id | ltac_def_kind | intropatterns | instance_name | failkw | ne_in_or_out_modules | search_queries | locatable | scope_delimiter | one_import_filter_name | search_where | message_token | input_fun | ltac_use_default | toplevel_selector_temp | comment | register_token | match_context_rule | match_rule | by_notation | lnatural | nat_or_anti | globref | let_binder | refglobals (* Ltac2 *) | syntax_modifiers | array_elems | G_LTAC2_input_fun | ltac2_simple_intropattern_closed | ltac2_with_bindings | int_or_id | fun_ind_using | with_names | eauto_search_strategy_name | constr_with_bindings | simple_binding | ssexpr35 (* strange in mlg, ssexpr50 is after this *) | number_string_mapping | number_options | string_option | tac2type_body | tac2rec_fields | mut_flag | tac2rec_fieldexprs | syn_level | firstorder_rhs | firstorder_using | hints_path_atom | ref_or_pattern_occ | cumul_ident_decl | variance | variance_identref | rewriter | clause_dft_all | or_and_intropattern_loc | eqn_ipat | conversion | type_cast | opt_clause | struct_annot | fixdecl | cofixdecl | induction_clause_list | as_or_and_ipat ] (* end SPLICE *) RENAME: [ | occurrences rewrite_occs ] RENAME: [ | tactic3 ltac_expr3 (* todo: can't figure out how this gets mapped by coqpp *) | tactic1 ltac_expr1 (* todo: can't figure out 
how this gets mapped by coqpp *) | tactic0 ltac_expr0 (* todo: can't figure out how this gets mapped by coqpp *) | ltac_expr5 ltac_expr (* | nonsimple_intropattern intropattern (* ltac2 *) *) | term200 term | pattern100 pattern (*| impl_ident_tail impl_ident*) | ssexpr50 section_var_expr50 | ssexpr0 section_var_expr0 | section_subset_expr section_var_expr | fun_scheme_arg func_scheme_def | BULLET bullet | constr one_term (* many, many, many *) | class_rawexpr class (* OCaml reserved word *) | smart_global reference (* many, many *) (* | searchabout_query search_item *) | Pltac.tactic ltac_expr (* many uses in EXTENDs *) | ltac2_type5 ltac2_type | ltac2_expr6 ltac2_expr | starredidentref starred_ident_ref | ssrocc ssr_occurrences | ssrsimpl_ne s_item | ssrclear_ne ssrclear | ssrmult_ne mult | ssripats_ne ssripats | ssrrule_ne r_item | ssrintros_ne ssrintros | ssrpattern_ne_squarep ssrpattern_squarep | ssrrwarg rewrite_item | ssrrwocc occ_or_clear | rpattern rewrite_pattern | ssripat i_item | ssrwgen gen_item | ssrfwd ssrdefbody | ssrclauses ssr_in | ssrcpat ssrblockpat | constr_pattern one_pattern | hints_path hints_regexp | clause_dft_concl occurrences | in_clause goal_occurrences | unfold_occ reference_occs | pattern_occ pattern_occs | hypident_occ hyp_occs | concl_occ concl_occs | constr_with_bindings_arg one_term_with_bindings | red_flag reduction | strategy_flag reductions | delta_flag delta_reductions | q_strategy_flag q_reductions | destruction_arg induction_arg ] simple_tactic: [ (* due to renaming of tactic_value; Use LIST1 for function application *) | qualid LIST1 tactic_arg ] SPLICE: [ | gallina | gallina_ext | syntax | vernac_control | vernac_toplevel | command_entry | ltac_builtins | ltac_constructs | ltac2_constructs | ltac_defined_tactics | tactic_notation_tactics ] (* todo: ssrreflect*.rst ref to fix_decl is incorrect *) REACHABLE: [ | command | simple_tactic ] NOTINRSTS: [ | simple_tactic | REACHABLE | NOTINRSTS | l1_tactic | l2_tactic | l3_tactic | binder_tactic | value_tactic | ltac2_entry (* ltac2 syntactic classes *) | q_intropatterns | q_intropattern | q_ident | q_destruction_arg | q_with_bindings | q_bindings | q_reductions | q_reference | q_clause | q_occurrences | q_induction_clause | q_conversion | q_rewriting | q_dispatch | q_hintdb | q_move_location | q_pose | q_assert | q_constr_matching | q_goal_matching ] REACHABLE: [ | NOTINRSTS ] coq-8.15.0/doc/tools/docgram/doc_grammar.ml000066400000000000000000002160061417001151100205110ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* sprintf "(Sterm %s) " s | Snterm s -> sprintf "(Snterm %s) " s | Slist1 sym -> sprintf "(Slist1 %s) " (db_output_prodn sym) | Slist1sep (sym, sep) -> sprintf "(Slist1sep %s %s) " (db_output_prodn sep) (db_output_prodn sym) | Slist0 sym -> sprintf "(Slist0 %s) " (db_output_prodn sym) | Slist0sep (sym, sep) -> sprintf "(Slist0sep %s %s) " (db_output_prodn sep) (db_output_prodn sym) | Sopt sym -> sprintf "(Sopt %s) " (db_output_prodn sym) | Sparen prod -> sprintf "(Sparen %s) " (db_out_list prod) | Sprod prods -> sprintf "(Sprod %s) " (db_out_prods prods) | Sedit s -> sprintf "(Sedit %s) " s | Sedit2 (s, s2) -> sprintf "(Sedit2 %s %s) " s s2 and db_out_list prod = sprintf "(%s)" (map_and_concat db_output_prodn prod) and db_out_prods prods = sprintf "( %s )" (map_and_concat ~delim:" | " db_out_list prods) (* identify special chars 
that don't get a trailing space in output *) let omit_space s = List.mem s ["?"; "."; "#"] let rec output_prod plist need_semi = function | Sterm s -> if plist then sprintf "%s" s else sprintf "\"%s\"" s | Snterm s -> if plist then sprintf "`%s`" s else sprintf "%s%s" s (if s = "IDENT" && need_semi then ";" else "") | Slist1 sym -> sprintf "LIST1 %s" (prod_to_str ~plist [sym]) | Slist1sep (sym, sep) -> sprintf "LIST1 %s SEP %s" (prod_to_str ~plist [sym]) (prod_to_str ~plist [sep]) | Slist0 sym -> sprintf "LIST0 %s" (prod_to_str ~plist [sym]) | Slist0sep (sym, sep) -> sprintf "LIST0 %s SEP %s" (prod_to_str ~plist [sym]) (prod_to_str ~plist [sep]) | Sopt sym -> sprintf "OPT %s" (prod_to_str ~plist [sym]) | Sparen sym_list -> sprintf "( %s )" (prod_to_str sym_list) | Sprod sym_list_list -> sprintf "[ %s ]" (String.concat " " (List.mapi (fun i r -> let prod = (prod_to_str r) in let sep = if i = 0 then "" else if prod <> "" then "| " else "|" in sprintf "%s%s" sep prod) sym_list_list)) | Sedit s -> sprintf "%s" s (* todo: make TAG info output conditional on the set of prods? *) | Sedit2 ("TAG", plugin) -> if plist then sprintf " (%s plugin)" plugin else sprintf " (* %s plugin *)" plugin | Sedit2 ("FILE", file) -> let file_suffix_regex = Str.regexp ".*/\\([a-zA-Z0-9_\\.]+\\)" in let suffix = if Str.string_match file_suffix_regex file 0 then Str.matched_group 1 file else file in if plist then sprintf " (%s)" suffix else sprintf " (* %s *)" suffix | Sedit2 (s, s2) -> sprintf "%s \"%s\"" s s2 and prod_to_str_r plist prod = match prod with | Sterm s :: Snterm "ident" :: tl when omit_space s && plist -> (sprintf "%s`ident`" s) :: (prod_to_str_r plist tl) | p :: tl -> let need_semi = match prod with | Snterm "IDENT" :: Sterm _ :: _ | Snterm "IDENT" :: Sprod _ :: _ -> true | _ -> false in (output_prod plist need_semi p) :: (prod_to_str_r plist tl) | [] -> [] and prod_to_str ?(plist=false) prod = String.concat " " (prod_to_str_r plist prod) (* Determine if 2 productions are equal ignoring Sedit and Sedit2 *) let ematch prod edit = let rec ematchr prod edit = (*Printf.printf "%s and\n %s\n\n" (prod_to_str prod) (prod_to_str edit);*) match (prod, edit) with | (_, Sedit _ :: tl) | (_, Sedit2 _ :: tl) -> ematchr prod tl | (Sedit _ :: tl, _) | (Sedit2 _ :: tl, _) -> ematchr tl edit | (phd :: ptl, hd :: tl) -> let m = match (phd, hd) with | (Slist1 psym, Slist1 sym) | (Slist0 psym, Slist0 sym) | (Sopt psym, Sopt sym) -> ematchr [psym] [sym] | (Slist1sep (psym, psep), Slist1sep (sym, sep)) | (Slist0sep (psym, psep), Slist0sep (sym, sep)) -> ematchr [psym] [sym] && ematchr [psep] [sep] | (Sparen psyml, Sparen syml) -> ematchr psyml syml | (Sprod psymll, Sprod symll) -> if List.compare_lengths psymll symll != 0 then false else List.fold_left (&&) true (List.map2 ematchr psymll symll) | _, _ -> phd = hd in m && ematchr ptl tl | ([], hd :: tl) -> false | (phd :: ptl, []) -> false | ([], []) -> true in (*Printf.printf "\n";*) let rv = ematchr prod edit in (*Printf.printf "%b\n" rv;*) rv let get_first m_prod prods = let rec find_first_r prods i = match prods with | [] -> raise Not_found | prod :: tl -> if ematch prod m_prod then i else find_first_r tl (i+1) in find_first_r prods 0 let find_first edit prods nt = try get_first edit prods with Not_found -> error "Can't find '%s' in edit for '%s'\n" (prod_to_str edit) nt; raise Not_found module DocGram = struct (* these guarantee that order and map have a 1-1 relationship on the nt name. 
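(Here, order is the ordered list of nonterminal names and map is the NTMap from each name to its list of productions; the g_* operations below keep the two in sync.)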
They don't guarantee that nts on rhs of a production are defined, nor do they prohibit duplicate productions *) exception Duplicate exception Invalid let g_empty () = ref { map = NTMap.empty; order = [] } (* add an nt at the end (if not already present) then set its prods *) let g_maybe_add g nt prods = if not (NTMap.mem nt !g.map) then g := { !g with order = !g.order @ [nt] }; g := { !g with map = NTMap.add nt prods !g.map } (* add an nt at the beginning (if not already present) then set its prods *) let g_maybe_add_begin g nt prods = if not (NTMap.mem nt !g.map) then g := { !g with order = nt :: !g.order }; g := { !g with map = NTMap.add nt prods !g.map } (* reverse the order of the grammar *) let g_reverse g = g := { !g with order = List.rev !g.order } (* update the productions of an existing nt *) let g_update_prods g nt prods = ignore (NTMap.find nt !g.map); (* don't add the nt if it's not present *) g := { !g with map = NTMap.add nt prods !g.map } (* remove a non-terminal *) let g_remove g nt = g := { map = NTMap.remove nt !g.map; order = List.filter (fun elt -> elt <> nt) !g.order } (* rename an nt and update its prods, keeping its original position. If the new name already exists, include its prods *) let g_rename_merge g nt nt' nprods = let oprods = try let oprods = NTMap.find nt' !g.map in g := { !g with order = List.filter (fun elt -> elt <> nt') !g.order }; oprods with Not_found -> g := { !g with map = NTMap.add nt' [] !g.map }; [] in g := { map = NTMap.remove nt !g.map; order = List.map (fun n -> if n = nt then nt' else n) !g.order }; g_update_prods g nt' (oprods @ nprods) (* add a new nonterminal after "ins_after" None means insert at the beginning *) let g_add_after g ?(update=true) ins_after nt prods = if (not update) && NTMap.mem nt !g.map then raise Duplicate; (* don't update the nt if it's already present *) let rec insert_nt order res = match ins_after, order with | None, _ -> nt :: order | Some _, [] -> raise Not_found | Some ins_after_nt, hd :: tl -> if hd = ins_after_nt then (List.rev res) @ (hd :: nt :: tl) else insert_nt tl (hd :: res) in g := { order = insert_nt !g.order []; map = NTMap.add nt prods !g.map } let g_add_prod_after g ins_after nt prod = let prods = try NTMap.find nt !g.map with Not_found -> [] in if prods <> [] then g_update_prods g nt (prods @ [prod]) else g_add_after g ~update:true ins_after nt [prod] (* replace the map and order *) let g_reorder g map order = let order_nts = StringSet.of_list order in let map_nts = List.fold_left (fun res b -> let (nt, _) = b in StringSet.add nt res) StringSet.empty (NTMap.bindings map) in if List.length order <> NTMap.cardinal map || not (StringSet.equal order_nts map_nts) then raise Invalid; g := { order = order; map = map } end open DocGram let rec output_prodn = function | Sterm s -> let s = match s with | "|}" -> "%|%}" | "{|" -> "%{%|" | "`{" -> "`%{" | "@{" -> "@%{" | "|-" -> "%|-" | "|->" -> "%|->" | "||" -> "%||" | "|||" -> "%|||" | "||||" -> "%||||" | "{" | "}" | "|" -> "%" ^ s | _ -> s in sprintf "%s" s | Snterm s -> sprintf "@%s" s | Slist1 sym -> sprintf "{+ %s }" (output_prodn sym) | Slist1sep (sym, sep) -> sprintf "{+%s %s }" (output_sep sep) (output_prodn sym) | Slist0 sym -> sprintf "{* %s }" (output_prodn sym) | Slist0sep (sym, sep) -> sprintf "{*%s %s }" (output_sep sep) (output_prodn sym) | Sopt sym -> sprintf "{? 
%s }" (output_prodn sym) | Sparen sym_list -> sprintf "%s" (prod_to_prodn sym_list) | Sprod sym_list -> let lcurly, rcurly = if List.length sym_list = 1 then "", "" else "{| ", " }" in sprintf "%s%s%s" lcurly (String.concat " " (List.mapi (fun i r -> let prod = (prod_to_prodn r) in let sep = if i = 0 then "" else if prod <> "" then "| " else "|" in sprintf "%s%s" sep prod) sym_list)) rcurly | Sedit s -> sprintf "%s" s | Sedit2 ("TAG", s2) -> "" | Sedit2 (s, s2) -> sprintf "%s \"%s\"" s s2 and output_sep sep = match sep with | Sterm s -> sprintf "%s" s (* avoid escaping separator *) | _ -> output_prodn sep and prod_to_prodn_r prod = match prod with | Sterm s :: Snterm "ident" :: tl when omit_space s -> (sprintf "%s@ident" s) :: (prod_to_prodn_r tl) | p :: tl -> (output_prodn p) :: (prod_to_prodn_r tl) | [] -> [] and prod_to_prodn prod = String.concat " " (prod_to_prodn_r prod) let get_tag file prod = List.fold_left (fun rv sym -> match sym with (* todo: only Ltac2 and SSR for now, outside of their main chapters *) | Sedit2 ("TAG", "Ltac2") when file <> "doc/sphinx/proof-engine/ltac2.rst" -> " Ltac2" | Sedit2 ("TAG", "SSR") when file <> "doc/sphinx/proof-engine/ssreflect-proof-language.rst" -> " SSR" | _ -> rv ) "" prod let pr_prods nt prods = (* duplicative *) Printf.printf "%s: [\n" nt; List.iter (fun prod -> let str = prod_to_str ~plist:false prod in let pfx = if str = "" then "|" else "| " in Printf.printf "%s%s\n" pfx str) prods; Printf.printf "]\n\n" type fmt = [`MLG | `PRODLIST | `PRODN ] (* print a subset of the grammar with nts in the specified order *) let print_in_order out g fmt nt_order hide = List.iter (fun nt -> if not (StringSet.mem nt hide) then try let prods = NTMap.find nt !g.map in match fmt with | `MLG -> fprintf out "%s: [\n" nt; List.iter (fun prod -> let str = prod_to_str ~plist:false prod in let pfx = if str = "" then "|" else "| " in fprintf out "%s%s\n" pfx str) prods; fprintf out "]\n\n" | `PRODLIST -> fprintf out "%s :" nt; List.iteri (fun i prod -> if i > 0 then fprintf out "%s :" (String.make (String.length nt) ' '); let str = prod_to_str ~plist:true prod in let pfx = if str = "" then "" else " " in fprintf out "%s%s\n" pfx str) prods; | `PRODN -> fprintf out "\n%s:\n%s " nt nt; List.iteri (fun i prod -> let str = prod_to_prodn prod in let op = if i = 0 then "::=" else "+=" in fprintf out "%s %s\n" op str) prods; with Not_found -> error "Missing nt '%s' in print_in_order\n" nt) nt_order (*** Read grammar routines ***) let cvt_ext prod = let rec to_doc_sym = function | Ulist1 sym -> Slist1 (to_doc_sym sym) | Ulist1sep (sym, s) -> Slist1sep ((to_doc_sym sym), Sterm s) | Ulist0 sym -> Slist0 (to_doc_sym sym) | Ulist0sep (sym, s) -> Slist0sep ((to_doc_sym sym), Sterm s) | Uopt sym -> Sopt (to_doc_sym sym) | Uentry s -> Snterm s | Uentryl (s, i) -> Snterm (s ^ (string_of_int i)) in let from_ext = function | ExtTerminal s -> Sterm s | ExtNonTerminal (s, _) -> to_doc_sym s in List.map from_ext prod let keywords = ref StringSet.empty let rec cvt_gram_sym = function | GSymbString s -> Sterm s | GSymbQualid (s, level) -> Snterm (match level with | Some str -> s ^ str | None -> s) | GSymbParen l -> Sparen (cvt_gram_sym_list l) | GSymbProd ll -> let cvt = List.map cvt_gram_prod ll in (match cvt with | (Snterm x :: []) :: [] -> Snterm x | (Sterm x :: []) :: [] -> Sterm x | _ -> Sprod cvt) and cvt_gram_sym_list l = let get_sym = function | GSymbQualid (s, level) -> s | _ -> "" in match l with | GSymbQualid ("LIST0", _) :: s :: GSymbQualid ("SEP", _) :: sep :: tl -> Slist0sep 
(cvt_gram_sym s, cvt_gram_sym sep) :: cvt_gram_sym_list tl | GSymbQualid ("LIST1", _) :: s :: GSymbQualid ("SEP", _) :: sep :: tl -> Slist1sep (cvt_gram_sym s, cvt_gram_sym sep) :: cvt_gram_sym_list tl | GSymbQualid ("LIST0", _) :: s :: tl -> Slist0 (cvt_gram_sym s) :: cvt_gram_sym_list tl | GSymbQualid ("LIST1", _) :: s :: tl -> Slist1 (cvt_gram_sym s) :: cvt_gram_sym_list tl | GSymbQualid ("OPT", _) :: s :: tl -> Sopt (cvt_gram_sym s) :: cvt_gram_sym_list tl | GSymbQualid ("IDENT", _) :: s2 :: tl when get_sym s2 = "" -> cvt_gram_sym s2 :: cvt_gram_sym_list tl | GSymbQualid ("ADD_OPT", _) :: tl -> (Sedit "ADD_OPT") :: cvt_gram_sym_list tl | GSymbQualid ("NOTE", _) :: GSymbQualid (s2, l) :: tl -> (Sedit2 ("NOTE", s2)) :: cvt_gram_sym_list tl | GSymbQualid ("USE_NT", _) :: GSymbQualid (s2, l) :: tl -> (Sedit2 ("USE_NT", s2)) :: cvt_gram_sym_list tl | GSymbQualid ("TAG", _) :: GSymbQualid (s2, l) :: tl -> (Sedit2 ("TAG", s2)) :: cvt_gram_sym_list tl | GSymbQualid ("TAG", _) :: GSymbString (s2) :: tl -> (Sedit2 ("TAG", s2)) :: cvt_gram_sym_list tl | GSymbString s :: tl -> (* todo: not seeing "(bfs)" here for some reason *) keywords := StringSet.add s !keywords; cvt_gram_sym (GSymbString s) :: cvt_gram_sym_list tl | hd :: tl -> cvt_gram_sym hd :: cvt_gram_sym_list tl | [] -> [] and cvt_gram_prod p = List.concat (List.map (fun x -> let _, gs = x in cvt_gram_sym_list gs) p.gprod_symbs) let add_symdef nt file symdef_map = let ent = try StringMap.find nt !symdef_map with Not_found -> [] in symdef_map := StringMap.add nt (Filename.basename file::ent) !symdef_map let rec edit_SELF nt cur_level next_level right_assoc inner prod = let subedit sym = List.hd (edit_SELF nt cur_level next_level right_assoc true [sym]) in let len = List.length prod in List.mapi (fun i sym -> match sym with | Sterm _ -> sym | Snterm s when s = nt || s = "SELF"-> if inner then Snterm nt (* first level *) else if i = 0 then Snterm next_level else if i + 1 = len then (if right_assoc then Snterm cur_level else Snterm next_level) else Snterm nt | Snterm "NEXT" -> Snterm next_level | Snterm _ -> sym | Slist1 sym -> Slist1 (subedit sym) | Slist0 sym -> Slist0 (subedit sym) | Slist1sep (sym, sep) -> Slist1sep ((subedit sym), (subedit sep)) | Slist0sep (sym, sep) -> Slist0sep ((subedit sym), (subedit sep)) | Sopt sym -> Sopt (subedit sym) | Sparen syms -> Sparen (List.map (fun sym -> subedit sym) syms) | Sprod prods -> Sprod (List.map (fun prod -> edit_SELF nt cur_level next_level right_assoc true prod) prods) | Sedit _ -> sym | Sedit2 _ -> sym) prod let autoloaded_mlgs = [ (* productions from other mlgs are marked with TAGs *) "parsing/g_constr.mlg"; "parsing/g_prim.mlg"; "plugins/btauto/g_btauto.mlg"; "plugins/cc/g_congruence.mlg"; "plugins/firstorder/g_ground.mlg"; "plugins/ltac/coretactics.mlg"; "plugins/ltac/extraargs.mlg"; "plugins/ltac/extratactics.mlg"; "plugins/ltac/g_auto.mlg"; "plugins/ltac/g_class.mlg"; "plugins/ltac/g_eqdecide.mlg"; "plugins/ltac/g_ltac.mlg"; "plugins/ltac/g_obligations.mlg"; "plugins/ltac/g_rewrite.mlg"; "plugins/ltac/g_tactic.mlg"; "plugins/ltac/profile_ltac_tactics.mlg"; "plugins/rtauto/g_rtauto.mlg"; "plugins/syntax/g_number_string.mlg"; "toplevel/g_toplevel.mlg"; "vernac/g_proofs.mlg"; "vernac/g_vernac.mlg"; ] let has_match p prods = List.exists (fun p2 -> ematch p p2) prods let plugin_regex = Str.regexp "^plugins/\\([a-zA-Z0-9_]+\\)/" let level_regex = Str.regexp "[a-zA-Z0-9_]*$" let get_plugin_name file = if List.mem file ["plugins/ssr/ssrparser.mlg"; "plugins/ssr/ssrvernac.mlg"; 
"plugins/ssrmatching/g_ssrmatching.mlg"] then "SSR" else if Str.string_match plugin_regex file 0 then Str.matched_group 1 file else "" let read_mlg g is_edit ast file level_renames symdef_map = let res = ref [] in let locals = ref StringSet.empty in let dup_renames = ref StringMap.empty in let add_prods nt prods gramext_globals = if not is_edit then if NTMap.mem nt !g.map && not (List.mem nt gramext_globals) && nt <> "command" && nt <> "simple_tactic" then begin let new_name = String.uppercase_ascii (Filename.remove_extension (Filename.basename file)) ^ "_" ^ nt in dup_renames := StringMap.add nt new_name !dup_renames; if false then Printf.printf "** dup local sym %s -> %s in %s\n" nt new_name file end; add_symdef nt file symdef_map; let plugin = get_plugin_name file in let prods = if not is_edit && not (List.mem file autoloaded_mlgs) && plugin <> "" then List.map (fun p -> p @ [Sedit2 ("TAG", plugin)]) prods else prods in (* todo: doesn't yet work perfectly with SPLICE *) (* let prods = if not is_edit then List.map (fun p -> p @ [Sedit2 ("FILE", file)]) prods else prods in*) res := (nt, prods) :: !res in let prod_loop = function | GramExt grammar_ext -> let get_label = function | Some s -> s | None -> "" in let gramext_globals = ref grammar_ext.gramext_globals in List.iter (fun ent -> let pos, rules = match ent.gentry_rules with | GDataFresh (pos, r) -> (pos, r) | GDataReuse (lbl, r) -> let r = { grule_label = lbl; grule_assoc = None; grule_prods = r; } in (None, [r]) in let len = List.length rules in List.iteri (fun i rule -> let nt = ent.gentry_name in if not (List.mem nt !gramext_globals) then locals := StringSet.add nt !locals; let level = (get_label rule.grule_label) in let level = if level <> "" then level else match pos with | Some (Before lev) | Some (After lev) -> lev (* Looks like FIRST/LAST can be ignored for documenting the current grammar *) | _ -> "" in if len > 1 && level = "" then error "Missing level string for `%s`\n" nt else if not (Str.string_match level_regex level 0) then error "Invalid level string `%s` for `%s`\n" level nt; let cur_level = nt ^ level in let next_level = nt ^ if i+1 < len then (get_label (List.nth rules (i+1)).grule_label) else "" in let right_assoc = (rule.grule_assoc = Some RightA) in if i = 0 && cur_level <> nt && not (StringMap.mem nt !level_renames) then begin level_renames := StringMap.add nt cur_level !level_renames; end; let cvted = List.map cvt_gram_prod rule.grule_prods in (* edit names for levels *) (* See https://camlp5.github.io/doc/html/grammars.html#b:Associativity *) let edited = List.map (edit_SELF nt cur_level next_level right_assoc false) cvted in let prods_to_add = if cur_level <> nt && i+1 < len then edited @ [[Snterm next_level]] else edited in if cur_level <> nt && List.mem nt !gramext_globals then gramext_globals := cur_level :: !gramext_globals; add_prods cur_level prods_to_add !gramext_globals) rules ) grammar_ext.gramext_entries | VernacExt vernac_ext -> let node = match vernac_ext.vernacext_entry with | None -> "command" | Some c -> String.trim c.code in add_prods node (List.map (fun r -> cvt_ext r.vernac_toks) vernac_ext.vernacext_rules) [] | VernacArgumentExt vernac_argument_ext -> add_prods vernac_argument_ext.vernacargext_name (List.map (fun r -> cvt_ext r.tac_toks) vernac_argument_ext.vernacargext_rules) [] | TacticExt tactic_ext -> add_prods "simple_tactic" (List.map (fun r -> cvt_ext r.tac_toks) tactic_ext.tacext_rules) [] | ArgumentExt argument_ext -> add_prods argument_ext.argext_name (List.map (fun r -> cvt_ext 
r.tac_toks) argument_ext.argext_rules) [] | _ -> () in List.iter prod_loop ast; List.rev !res, !locals, !dup_renames let dir s = "doc/tools/docgram/" ^ s let read_mlg_edit file = let fdir = dir file in let level_renames = ref StringMap.empty in (* ignored *) let symdef_map = ref StringMap.empty in (* ignored *) let prods, _, _ = read_mlg (g_empty ()) true (parse_file fdir) fdir level_renames symdef_map in prods let add_rule g nt prods file = let ent = try NTMap.find nt !g.map with Not_found -> [] in let nodups = List.concat (List.map (fun prod -> if has_match prod ent then begin if !show_warn then warn "%s: Duplicate production '%s: %s'\n" file nt (prod_to_str prod); [] end else [prod]) prods) in g_maybe_add_begin g nt (ent @ nodups) let remove_Sedit2 p = List.filter (fun sym -> match sym with | Sedit2 _ -> false | _ -> true) p (* edit a production: rename nonterminals, drop nonterminals, substitute nonterminals *) let rec edit_prod g top edit_map prod = let edit_nt edit_map sym0 nt = try let binding = StringMap.find nt edit_map in match binding with | "DELETE" -> [] | "SPLICE" -> begin try let splice_prods = NTMap.find nt !g.map in match splice_prods with | [] -> error "Empty splice for '%s'\n" nt; [] | [p] -> List.rev (remove_Sedit2 p) | _ -> [Sprod (List.map remove_Sedit2 splice_prods)] (* todo? check if we create a dup *) with Not_found -> error "Missing nt '%s' for splice\n" nt; [Snterm nt] end | _ -> [Snterm binding] with Not_found -> [sym0] in let maybe_wrap syms = match syms with | s :: [] -> List.hd syms | s -> Sparen (List.rev syms) in let rec edit_symbol sym0 = match sym0 with | Sterm s -> [sym0] | Snterm s -> edit_nt edit_map sym0 s | Slist1 sym -> [Slist1 (maybe_wrap (edit_symbol sym))] (* you'll get a run-time failure deleting a SEP symbol *) | Slist1sep (sym, sep) -> [Slist1sep (maybe_wrap (edit_symbol sym), (List.hd (edit_symbol sep)))] | Slist0 sym -> [Slist0 (maybe_wrap (edit_symbol sym))] | Slist0sep (sym, sep) -> [Slist0sep (maybe_wrap (edit_symbol sym), (List.hd (edit_symbol sep)))] | Sopt sym -> [Sopt (maybe_wrap (edit_symbol sym))] | Sparen slist -> [Sparen (List.hd (edit_prod g false edit_map slist))] | Sprod slistlist -> let (_, prods) = edit_rule g edit_map "" slistlist in [Sprod prods] | Sedit _ | Sedit2 _ -> [sym0] (* these constructors not used here *) in let is_splice nt = try StringMap.find nt edit_map = "SPLICE" with Not_found -> false in let get_splice_prods nt = try NTMap.find nt !g.map with Not_found -> (error "Missing nt '%s' for splice\n" nt; []) in (* special case splice creating multiple new productions *) let splice_prods = match prod with | Snterm nt :: [] when is_splice nt -> get_splice_prods nt | Snterm nt :: Sedit2 ("TAG", _) :: [] when is_splice nt -> get_splice_prods nt | _ -> [] in if top && splice_prods <> [] then splice_prods else [List.rev (List.concat (List.rev (List.map (fun sym -> edit_symbol sym) prod)))] and edit_rule g edit_map nt rule = let nt = try let new_name = StringMap.find nt edit_map in match new_name with | "SPLICE" -> nt | "DELETE" -> "" | _ -> new_name with Not_found -> nt in (nt, (List.concat (List.map (edit_prod g true edit_map) rule))) let read_mlg_files g args symdef_map = let level_renames = ref StringMap.empty in let last_autoloaded = List.hd (List.rev autoloaded_mlgs) in List.iter (fun file -> (* todo: ??? 
does nt renaming, deletion and splicing *) let rules, locals, dup_renames = read_mlg g false (parse_file file) file level_renames symdef_map in let numprods = List.fold_left (fun num rule -> let nt, prods = rule in (* rename local duplicates *) let prods = List.map (fun prod -> List.hd (edit_prod g true dup_renames prod)) prods in let nt = try StringMap.find nt dup_renames with Not_found -> nt in (* if NTMap.mem nt !g.map && (StringSet.mem nt locals) &&*) (* StringSet.cardinal (StringSet.of_list (StringMap.find nt !symdef_map)) > 1 then*) (* warn "%s: local nonterminal '%s' already defined\n" file nt; (* todo: goes away *)*) add_rule g nt prods file; num + List.length prods) 0 rules in if args.verbose then begin Printf.eprintf "%s: %d nts, %d prods\n" file (List.length rules) numprods; if file = last_autoloaded then Printf.eprintf " Optionally loaded plugins:\n" end ) args.mlg_files; g_reverse g; !level_renames (* get the nt's in the production, preserving order, don't worry about dups *) let nts_in_prod prod = let rec traverse = function | Sterm s -> [] | Snterm s -> if List.mem s tokens then [] else [s] | Slist1 sym | Slist0 sym | Sopt sym -> traverse sym | Slist1sep (sym, sep) | Slist0sep (sym, sep) -> traverse sym @ (traverse sep) | Sparen sym_list -> List.concat (List.map traverse sym_list) | Sprod sym_list_list -> List.concat (List.map (fun l -> List.concat (List.map traverse l)) sym_list_list) | Sedit _ | Sedit2 _ -> [] in List.rev (List.concat (List.map traverse prod)) let get_refdef_nts g = let rec get_nts_r refd defd bindings = match bindings with | [] -> refd, defd | (nt, prods) :: tl -> get_nts_r (List.fold_left (fun res prod -> StringSet.union res (StringSet.of_list (nts_in_prod prod))) refd prods) (StringSet.add nt defd) tl in let toks = StringSet.of_list tokens in get_nts_r toks toks (NTMap.bindings !g.map) (*** global editing ops ***) let create_edit_map g op edits = let rec aux edits map = match edits with | [] -> map | edit :: tl -> let (key, binding) = edit in let all_nts_ref, all_nts_def = get_refdef_nts g in (match op with (* todo: messages should tell you which edit file causes the error *) | "SPLICE" -> if not (StringSet.mem key all_nts_def) then error "Undefined nt `%s` in SPLICE\n" key | "DELETE" -> if not (StringSet.mem key all_nts_ref || (StringSet.mem key all_nts_def)) then error "Unused/undefined nt `%s` in DELETE\n" key; | "RENAME" -> if not (StringSet.mem key all_nts_ref || (StringSet.mem key all_nts_def)) then error "Unused/undefined nt `%s` in RENAME\n" key; | "MERGE" -> if not (StringSet.mem key all_nts_ref || (StringSet.mem key all_nts_def)) then error "Unused/undefined nt `%s` in MERGE\n" key; if not (StringSet.mem binding all_nts_ref || (StringSet.mem binding all_nts_def)) then error "Unused/undefined nt `%s` in MERGE\n" key; (* todo: could not get the following code to type check (match binding with | _ :: Snterm new_nt :: _ -> if not (StringSet.mem new_nt all_nts_ref) then error "nt `%s` already exists in %s\n" new_nt op | _ -> ()) *) | _ -> ()); aux tl (StringMap.add key binding map) in aux edits StringMap.empty (* don't deal with Sedit, Sedit2 yet (ever?) *) let rec pmatch fullprod fullpat repl = let map_prod prod = List.concat (List.map (fun s -> pmatch [s] fullpat repl) prod) in let pmatch_wrap sym = let r = pmatch [sym] fullpat repl in match r with | a :: b :: tl -> Sparen r | [a] -> a | x -> error "pmatch: should not happen"; Sterm "??" 
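(* Added note, an illustrative sketch rather than original code: pmatch scans fullprod for occurrences of the symbol sequence fullpat, substituting repl each time and recursing into LIST, OPT, parenthesized and alternative sub-symbols via symmatch_r below; global_repl then applies it to every right-hand side in the grammar. For example, assuming a grammar g loaded above, global_repl g [Snterm "ident_decl"] [Snterm "ident"] would rewrite each right-hand-side occurrence of the ident_decl nonterminal to ident. *)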
in let symmatch_r s = let res = match s with | Slist1 sym -> Slist1 (pmatch_wrap sym) | Slist1sep (sym, sep) -> Slist1sep (pmatch_wrap sym, sep) | Slist0 sym -> Slist0 (pmatch_wrap sym) | Slist0sep (sym, sep) -> Slist0sep (pmatch_wrap sym, sep) | Sopt sym -> Sopt (pmatch_wrap sym) | Sparen prod -> Sparen (map_prod prod) | Sprod prods -> Sprod (List.map map_prod prods) | sym -> sym in (* Printf.printf "symmatch of %s gives %s\n" (prod_to_str [s]) (prod_to_str [res]);*) res in let rec pmatch_r prod pat match_start start_res res = (* Printf.printf "pmatch_r: prod = %s; pat = %s; res = %s\n" (prod_to_str prod) (prod_to_str pat) (prod_to_str res);*) match prod, pat with | _, [] -> let new_res = (List.rev repl) @ res in pmatch_r prod fullpat prod new_res new_res (* subst and continue *) | [], _ -> (List.rev ((List.rev match_start) @ res)) (* leftover partial match *) | hdprod :: tlprod, hdpat :: tlpat -> if hdprod = hdpat then pmatch_r tlprod tlpat match_start start_res res else (* match from the next starting position *) match match_start with | hd :: tl -> let new_res = (symmatch_r hd) :: start_res in pmatch_r tl fullpat tl new_res new_res | [] -> List.rev res (* done *) in pmatch_r fullprod fullpat fullprod [] [] (* global replace of production substrings, rhs only *) let global_repl g pat repl = List.iter (fun nt -> g_update_prods g nt (List.map (fun prod -> pmatch prod pat repl) (NTMap.find nt !g.map)) ) !g.order (*** splice: replace a reference to a nonterminal with its definition ***) (* todo: create a better splice routine *) let apply_splice g edit_map = List.iter (fun b -> let (nt0, prods0) = b in let rec splice_loop nt prods cnt = if cnt >= 10 then begin error "Splice for '%s' not done after %d iterations. Current value is:\n" nt0 cnt; List.iter (fun prod -> Printf.eprintf " %s\n" (prod_to_str prod)) prods; (nt, prods) end else begin let (nt', prods') = edit_rule g edit_map nt prods in if nt' = nt && prods' = prods then (nt, prods) else splice_loop nt' prods' (cnt+1) end in let (nt', prods') = splice_loop nt0 prods0 0 in g_update_prods g nt' prods') (NTMap.bindings !g.map); List.iter (fun b -> let (nt, op) = b in match op with | "DELETE" | "SPLICE" -> g_remove g nt; | _ -> ()) (StringMap.bindings edit_map) let remove_prod edit prods nt = let res, got_first = List.fold_left (fun args prod -> let res, got_first = args in if not got_first && ematch prod edit then res, true else prod :: res, got_first) ([], false) prods in if not got_first then error "Can't find '%s' to DELETE for '%s'\n" (prod_to_str edit) nt; List.rev res let insert_after posn insert prods = List.concat (List.mapi (fun i prod -> if i = posn then prod :: insert else [prod]) prods) (*** replace LIST*, OPT with new nonterminals ***) (* generate a non-terminal name for a replacement *) let nt_regex = Str.regexp "^[a-zA-Z_][a-zA-Z0-9_\\.]*$" let good_name name = if Str.string_match nt_regex name 0 then name else "" let map_name s = let s = match s with | "|" -> "or" | "!" -> "exclam" | ">" -> "gt" | "<" -> "lt" | "+" -> "plus" | "?" 
-> "qmark" | "}" -> "rbrace" | "," -> "comma" | ";" -> "semi" | _ -> s in good_name s let rec gen_nt_name sym = let name_from_prod prod = let rec aux name sterm_name prod = if name <> "" then name else match prod with | [] -> sterm_name | Sterm s :: tl | Snterm s :: tl -> if good_name s <> "" then aux (map_name s) sterm_name tl else aux name (map_name s) tl; | sym :: tl-> aux (gen_nt_name sym) sterm_name tl in aux "" "" prod in let name = match sym with | Sterm s -> map_name s | Snterm s -> s | Slist1 sym | Slist1sep (sym, _) | Slist0 sym | Slist0sep (sym, _) | Sopt sym -> gen_nt_name sym | Sparen slist -> name_from_prod slist | Sprod slistlist -> name_from_prod (List.hd slistlist) | _ -> "" in good_name name (* create a new nt for LIST* or OPT with the specified name *) let maybe_add_nt g insert_after name sym queue = let empty = [Snterm "empty"] in let maybe_unwrap ?(multi=false) sym = match sym with | Sprod slist when List.length slist = 1 || multi -> slist | Sparen slist -> [ slist ] | _ -> [ [sym] ] in let unw sym = List.hd (maybe_unwrap sym) in let get_prods nt = match sym with | Slist1 sym -> let sym' = unw sym in [ [Snterm nt] @ sym'; sym' ] | Slist1sep (sym, sep) | Slist0sep (sym, sep) -> let sym' = unw sym in [ [Snterm nt; sep] @ sym'; sym' ] | Slist0 sym -> [ [Snterm nt] @ (unw sym); empty ] | Sopt sym -> (maybe_unwrap ~multi:true sym) @ [ empty ] | Sprod slistlist -> slistlist | _ -> [] in let is_slist0sep sym = match sym with | Slist0sep _ -> true | _ -> false in (* find an existing nt with an identical definition, or generate an unused nt name *) let rec find_name nt i = let trial_name = sprintf "%s%s" nt (if i = 1 then "" else string_of_int i) in try if NTMap.find trial_name !g.map = get_prods trial_name then trial_name else find_name nt (succ i) with Not_found -> trial_name in let list_name sep = match sep with | Sterm s -> let name = map_name s in if name = s then "_list" else "_list_" ^ name | _ -> "_list" in let nt = name ^ match sym with | Slist1 sym -> "_list" | Slist1sep (sym, sep) -> list_name sep | Slist0 sym -> "_list_opt" | Slist0sep (sym, sep) -> list_name sep (* special handling *) | Sopt sym -> "_opt" | Sprod slistlist -> "_alt" | _ -> (error "Invalid symbol for USE_NT for nt '%s'\n" name; "ERROR") in let base_nt = find_name nt 1 in let new_nt = if is_slist0sep sym then base_nt ^ "_opt" else base_nt in if not (NTMap.mem new_nt !g.map) then begin let prods = if is_slist0sep sym then [ [Snterm base_nt]; empty ] else get_prods base_nt in g_add_after g (Some !insert_after) new_nt prods; insert_after := new_nt; Queue.add new_nt queue end; if is_slist0sep sym && not (NTMap.mem base_nt !g.map) then begin match sym with | Slist0sep (sym, sep) -> let prods = get_prods base_nt in g_add_after g (Some !insert_after) base_nt prods; insert_after := base_nt; Queue.add base_nt queue | _ -> () end; new_nt let apply_merge g edit_map = List.iter (fun b -> let (from_nt, to_nt) = b in let from_prods = NTMap.find from_nt !g.map in List.iter (fun prod -> try ignore( get_first prod (NTMap.find to_nt !g.map)); with Not_found -> g_add_prod_after g None to_nt prod) from_prods) (NTMap.bindings edit_map) let apply_rename_delete g edit_map = List.iter (fun b -> let (nt, _) = b in let prods = try NTMap.find nt !g.map with Not_found -> [] in let (nt', prods') = edit_rule g edit_map nt prods in if nt' = "" then g_remove g nt else if nt <> nt' then g_rename_merge g nt nt' prods' else g_update_prods g nt prods') (NTMap.bindings !g.map) let edit_all_prods g op eprods = let do_it op eprods num = 
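(* Descriptive note: do_it turns each edit "production" into a (key, binding)
   pair for create_edit_map: two nonterminals for RENAME and MERGE (the old and
   the new name), a single nonterminal for DELETE and SPLICE, and then
   dispatches to the corresponding apply_* routine.  In an edit file such a
   global edit is written as an ordinary rule whose head is the operation name,
   for instance (hypothetical nonterminal names):

     RENAME: [
     | some_old_nt some_new_nt
     ]

   which renames some_old_nt to some_new_nt throughout the grammar. *)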
let rec aux eprods res = match eprods with | [] -> res | [Snterm old_nt; Snterm new_nt] :: tl when num = 2 -> aux tl ((old_nt, new_nt) :: res) | [Snterm old_nt] :: tl when num = 1 -> aux tl ((old_nt, op) :: res) | eprod :: tl -> error "Production '%s: %s' must have only %d nonterminal(s)\n" op (prod_to_str eprod) num; aux tl res in let edit_map = create_edit_map g op (aux eprods []) in match op with | "SPLICE" -> apply_splice g edit_map | "MERGE" -> apply_merge g edit_map; apply_rename_delete g edit_map | "RENAME" | "DELETE" -> apply_rename_delete g edit_map | _ -> () in match op with | "RENAME" -> do_it op eprods 2; true | "DELETE" -> do_it op eprods 1; true | "SPLICE" -> do_it op eprods 1; true | "MERGE" -> do_it op eprods 2; true | "OPTINREF" -> List.iter (fun nt -> let prods = NTMap.find nt !g.map in if has_match [] prods then begin let prods' = remove_prod [] prods nt in g_update_prods g nt prods'; global_repl g [(Snterm nt)] [(Sopt (Snterm nt))] end) !g.order; true | _ -> false let edit_single_prod g edit0 prods nt = let rec edit_single_prod_r edit prods nt seen = match edit with | [] -> prods | Sedit "ADD_OPT" :: sym :: tl -> let prods' = (try let pfx = List.rev seen in let posn = find_first edit0 prods nt in let prods = insert_after posn [pfx @ (Sopt sym :: tl)] prods in let prods = remove_prod (pfx @ (sym :: tl)) prods nt in remove_prod (pfx @ tl) prods nt with Not_found -> prods) in edit_single_prod_r tl prods' nt seen | Sedit "ADD_OPT" :: [] -> error "Bad position for ADD_OPT\n"; prods | Sedit2 ("USE_NT", name) :: sym :: tl -> let prods' = (try let nt = maybe_add_nt g (ref nt) name sym (Queue.create ()) in let pfx = List.rev seen in let posn = find_first edit0 prods nt in let prods = insert_after posn [pfx @ (Snterm nt :: tl)] prods in remove_prod (pfx @ (sym :: tl)) prods nt with Not_found -> prods) in edit_single_prod_r tl prods' nt seen | Sedit2 ("USE_NT", _) :: [] -> error "Bad position for USE_NT\n"; prods | sym :: tl -> edit_single_prod_r tl prods nt (sym :: seen) in edit_single_prod_r edit0 prods nt [] let report_undef_nts g prod rec_nt = let nts = nts_in_prod prod in List.iter (fun nt -> if not (NTMap.mem nt !g.map) && not (List.mem nt tokens) && nt <> rec_nt then error "Undefined nonterminal `%s` in edit: %s\n" nt (prod_to_str prod)) nts let apply_edit_file g edits = let moveto src_nt dest_nt oprod prods = g_add_prod_after g (Some src_nt) dest_nt oprod; remove_prod oprod prods src_nt (* remove orig prod *) in List.iter (fun b -> let (nt, eprod) = b in if not (edit_all_prods g nt eprod) then begin let rec aux eprod prods add_nt = match eprod with | [] -> prods, add_nt | (Snterm "DELETE" :: oprod) :: tl -> aux tl (remove_prod oprod prods nt) add_nt | (Snterm "DELETENT" :: _) :: tl -> (* note this doesn't remove references *) if not (NTMap.mem nt !g.map) then error "DELETENT for undefined nonterminal `%s`\n" nt; g_remove g nt; aux tl prods false | (Snterm "MOVETO" :: Snterm dest_nt :: oprod) :: tl -> let prods = try (* add "nt -> dest_nt" production *) let posn = find_first oprod prods nt in if List.mem [Snterm dest_nt] prods then prods else insert_after posn [[Snterm dest_nt]] prods (* insert new prod *) with Not_found -> prods in let prods' = moveto nt dest_nt oprod prods in aux tl prods' add_nt | [Snterm "COPYALL"; Snterm dest_nt] :: tl -> if NTMap.mem dest_nt !g.map then error "COPYALL target nonterminal `%s` already exists\n" dest_nt; g_maybe_add g dest_nt prods; aux tl prods add_nt | [Snterm "MOVEALLBUT"; Snterm dest_nt] :: tl -> List.iter (fun tlprod -> if not 
(List.mem tlprod prods) then error "MOVEALLBUT for %s can't find '%s'\n" nt (prod_to_str tlprod)) tl; let prods' = List.fold_left (fun prods oprod -> if not (List.mem oprod tl) then begin moveto nt dest_nt oprod prods end else prods) prods prods in prods', add_nt | (Snterm "OPTINREF" :: _) :: tl -> if not (has_match [] prods) then error "OPTINREF but no empty production for %s\n" nt; global_repl g [(Snterm nt)] [(Sopt (Snterm nt))]; aux tl (remove_prod [] prods nt) add_nt | (Snterm "INSERTALL" :: syms) :: tl -> aux tl (List.map (fun p -> syms @ p) prods) add_nt | (Snterm "APPENDALL" :: syms) :: tl -> aux tl (List.map (fun p -> p @ syms) prods) add_nt | (Snterm "PRINT" :: _) :: tl -> pr_prods nt prods; aux tl prods add_nt | (Snterm "EDIT" :: oprod) :: tl -> aux tl (edit_single_prod g oprod prods nt) add_nt | (Snterm "REPLACE" :: oprod) :: (Snterm "WITH" :: rprod) :: tl -> report_undef_nts g rprod ""; (* todo: check result not already present *) let prods' = (try let posn = find_first oprod prods nt in let prods = insert_after posn [rprod] prods in (* insert new prod *) remove_prod oprod prods nt (* remove orig prod *) with Not_found -> prods) in aux tl prods' add_nt | (Snterm "REPLACE" :: _ as eprod) :: tl -> error "Missing WITH after '%s' in '%s'\n" (prod_to_str eprod) nt; aux tl prods add_nt (* todo: check for unmatched editing keywords here *) | prod :: tl -> (* add a production *) if has_match prod prods then error "Duplicate production '%s' for %s\n" (prod_to_str prod) nt; report_undef_nts g prod nt; aux tl (prods @ [prod]) add_nt in let prods, add_nt = aux eprod (try NTMap.find nt !g.map with Not_found -> []) true in if add_nt then g_maybe_add g nt prods end) edits (*** main routines ***) (* get the special tokens in the grammar *) let print_special_tokens g = let rec traverse set = function | Sterm s -> let c = s.[0] in if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') then set else StringSet.add s set | Snterm s -> set | Slist1 sym | Slist0 sym | Sopt sym -> traverse set sym | Slist1sep (sym, sep) | Slist0sep (sym, sep) -> traverse (traverse set sym) sep | Sparen sym_list -> traverse_prod set sym_list | Sprod sym_list_list -> traverse_prods set sym_list_list | Sedit _ | Sedit2 _ -> set and traverse_prod set prod = List.fold_left traverse set prod and traverse_prods set prods = List.fold_left traverse_prod set prods in let spec_toks = List.fold_left (fun set b -> let nt, prods = b in traverse_prods set prods) StringSet.empty (NTMap.bindings !g.map) in Printf.printf "Special tokens:"; StringSet.iter (fun t -> Printf.printf " %s" t) spec_toks; Printf.printf "\n\n" (* get the transitive closure of a non-terminal excluding "stops" symbols. 
Preserve ordering to the extent possible *) (* todo: at the moment, the code doesn't use the ordering; consider switching to using sets instead of lists *) let nt_closure g start stops = let stop_set = StringSet.of_list stops in let rec nt_closure_r res todo = match todo with | [] -> res | nt :: tl -> if List.mem nt res || StringSet.mem nt stop_set then nt_closure_r res tl else begin let more_to_do = try let prods = NTMap.find nt !g.map in tl @ (List.concat (List.map nts_in_prod prods)) with Not_found -> tl in nt_closure_r (nt :: res) more_to_do end in List.rev (nt_closure_r [] [start]) let header = "--------------------------------------------" let nt_subset_in_orig_order g nts = let subset = StringSet.of_list nts in List.filter (fun nt -> StringSet.mem nt subset) !g.order let index_of str list = let rec index_of_r str list index = match list with | [] -> None | hd :: list -> if hd = str then Some index else index_of_r str list (index+1) in index_of_r str list 0 exception IsNone (* todo: raise exception for bad n? *) let rec nthcdr n list = if n <= 0 then list else nthcdr (n-1) (List.tl list) let pfx n list = let rec pfx_r n res = function | item :: tl -> if n < 0 then res else pfx_r (n-1) (item :: res) tl | [] -> res in List.rev (pfx_r n [] list) (* todo: adjust Makefile to include Option.ml/mli *) let get_opt = function | Some y -> y | _ -> raise IsNone let get_range g start end_ = let starti, endi = get_opt (index_of start !g.order), get_opt (index_of end_ !g.order) in pfx (endi - starti) (nthcdr starti !g.order) let get_rangeset g start end_ = StringSet.of_list (get_range g start end_) let check_range_consistency g start end_ = let defined_list = get_range g start end_ in let defined = StringSet.of_list defined_list in let referenced = List.fold_left (fun set nt -> let prods = NTMap.find nt !g.map in let refs = List.concat (List.map nts_in_prod prods) in StringSet.union set (StringSet.of_list refs)) StringSet.empty defined_list in let undef = StringSet.diff referenced defined in let unused = StringSet.diff defined referenced in if StringSet.cardinal unused > 0 || (StringSet.cardinal undef > 0) then begin Printf.printf "\nFor range '%s' to '%s':\n External reference:" start end_; StringSet.iter (fun nt -> Printf.printf " %s" nt) undef; Printf.printf "\n"; if StringSet.cardinal unused > 0 then begin Printf.printf " Unreferenced:"; StringSet.iter (fun nt -> Printf.printf " %s" nt) unused; Printf.printf "\n" end end (* print info on symbols with a single production of a single nonterminal *) let check_singletons g = NTMap.iter (fun nt prods -> if List.length prods = 1 && !show_warn then if List.length (remove_Sedit2 (List.hd prods)) = 1 then warn "Singleton non-terminal, maybe SPLICE?: %s\n" nt else (*warn "Single production, maybe SPLICE?: %s\n" nt*) ()) !g.map let report_bad_nts g file = let all_nts_ref, all_nts_def = get_refdef_nts g in let undef = StringSet.diff all_nts_ref all_nts_def in if !show_warn then List.iter (fun nt -> warn "%s: Undefined symbol '%s'\n" file nt) (StringSet.elements undef); let reachable = List.fold_left (fun res sym -> StringSet.union res (StringSet.of_list (nt_closure g sym []))) StringSet.empty start_symbols in let unreachable = List.filter (fun nt -> not (StringSet.mem nt reachable)) !g.order in if !show_warn then List.iter (fun nt -> warn "%s: Unreachable symbol '%s'\n" file nt) unreachable let report_info g symdef_map = let num_prods = List.fold_left (fun sum nt -> let prods = NTMap.find nt !g.map in sum + (List.length prods)) 0 !g.order in 
Printf.eprintf "\nstart symbols: %s\n" (String.concat " " start_symbols); Printf.eprintf "%d nonterminals defined, %d productions\n" (NTMap.cardinal !g.map) num_prods; Printf.eprintf "%d terminals\n" (List.length tokens); Printf.eprintf "\nSymbols with multiple definition points in *.mlg:\n"; let bindings = List.sort (fun a b -> let (ak, _) = a and (bk, _) = b in String.compare ak bk) (StringMap.bindings symdef_map) in List.iter (fun b -> let (k, v) = b in if List.length v > 1 then begin Printf.eprintf " %s: " k; List.iter (fun f -> Printf.eprintf "%s " f) v; Printf.eprintf "\n" end) bindings; Printf.eprintf "\n" [@@@ocaml.warning "-32"] let rec dump prod = match prod with | hd :: tl -> let s = (match hd with | Sterm s -> sprintf "Sterm %s" s | Snterm s -> sprintf "Snterm \"%s\"" s | Slist1 sym -> "Slist1" | Slist0 sym -> "Slist0" | Sopt sym -> "Sopt" | Slist1sep _ -> "Slist1sep" | Slist0sep _ -> "Slist0sep" | Sparen sym_list -> "Sparen" | Sprod sym_list_list -> "Sprod" | Sedit _ -> "Sedit" | Sedit2 _ -> "Sedit2") in Printf.printf "%s " s; dump tl | [] -> Printf.printf "\n" [@@@ocaml.warning "+32"] let reorder_grammar eg reordered_rules file = let og = g_empty () in List.iter (fun rule -> let nt, prods = rule in try (* only keep nts and prods in common with editedGrammar *) let eg_prods = NTMap.find nt !eg.map in let prods = List.filter (fun prod -> (has_match prod eg_prods)) prods in if NTMap.mem nt !og.map && !show_warn then warn "%s: Duplicate nonterminal '%s'\n" file nt; add_rule og nt prods file with Not_found -> ()) reordered_rules; g_reverse og; (* insert a prod in a list after prev_prod (None=at the beginning) *) let rec insert_prod prev_prod prod prods res = match prev_prod, prods with | None, _ -> prod :: prods | Some _, [] -> raise Not_found | Some ins_after_prod, hd :: tl -> if ematch hd ins_after_prod then (List.rev res) @ (hd :: prod :: tl) else insert_prod prev_prod prod tl (hd :: res) in (* insert prods that are not already in og_prods *) let rec upd_prods prev_prod eg_prods og_prods = match eg_prods with | [] -> og_prods | prod :: tl -> let og_prods = if has_match prod og_prods then List.map (fun p -> if ematch p prod then prod else p) og_prods else insert_prod prev_prod prod og_prods [] in upd_prods (Some prod) tl og_prods in (* add nts and prods not present in orderedGrammar *) let _ = List.fold_left (fun prev_nt nt -> let e_prods = NTMap.find nt !eg.map in if not (NTMap.mem nt !og.map) then g_add_after og prev_nt nt e_prods else g_update_prods og nt (upd_prods None e_prods (NTMap.find nt !og.map)); Some nt) None !eg.order in g_reorder eg !og.map !og.order let finish_with_file old_file args = let files_eq f1 f2 = let chunksize = 8192 in (try let ofile = open_in_bin f1 in let nfile = open_in_bin f2 in let rv = if (in_channel_length ofile) <> (in_channel_length nfile) then false else begin let obuf = Bytes.create chunksize in Bytes.fill obuf 0 chunksize '\x00'; let nbuf = Bytes.create chunksize in Bytes.fill nbuf 0 chunksize '\x00'; let rec read () = let olen = input ofile obuf 0 chunksize in let _ = input nfile nbuf 0 chunksize in if obuf <> nbuf then false else if olen = 0 then true else read () in read () end in close_in ofile; close_in nfile; rv with Sys_error _ -> false) in let temp_file = (old_file ^ ".new") in if !exit_code <> 0 then Sys.remove temp_file else if args.verify then begin if not (files_eq old_file temp_file) then begin error "%s is not current\n" old_file; ignore (CUnix.sys_command "diff" [ old_file ; old_file ^ ".new"]) end; Sys.remove temp_file end 
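(* Descriptive note, summarizing the branches of finish_with_file: on any
   recorded error the freshly written .new file is discarded; with -verify it
   is compared byte-for-byte (in 8192-byte chunks, via files_eq) against the
   checked-in file, a diff is printed on mismatch and the .new file is then
   removed; in update mode the .new file replaces the original via Sys.rename;
   with -no-update it is simply left beside the original. *)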
else if args.update then Sys.rename temp_file old_file let open_temp_bin file = open_out_bin (sprintf "%s.new" file) let match_cmd_regex = Str.regexp "[a-zA-Z0-9_ ]+" let match_subscripts = Str.regexp "__[a-zA-Z0-9]+" let remove_subscrs str = Str.global_replace match_subscripts "" str let find_longest_match prods str = let get_pfx str = String.trim (if Str.string_match match_cmd_regex str 0 then Str.matched_string str else "") in let prods = StringSet.fold (fun a lst -> a :: lst) prods [] in (* todo: wasteful! *) let common_prefix_len s1 s2 = let limit = min (String.length s1) (String.length s2) in let rec aux off = if off = limit then off else if s1.[off] = s2.[off] then aux (succ off) else off in aux 0 in let slen = String.length str in let str_pfx = get_pfx str in let no_subscrs = remove_subscrs str in let has_subscrs = no_subscrs <> str in let rec longest best multi best_len prods = match prods with | [] -> best, multi, best_len | prod :: tl -> let pstr = String.trim prod in (* todo: should be pretrimmed *) let clen = common_prefix_len str pstr in if has_subscrs && no_subscrs = pstr then str, false, clen (* exact match ignoring subscripts *) else if pstr = str then pstr, false, clen (* exact match of full line *) else if str_pfx = "" || str_pfx <> get_pfx pstr then longest best multi best_len tl (* prefixes don't match *) else if clen = slen && slen = String.length pstr then pstr, false, clen (* exact match on prefix *) else if clen > best_len then longest pstr false clen tl (* better match *) else if clen = best_len then longest best true best_len tl (* 2nd match with same length *) else longest best multi best_len tl (* worse match *) in let mtch, multi, _ = longest "" false 0 prods in if has_subscrs && mtch <> str then "", multi, mtch (* no match for subscripted entry *) else mtch, multi, "" type seen = { nts: (string * int) NTMap.t; tacs: (string * int) NTMap.t; tacvs: (string * int) NTMap.t; cmds: (string * int) NTMap.t; cmdvs: (string * int) NTMap.t; } (* Sphinx notations can't handle empty productions *) let has_empty_prod rhs = let rec has_empty_prod_r rhs = match rhs with | [] -> false | Sterm _ :: tl | Snterm _ :: tl | Sedit _ :: tl | Sedit2 (_, _) :: tl -> has_empty_prod_r tl | Slist1 sym :: tl | Slist0 sym :: tl | Slist1sep (sym, _) :: tl | Slist0sep (sym, _) :: tl | Sopt sym :: tl -> has_empty_prod_r [ sym ] || has_empty_prod_r tl | Sparen prod :: tl -> List.length prod = 0 || has_empty_prod_r tl | Sprod prods :: tl -> List.fold_left (fun rv prod -> List.length prod = 0 || has_empty_prod_r tl || rv) false prods in List.length rhs = 0 || has_empty_prod_r rhs let process_rst g file args seen tac_prods cmd_prods = let old_rst = open_in file in let new_rst = open_temp_bin file in let linenum = ref 0 in let dir_regex = Str.regexp "^\\([ \t]*\\)\\.\\.[ \t]*\\([a-zA-Z0-9:]* *\\)\\(.*\\)" in let contin_regex = Str.regexp "^\\([ \t]*\\)\\(.*\\)" in let ip_args_regex = Str.regexp "^[ \t]*\\([a-zA-Z0-9_\\.]*\\)[ \t]*\\([a-zA-Z0-9_\\.]*\\)" in let blank_regex = Str.regexp "^[ \t]*$" in let end_prodlist_regex = Str.regexp "^[ \t]*$" in let getline () = let line = input_line old_rst in incr linenum; line in (* todo: maybe pass end_index? 
*) let output_insertprodn start_index end_ indent = let rec copy_prods list = match list with | [] -> () | nt :: tl -> (try let (prev_file, prev_linenum) = NTMap.find nt !seen.nts in if !show_warn then warn "%s line %d: '%s' already included at %s line %d\n" file !linenum nt prev_file prev_linenum; with Not_found -> seen := { !seen with nts = (NTMap.add nt (file, !linenum) !seen.nts)} ); let prods = NTMap.find nt !g.map in List.iteri (fun i prod -> let rhs = String.trim (prod_to_prodn prod) in let tag = get_tag file prod in let sep = if i = 0 then " ::=" else "|" in if has_empty_prod prod then error "%s line %d: Empty (sub-)production for %s, edit to remove: '%s %s'\n" file !linenum nt sep rhs; fprintf new_rst "%s %s%s %s%s\n" indent (if i = 0 then nt else "") sep rhs tag) prods; if nt <> end_ then copy_prods tl in copy_prods (nthcdr start_index !g.order) in let process_insertprodn line rhs = if not (Str.string_match ip_args_regex rhs 0) then error "%s line %d: bad arguments '%s' for 'insertprodn'\n" file !linenum rhs else begin let start = Str.matched_group 1 rhs in let end_ = Str.matched_group 2 rhs in let start_index = index_of start !g.order in let end_index = index_of end_ !g.order in if start_index = None then error "%s line %d: '%s' is undefined in insertprodn\n" file !linenum start; if end_index = None then error "%s line %d: '%s' is undefined in insertprodn\n" file !linenum end_; (* if start_index <> None && end_index <> None then*) (* check_range_consistency g start end_;*) match start_index, end_index with | Some start_index, Some end_index -> if start_index > end_index then error "%s line %d: '%s' must appear before '%s' in orderedGrammar\n" file !linenum start end_ else begin try let line2 = getline() in if not (Str.string_match blank_regex line2 0) then error "%s line %d: expecting a blank line after 'insertprodn'\n" file !linenum else begin let line3 = getline() in if not (Str.string_match dir_regex line3 0) || (String.trim (Str.matched_group 2 line3)) <> "prodn::" then error "%s line %d: expecting '.. prodn::' after 'insertprodn'\n" file !linenum else begin let indent = Str.matched_group 1 line3 in let rec skip_to_end () = let endline = getline() in if Str.string_match end_prodlist_regex endline 0 then begin fprintf new_rst "%s\n\n%s\n" line line3; output_insertprodn start_index end_ indent; fprintf new_rst "%s\n" endline end else skip_to_end () in skip_to_end () end end with End_of_file -> error "%s line %d: unexpected end of file\n" file !linenum; end | _ -> () end in let cmd_exclude_files = [ "doc/sphinx/proof-engine/ssreflect-proof-language.rst"; "doc/sphinx/proof-engine/tactics.rst"; ] in let save_n_get_more direc pfx first_rhs seen_map prods = let replace rhs prods = if StringSet.is_empty prods || (List.mem file cmd_exclude_files) then rhs (* no change *) else let mtch, multi, best = find_longest_match prods rhs in (* Printf.printf "mtch = '%s' rhs = '%s'\n" mtch rhs;*) if mtch = rhs then rhs (* no change *) else if mtch = "" then begin error "%s line %d: NO MATCH for `%s`\n" file !linenum rhs; if best <> "" then begin Printf.eprintf " closest match is: `%s`\n" best; Printf.eprintf " Please update the rst manually while preserving any subscripts, e.g. 'NT__sub'\n" end; rhs end else if multi then begin error "%s line %d: MULTIPLE MATCHES for `%s`\n" file !linenum rhs; Printf.eprintf " Please update the rst manually while preserving any subscripts, e.g. 
'NT__sub'\n"; rhs end else mtch (* update cmd/tacn *) in let map = ref seen_map in if NTMap.mem first_rhs !map && !show_warn then warn "%s line %d: Repeated %s: '%s'\n" file !linenum direc first_rhs; (* if not (StringSet.mem rhs seen_map) then*) (* warn "%s line %d: Unknown tactic: '%s'\n" file !linenum rhs;*) fprintf new_rst "%s%s\n" pfx (replace first_rhs prods); map := NTMap.add (remove_subscrs first_rhs) (file, !linenum) !map; while let nextline = getline() in ignore (Str.string_match contin_regex nextline 0); let indent = Str.matched_group 1 nextline in let rhs = Str.matched_group 2 nextline in let replaceable = rhs <> "" && rhs.[0] <> ':' in let upd_rhs = if replaceable then (replace rhs prods) else rhs in fprintf new_rst "%s%s\n" indent upd_rhs; if replaceable then begin map := NTMap.add rhs (file, !linenum) !map end; rhs <> "" do () done; !map in try while true do let line = getline() in if Str.string_match dir_regex line 0 then begin let dir = String.trim (Str.matched_group 2 line) in let rhs = Str.matched_group 3 line in let pfx = String.sub line 0 (Str.group_end 2) in match dir with | "prodn::" -> if rhs = "coq" && !show_warn then warn "%s line %d: Missing 'insertprodn' before 'prodn:: coq'\n" file !linenum; fprintf new_rst "%s\n" line; | "tacn::" when args.check_tacs -> seen := { !seen with tacs = save_n_get_more "tacn" pfx rhs !seen.tacs tac_prods } | "tacv::" when args.check_tacs -> seen := { !seen with tacvs = save_n_get_more "tacv" pfx rhs !seen.tacvs StringSet.empty } | "cmd::" when args.check_cmds -> seen := { !seen with cmds = save_n_get_more "cmd" pfx rhs !seen.cmds cmd_prods } | "cmdv::" when args.check_cmds -> seen := { !seen with cmdvs = save_n_get_more "cmdv" pfx rhs !seen.cmdvs StringSet.empty } | "insertprodn" -> process_insertprodn line rhs | _ -> fprintf new_rst "%s\n" line end else fprintf new_rst "%s\n" line; done with End_of_file -> (); close_in old_rst; close_out new_rst; finish_with_file file args let report_omitted_prods g seen label split = if !show_warn then begin let included = try List.map (fun prod -> match prod with | Snterm nt :: tl -> nt | _ -> "") (NTMap.find "NOTINRSTS" !g.map) with Not_found -> [] in Printf.printf "\n\n"; let missing = NTMap.filter (fun nt _ -> not (NTMap.mem nt seen || (List.mem nt included))) !g.map in NTMap.iter (fun nt _ -> warn "%s %s not included in .rst files\n" "Nonterminal" nt) missing; let total = NTMap.cardinal missing in if total <> 0 then Printf.eprintf "TOTAL %ss not included = %d\n" label total end let process_grammar args = let symdef_map = ref StringMap.empty in let g = g_empty () in let level_renames = read_mlg_files g args symdef_map in if args.verbose then begin Printf.printf "Keywords:\n"; StringSet.iter (fun kw -> Printf.printf "%s " kw) !keywords; Printf.printf "\n\n"; end; (* rename nts with levels *) List.iter (fun b -> let (nt, prod) = b in let (_, prod) = edit_rule g level_renames nt prod in g_update_prods g nt prod) (NTMap.bindings !g.map); (* print the full grammar with minimal editing *) let out = open_temp_bin (dir "fullGrammar") in fprintf out "%s\n%s\n\n" "(* Coq grammar generated from .mlg files. Do not edit by hand. 
Not compiled into Coq *)" "DOC_GRAMMAR"; print_in_order out g `MLG !g.order StringSet.empty; close_out out; finish_with_file (dir "fullGrammar") args; if args.verbose then print_special_tokens g; if not args.fullGrammar then begin (* do shared edits *) if !exit_code = 0 then begin let common_edits = read_mlg_edit "common.edit_mlg" in apply_edit_file g common_edits end; let prodn_gram = ref { map = !g.map; order = !g.order } in (* todo: should just be 'g', right? *) if !exit_code = 0 && not args.verify then begin let out = open_temp_bin (dir "editedGrammar") in fprintf out "%s\n%s\n\n" "(* Edited Coq grammar generated from .mlg files. Do not edit by hand. Not compiled into Coq *)" "DOC_GRAMMAR"; print_in_order out g `MLG !g.order StringSet.empty; close_out out; finish_with_file (dir "editedGrammar") args; report_bad_nts g "editedGrammar" end; if !exit_code = 0 then begin let ordered_grammar = read_mlg_edit "orderedGrammar" in let out = open_temp_bin (dir "orderedGrammar") in fprintf out "%s\n%s\n\n" ("(* Defines the order to apply to editedGrammar to get the final grammar for the doc.\n" ^ "doc_grammar will modify this file to add/remove nonterminals and productions\n" ^ "to match editedGrammar, which will remove comments. Not compiled into Coq *)") "DOC_GRAMMAR"; reorder_grammar g ordered_grammar "orderedGrammar"; print_in_order out g `MLG !g.order StringSet.empty; close_out out; finish_with_file (dir "orderedGrammar") args; (* check_singletons g*) let seen = ref { nts=NTMap.empty; tacs=NTMap.empty; tacvs=NTMap.empty; cmds=NTMap.empty; cmdvs=NTMap.empty } in let plist nt = let list = (List.map (fun t -> String.trim (prod_to_prodn t)) (NTMap.find nt !g.map)) in list, StringSet.of_list list in let tac_list, tac_prods = plist "simple_tactic" in let cmd_list, cmd_prods = plist "command" in List.iter (fun file -> process_rst g file args seen tac_prods cmd_prods) args.rst_files; report_omitted_prods g !seen.nts "Nonterminal" ""; let out = open_out (dir "updated_rsts") in close_out out; (* if args.check_tacs then report_omitted_prods tac_list !seen.tacs "Tactic" "\n "; if args.check_cmds then report_omitted_prods cmd_list !seen.cmds "Command" "\n "; *) (* generate report on cmds or tacs *) let cmdReport outfile cmdStr cmd_nts cmds cmdvs = let rstCmds = StringSet.of_list (List.map (fun b -> let c, _ = b in c) (NTMap.bindings cmds)) in let rstCmdvs = StringSet.of_list (List.map (fun b -> let c, _ = b in c) (NTMap.bindings cmdvs)) in let gramCmds = List.fold_left (fun set nt -> StringSet.union set (StringSet.of_list (List.map (fun p -> String.trim (prod_to_prodn p)) (NTMap.find nt !prodn_gram.map))) ) StringSet.empty cmd_nts in let allCmds = StringSet.union rstCmdvs (StringSet.union rstCmds gramCmds) in let out = open_out_bin (dir outfile) in StringSet.iter (fun c -> let rsts = StringSet.mem c rstCmds in let gram = StringSet.mem c gramCmds in let pfx = match rsts, gram with | true, false -> "+" | false, true -> "-" | false, false -> "?" 
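(* Descriptive note: legend for the report lines written just below.  The
   first column is "+" for entries found only in the .rst files, "-" for
   entries found only in the grammar, "?" for entries in neither set
   (variant-only occurrences), and blank for entries present in both; the
   second column is "v" when the entry was also seen as a cmdv/tacv variant. *)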
| _, _ -> " " in let var = if StringSet.mem c rstCmdvs then "v" else " " in fprintf out "%s%s %s\n" pfx var c) allCmds; close_out out; Printf.printf "# %s in rsts, gram, total = %d %d %d\n" cmdStr (StringSet.cardinal gramCmds) (StringSet.cardinal rstCmds) (StringSet.cardinal allCmds); in let cmd_nts = ["command"] in (* TODO: need to handle tactic_mode (overlaps with query_command) and subprf *) if args.check_cmds then cmdReport "prodnCommands" "cmds" cmd_nts !seen.cmds !seen.cmdvs; let tac_nts = ["simple_tactic"] in if args.check_tacs then cmdReport "prodnTactics" "tacs" tac_nts !seen.tacs !seen.tacvs; (* generate prodnGrammar for reference *) if not args.verify then begin let out = open_out_bin (dir "prodnGrammar") in print_in_order out prodn_gram `PRODN !prodn_gram.order StringSet.empty; close_out out; end end (* if !exit_code = 0 *) end (* if not args.fullGrammar *) let parse_args () = let suffix_regex = Str.regexp ".*\\.\\([a-z]+\\)$" in let args = List.fold_left (fun args arg -> match arg with | "-check-cmds" -> { args with check_cmds = true } | "-check-tacs" -> { args with check_tacs = true } | "-no-warn" -> show_warn := false; { args with show_warn = false } | "-no-update" -> { args with update = false } | "-short" -> { args with fullGrammar = true } | "-verbose" -> { args with verbose = true } | "-verify" -> { args with verify = true } | arg when Str.string_match suffix_regex arg 0 -> (match Str.matched_group 1 arg with | "mlg" -> { args with mlg_files = (arg :: args.mlg_files) } | "rst" -> { args with rst_files = (arg :: args.rst_files) } | _ -> error "Unknown command line argument '%s'\n" arg; args) | arg -> error "Unknown command line argument '%s'\n" arg; args) default_args (List.tl (Array.to_list Sys.argv)) in { args with mlg_files = (List.rev args.mlg_files); rst_files = (List.rev args.rst_files)} let () = (*try*) Printexc.record_backtrace true; let args = parse_args () in if !exit_code = 0 then begin process_grammar args end; if !error_count > 0 then Printf.eprintf "%d error(s)\n" !error_count; exit !exit_code (*with _ -> Printexc.print_backtrace stdout; exit 1*) coq-8.15.0/doc/tools/docgram/dune000066400000000000000000000041241417001151100165560ustar00rootroot00000000000000(executable (name doc_grammar) (libraries coq-core.clib coqpp)) (env (_ (binaries doc_grammar.exe))) (rule (alias check-gram) (deps (:input ; Main grammar (glob_files %{project_root}/parsing/*.mlg) (glob_files %{project_root}/toplevel/*.mlg) (glob_files %{project_root}/vernac/*.mlg) (glob_files %{project_root}/plugins/btauto/*.mlg) (glob_files %{project_root}/plugins/cc/*.mlg) (glob_files %{project_root}/plugins/derive/*.mlg) (glob_files %{project_root}/plugins/extraction/*.mlg) (glob_files %{project_root}/plugins/firstorder/*.mlg) (glob_files %{project_root}/plugins/funind/*.mlg) (glob_files %{project_root}/plugins/ltac/*.mlg) (glob_files %{project_root}/plugins/micromega/*.mlg) (glob_files %{project_root}/plugins/nsatz/*.mlg) (glob_files %{project_root}/plugins/ring/*.mlg) (glob_files %{project_root}/plugins/rtauto/*.mlg) (glob_files %{project_root}/plugins/ssr/*.mlg) (glob_files %{project_root}/plugins/ssrmatching/*.mlg) (glob_files %{project_root}/plugins/syntax/*.mlg) (glob_files %{project_root}/plugins/ltac2/*.mlg) ; Sphinx files (glob_files %{project_root}/doc/sphinx/language/*.rst) (glob_files %{project_root}/doc/sphinx/proof-engine/*.rst) (glob_files %{project_root}/doc/sphinx/user-extensions/*.rst) (glob_files %{project_root}/doc/sphinx/practical-tools/*.rst) (glob_files 
%{project_root}/doc/sphinx/addendum/*.rst) (glob_files %{project_root}/doc/sphinx/language/core/*.rst) (glob_files %{project_root}/doc/sphinx/language/extensions/*.rst) (glob_files %{project_root}/doc/sphinx/proofs/writing-proofs/*.rst) (glob_files %{project_root}/doc/sphinx/proofs/automatic-tactics/*.rst) (glob_files %{project_root}/doc/sphinx/proofs/creating-tactics/*.rst) (glob_files %{project_root}/doc/sphinx/using/libraries/*.rst) (glob_files %{project_root}/doc/sphinx/using/tools/*.rst)) common.edit_mlg orderedGrammar) (action (progn (chdir %{project_root} (run doc_grammar -no-warn -check-cmds -no-update %{input})) (diff? fullGrammar fullGrammar.new) (diff? orderedGrammar orderedGrammar.new)))) coq-8.15.0/doc/tools/docgram/fullGrammar000066400000000000000000002502531417001151100201020ustar00rootroot00000000000000(* Coq grammar generated from .mlg files. Do not edit by hand. Not compiled into Coq *) DOC_GRAMMAR Constr.ident: [ | Prim.ident ] Prim.name: [ | "_" ] global: [ | Prim.reference ] constr_pattern: [ | constr ] cpattern: [ | lconstr ] sort: [ | "Set" | "Prop" | "SProp" | "Type" | "Type" "@{" "_" "}" | "Type" "@{" universe "}" ] sort_family: [ | "Set" | "Prop" | "SProp" | "Type" ] universe_increment: [ | "+" natural | ] universe_name: [ | global | "Set" | "Prop" ] universe_expr: [ | universe_name universe_increment ] universe: [ | "max" "(" LIST1 universe_expr SEP "," ")" | universe_expr ] lconstr: [ | term200 ] constr: [ | term8 | "@" global univ_annot ] term200: [ | binder_constr | term100 ] term100: [ | term99 "<:" term200 | term99 "<<:" term200 | term99 ":" term200 | term99 ] term99: [ | term90 ] term90: [ | term10 ] term10: [ | term9 LIST1 arg | "@" global univ_annot LIST0 term9 | "@" pattern_ident LIST1 identref | term9 ] term9: [ | ".." term0 ".." 
| term8 ] term8: [ | term1 ] term1: [ | term0 ".(" global univ_annot LIST0 arg ")" | term0 ".(" "@" global univ_annot LIST0 ( term9 ) ")" | term0 "%" IDENT | term0 ] term0: [ | atomic_constr | term_match | ident Prim.fields univ_annot | ident univ_annot | NUMBER | "(" term200 ")" | "{|" record_declaration bar_cbrace | "{" binder_constr "}" | "`{" term200 "}" | test_array_opening "[" "|" array_elems "|" lconstr type_cstr test_array_closing "|" "]" univ_annot | "`(" term200 ")" | "ltac" ":" "(" Pltac.ltac_expr ")" ] array_elems: [ | LIST0 lconstr SEP ";" ] record_declaration: [ | fields_def ] fields_def: [ | field_def ";" fields_def | field_def | ] field_def: [ | global binders ":=" lconstr ] binder_constr: [ | "forall" open_binders "," term200 | "fun" open_binders "=>" term200 | "let" name binders let_type_cstr ":=" term200 "in" term200 | "let" "fix" fix_decl "in" term200 | "let" "cofix" cofix_body "in" term200 | "let" [ "(" LIST0 name SEP "," ")" | "()" ] as_return_type ":=" term200 "in" term200 | "let" "'" pattern200 ":=" term200 "in" term200 | "let" "'" pattern200 ":=" term200 case_type "in" term200 | "let" "'" pattern200 "in" pattern200 ":=" term200 case_type "in" term200 | "if" term200 as_return_type "then" term200 "else" term200 | "fix" fix_decls | "cofix" cofix_decls | "if" term200 "is" ssr_dthen ssr_else (* SSR plugin *) | "if" term200 "isn't" ssr_dthen ssr_else (* SSR plugin *) | "let" ":" ssr_mpat ":=" lconstr "in" lconstr (* SSR plugin *) | "let" ":" ssr_mpat ":=" lconstr ssr_rtype "in" lconstr (* SSR plugin *) | "let" ":" ssr_mpat "in" pattern200 ":=" lconstr ssr_rtype "in" lconstr (* SSR plugin *) ] arg: [ | test_lpar_id_coloneq "(" identref ":=" lconstr ")" | test_lpar_nat_coloneq "(" natural ":=" lconstr ")" | term9 ] atomic_constr: [ | sort | string | "_" | "?" "[" identref "]" | "?" 
"[" pattern_ident "]" | pattern_ident evar_instance ] inst: [ | identref ":=" lconstr ] evar_instance: [ | "@{" LIST1 inst SEP ";" "}" | ] univ_annot: [ | "@{" LIST0 universe_level "}" | ] universe_level: [ | "Set" | "Prop" | "Type" | "_" | global ] fix_decls: [ | fix_decl | fix_decl "with" LIST1 fix_decl SEP "with" "for" identref ] cofix_decls: [ | cofix_body | cofix_body "with" LIST1 cofix_body SEP "with" "for" identref ] fix_decl: [ | identref binders_fixannot type_cstr ":=" term200 ] cofix_body: [ | identref binders type_cstr ":=" term200 ] term_match: [ | "match" LIST1 case_item SEP "," OPT case_type "with" branches "end" ] case_item: [ | term100 OPT [ "as" name ] OPT [ "in" pattern200 ] ] case_type: [ | "return" term100 ] as_return_type: [ | OPT [ OPT [ "as" name ] case_type ] ] branches: [ | OPT "|" LIST0 eqn SEP "|" ] mult_pattern: [ | LIST1 pattern200 SEP "," ] eqn: [ | LIST1 mult_pattern SEP "|" "=>" lconstr ] record_pattern: [ | global ":=" pattern200 ] record_patterns: [ | record_pattern ";" record_patterns | record_pattern | ] pattern200: [ | pattern100 ] pattern100: [ | pattern99 ":" term200 | pattern99 ] pattern99: [ | pattern90 ] pattern90: [ | pattern10 ] pattern10: [ | pattern1 "as" name | pattern1 LIST1 pattern1 | "@" Prim.reference LIST0 pattern1 | pattern1 ] pattern1: [ | pattern0 "%" IDENT | pattern0 ] pattern0: [ | Prim.reference | "{|" record_patterns bar_cbrace | "_" | "(" pattern200 ")" | "(" pattern200 "|" LIST1 pattern200 SEP "|" ")" | NUMBER | string ] fixannot: [ | "{" "struct" identref "}" | "{" "wf" constr identref "}" | "{" "measure" constr OPT identref OPT constr "}" ] binders_fixannot: [ | ensure_fixannot fixannot | binder binders_fixannot | ] open_binders: [ | name LIST0 name ":" lconstr | name LIST0 name binders | name ".." name | closed_binder binders ] binders: [ | LIST0 binder | Pcoq.Constr.binders ] binder: [ | name | closed_binder ] closed_binder: [ | "(" name LIST1 name ":" lconstr ")" | "(" name ":" lconstr ")" | "(" name ":=" lconstr ")" | "(" name ":" lconstr ":=" lconstr ")" | "{" name "}" | "{" name LIST1 name ":" lconstr "}" | "{" name ":" lconstr "}" | "{" name LIST1 name "}" | "[" name "]" | "[" name LIST1 name ":" lconstr "]" | "[" name ":" lconstr "]" | "[" name LIST1 name "]" | "`(" LIST1 typeclass_constraint SEP "," ")" | "`{" LIST1 typeclass_constraint SEP "," "}" | "`[" LIST1 typeclass_constraint SEP "," "]" | "'" pattern0 | [ "of" | "&" ] term99 (* SSR plugin *) ] one_open_binder: [ | name | name ":" lconstr | one_closed_binder ] one_closed_binder: [ | "(" name ":" lconstr ")" | "{" name "}" | "{" name ":" lconstr "}" | "[" name "]" | "[" name ":" lconstr "]" | "'" pattern0 ] typeclass_constraint: [ | "!" term200 | "{" name "}" ":" [ "!" | ] term200 | test_name_colon name ":" [ "!" 
| ] term200 | term200 ] type_cstr: [ | ":" lconstr | ] let_type_cstr: [ | OPT [ ":" lconstr ] ] preident: [ | IDENT ] ident: [ | IDENT ] pattern_ident: [ | LEFTQMARK ident ] identref: [ | ident ] hyp: [ | identref ] field: [ | FIELD ] fields: [ | field fields | field ] fullyqualid: [ | ident fields | ident ] name: [ | "_" | ident ] reference: [ | ident fields | ident ] qualid: [ | reference ] by_notation: [ | ne_string OPT [ "%" IDENT ] ] smart_global: [ | reference | by_notation ] ne_string: [ | STRING ] ne_lstring: [ | ne_string ] dirpath: [ | ident LIST0 field ] string: [ | STRING ] lstring: [ | string ] integer: [ | bigint ] natural: [ | bignat ] bigint: [ | bignat | test_minus_nat "-" bignat ] bignat: [ | NUMBER ] bar_cbrace: [ | test_pipe_closedcurly "|" "}" ] strategy_level: [ | "expand" | "opaque" | integer | "transparent" ] vernac_toplevel: [ | "Drop" "." | "Quit" "." | "BackTo" natural "." | test_show_goal "Show" "Goal" natural "at" natural "." | "Show" "Proof" "Diffs" OPT "removed" "." | Pvernac.Vernac_.main_entry ] opt_hintbases: [ | | ":" LIST1 IDENT ] command: [ | "Goal" lconstr | "Proof" | "Proof" "using" G_vernac.section_subset_expr | "Proof" "Mode" string | "Proof" lconstr | "Abort" | "Abort" "All" | "Abort" identref | "Admitted" | "Qed" | "Save" identref | "Defined" | "Defined" identref | "Restart" | "Undo" | "Undo" natural | "Undo" "To" natural | "Focus" | "Focus" natural | "Unfocus" | "Unfocused" | "Show" | "Show" natural | "Show" ident | "Show" "Existentials" | "Show" "Universes" | "Show" "Conjectures" | "Show" "Proof" | "Show" "Intro" | "Show" "Intros" | "Show" "Match" reference | "Guarded" | "Create" "HintDb" IDENT; [ "discriminated" | ] | "Remove" "Hints" LIST1 global opt_hintbases | "Hint" hint opt_hintbases | "Comments" LIST0 comment | "Declare" "Instance" ident_decl binders ":" term200 hint_info | "Declare" "Scope" IDENT | "Pwd" | "Cd" | "Cd" ne_string | "Load" [ "Verbose" | ] [ ne_string | IDENT ] | "Declare" "ML" "Module" LIST1 ne_string | "Locate" locatable | "Add" "LoadPath" ne_string "as" dirpath | "Add" "Rec" "LoadPath" ne_string "as" dirpath | "Remove" "LoadPath" ne_string | "Type" lconstr | "Print" printable | "Print" smart_global OPT univ_name_list | "Print" "Module" "Type" global | "Print" "Module" global | "Print" "Namespace" dirpath | "Inspect" natural | "Add" "ML" "Path" ne_string | "Set" setting_name option_setting | "Unset" setting_name | "Print" "Table" setting_name | "Add" IDENT IDENT LIST1 table_value | "Add" IDENT LIST1 table_value | "Test" setting_name "for" LIST1 table_value | "Test" setting_name | "Remove" IDENT IDENT LIST1 table_value | "Remove" IDENT LIST1 table_value | "Reset" "Initial" | "Reset" identref | "Back" | "Back" natural | "Debug" "On" | "Debug" "Off" | "Declare" "Reduction" IDENT; ":=" red_expr | "Declare" "Custom" "Entry" IDENT | "Derive" identref "SuchThat" constr "As" identref (* derive plugin *) | "Extraction" global (* extraction plugin *) | "Recursive" "Extraction" LIST1 global (* extraction plugin *) | "Extraction" string LIST1 global (* extraction plugin *) | "Extraction" "TestCompile" LIST1 global (* extraction plugin *) | "Separate" "Extraction" LIST1 global (* extraction plugin *) | "Extraction" "Library" identref (* extraction plugin *) | "Recursive" "Extraction" "Library" identref (* extraction plugin *) | "Extraction" "Language" language (* extraction plugin *) | "Extraction" "Inline" LIST1 global (* extraction plugin *) | "Extraction" "NoInline" LIST1 global (* extraction plugin *) | "Print" "Extraction" "Inline" 
(* extraction plugin *) | "Reset" "Extraction" "Inline" (* extraction plugin *) | "Extraction" "Implicit" global "[" LIST0 int_or_id "]" (* extraction plugin *) | "Extraction" "Blacklist" LIST1 preident (* extraction plugin *) | "Print" "Extraction" "Blacklist" (* extraction plugin *) | "Reset" "Extraction" "Blacklist" (* extraction plugin *) | "Extract" "Constant" global LIST0 string "=>" mlname (* extraction plugin *) | "Extract" "Inlined" "Constant" global "=>" mlname (* extraction plugin *) | "Extract" "Inductive" global "=>" mlname "[" LIST0 mlname "]" OPT string (* extraction plugin *) | "Show" "Extraction" (* extraction plugin *) | "Set" "Firstorder" "Solver" tactic | "Print" "Firstorder" "Solver" | "Function" LIST1 function_fix_definition SEP "with" (* funind plugin *) | "Functional" "Scheme" LIST1 fun_scheme_arg SEP "with" (* funind plugin *) | "Functional" "Case" fun_scheme_arg (* funind plugin *) | "Generate" "graph" "for" reference (* funind plugin *) | "Hint" "Rewrite" orient LIST1 constr ":" LIST0 preident | "Hint" "Rewrite" orient LIST1 constr "using" tactic ":" LIST0 preident | "Hint" "Rewrite" orient LIST1 constr | "Hint" "Rewrite" orient LIST1 constr "using" tactic | "Derive" "Inversion_clear" ident "with" constr "Sort" sort_family | "Derive" "Inversion_clear" ident "with" constr | "Derive" "Inversion" ident "with" constr "Sort" sort_family | "Derive" "Inversion" ident "with" constr | "Derive" "Dependent" "Inversion" ident "with" constr "Sort" sort_family | "Derive" "Dependent" "Inversion_clear" ident "with" constr "Sort" sort_family | "Declare" "Left" "Step" constr | "Declare" "Right" "Step" constr | "Unshelve" | "Declare" "Equivalent" "Keys" constr constr | "Print" "Equivalent" "Keys" | "Optimize" "Proof" | "Optimize" "Heap" | "infoH" tactic | "Hint" "Cut" "[" hints_path "]" opthints | "Typeclasses" "Transparent" LIST1 reference | "Typeclasses" "Opaque" LIST1 reference | "Typeclasses" "eauto" ":=" debug eauto_search_strategy OPT natural | "Proof" "with" Pltac.tactic OPT [ "using" G_vernac.section_subset_expr ] | "Proof" "using" G_vernac.section_subset_expr "with" Pltac.tactic | "Tactic" "Notation" OPT ltac_tactic_level LIST1 ltac_production_item ":=" tactic | "Print" "Ltac" reference | "Locate" "Ltac" reference | "Ltac" LIST1 ltac_tacdef_body SEP "with" | "Print" "Ltac" "Signatures" | "Obligation" natural "of" identref ":" lglob withtac | "Obligation" natural "of" identref withtac | "Obligation" natural ":" lglob withtac | "Obligation" natural withtac | "Next" "Obligation" "of" identref withtac | "Next" "Obligation" withtac | "Solve" "Obligation" natural "of" identref "with" tactic | "Solve" "Obligation" natural "with" tactic | "Solve" "Obligations" "of" identref "with" tactic | "Solve" "Obligations" "of" identref | "Solve" "Obligations" "with" tactic | "Solve" "Obligations" | "Solve" "All" "Obligations" "with" tactic | "Solve" "All" "Obligations" | "Admit" "Obligations" "of" identref | "Admit" "Obligations" | "Obligation" "Tactic" ":=" tactic | "Show" "Obligation" "Tactic" | "Obligations" "of" identref | "Obligations" | "Preterm" "of" identref | "Preterm" | "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "as" identref | "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "as" identref | "Add" "Relation" constr constr "as" identref | "Add" "Relation" constr constr "symmetry" "proved" "by" constr "as" identref | "Add" "Relation" constr constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" 
constr "as" identref | "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | "Add" "Relation" constr constr "transitivity" "proved" "by" constr "as" identref | "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "as" identref | "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "as" identref | "Add" "Parametric" "Relation" binders ":" constr constr "as" identref | "Add" "Parametric" "Relation" binders ":" constr constr "symmetry" "proved" "by" constr "as" identref | "Add" "Parametric" "Relation" binders ":" constr constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" identref | "Add" "Parametric" "Relation" binders ":" constr constr "transitivity" "proved" "by" constr "as" identref | "Add" "Setoid" constr constr constr "as" identref | "Add" "Parametric" "Setoid" binders ":" constr constr constr "as" identref | "Add" "Morphism" constr ":" identref | "Declare" "Morphism" constr ":" identref | "Add" "Morphism" constr "with" "signature" lconstr "as" identref | "Add" "Parametric" "Morphism" binders ":" constr "with" "signature" lconstr "as" identref | "Print" "Rewrite" "HintDb" preident | "Reset" "Ltac" "Profile" | "Show" "Ltac" "Profile" | "Show" "Ltac" "Profile" "CutOff" integer | "Show" "Ltac" "Profile" string | "Show" "Lia" "Profile" (* micromega plugin *) | "Add" "Zify" "InjTyp" reference (* micromega plugin *) | "Add" "Zify" "BinOp" reference (* micromega plugin *) | "Add" "Zify" "UnOp" reference (* micromega plugin *) | "Add" "Zify" "CstOp" reference (* micromega plugin *) | "Add" "Zify" "BinRel" reference (* micromega plugin *) | "Add" "Zify" "PropOp" reference (* micromega plugin *) | "Add" "Zify" "PropBinOp" reference (* micromega plugin *) | "Add" "Zify" "PropUOp" reference (* micromega plugin *) | "Add" "Zify" "BinOpSpec" reference (* micromega plugin *) | "Add" "Zify" "UnOpSpec" reference (* micromega plugin *) | "Add" "Zify" "Saturate" reference (* micromega plugin *) | "Show" "Zify" "InjTyp" (* micromega plugin *) | "Show" "Zify" "BinOp" (* micromega plugin *) | "Show" "Zify" "UnOp" (* micromega plugin *) | "Show" "Zify" "CstOp" (* micromega plugin *) | "Show" "Zify" "BinRel" (* micromega plugin *) | "Show" "Zify" "UnOpSpec" (* micromega plugin *) | "Show" "Zify" "BinOpSpec" (* micromega plugin *) | "Add" "Ring" identref ":" constr OPT ring_mods (* ring plugin *) | "Print" "Rings" (* ring plugin *) | "Add" "Field" identref ":" constr OPT field_mods (* ring plugin *) | "Print" "Fields" (* ring plugin *) | "Prenex" "Implicits" LIST1 global (* SSR plugin *) | "Print" "Hint" "View" ssrviewpos (* SSR plugin *) | "Hint" "View" ssrviewposspc LIST1 ssrhintref (* SSR plugin *) | "Number" "Notation" reference reference reference OPT number_options ":" preident | "String" "Notation" reference reference reference OPT string_option ":" preident | "Ltac2" ltac2_entry (* ltac2 plugin *) | "Ltac2" "Eval" ltac2_expr6 (* ltac2 plugin *) | 
"Print" "Ltac2" reference (* ltac2 plugin *) ] reference_or_constr: [ | global | constr ] hint: [ | "Resolve" LIST1 reference_or_constr hint_info | "Resolve" "->" LIST1 global OPT natural | "Resolve" "<-" LIST1 global OPT natural | "Immediate" LIST1 reference_or_constr | "Variables" "Transparent" | "Variables" "Opaque" | "Constants" "Transparent" | "Constants" "Opaque" | "Transparent" LIST1 global | "Opaque" LIST1 global | "Mode" global mode | "Unfold" LIST1 global | "Constructors" LIST1 global | "Extern" natural OPT Constr.constr_pattern "=>" Pltac.tactic ] constr_body: [ | ":=" lconstr | ":" lconstr ":=" lconstr ] mode: [ | LIST1 [ "+" | "!" | "-" ] ] vernac_control: [ | "Time" vernac_control | "Redirect" ne_string vernac_control | "Timeout" natural vernac_control | "Fail" vernac_control | "Succeed" vernac_control | decorated_vernac ] decorated_vernac: [ | LIST0 quoted_attributes vernac ] quoted_attributes: [ | "#[" attribute_list "]" ] attribute_list: [ | LIST0 attribute SEP "," ] attribute: [ | ident attr_value | "using" attr_value ] attr_value: [ | "=" string | "=" IDENT | "(" attribute_list ")" | ] legacy_attr: [ | "Local" | "Global" | "Polymorphic" | "Monomorphic" | "Cumulative" | "NonCumulative" | "Private" | "Program" ] vernac: [ | LIST0 legacy_attr vernac_aux ] vernac_aux: [ | gallina "." | gallina_ext "." | command "." | syntax "." | subprf | command_entry ] noedit_mode: [ | query_command ] subprf: [ | BULLET | "{" | "}" ] gallina: [ | thm_token ident_decl binders ":" lconstr LIST0 [ "with" ident_decl binders ":" lconstr ] | assumption_token inline assum_list | assumptions_token inline assum_list | def_token ident_decl def_body | "Let" ident_decl def_body | finite_token LIST1 inductive_definition SEP "with" | "Fixpoint" LIST1 fix_definition SEP "with" | "Let" "Fixpoint" LIST1 fix_definition SEP "with" | "CoFixpoint" LIST1 cofix_definition SEP "with" | "Let" "CoFixpoint" LIST1 cofix_definition SEP "with" | "Scheme" LIST1 scheme SEP "with" | "Combined" "Scheme" identref "from" LIST1 identref SEP "," | "Register" global "as" qualid | "Register" "Inline" global | "Primitive" ident_decl OPT [ ":" lconstr ] ":=" register_token | "Universe" LIST1 identref | "Universes" LIST1 identref | "Constraint" LIST1 univ_constraint SEP "," ] register_token: [ | test_hash_ident "#" IDENT ] thm_token: [ | "Theorem" | "Lemma" | "Fact" | "Remark" | "Corollary" | "Proposition" | "Property" ] def_token: [ | "Definition" | "Example" | "SubClass" ] assumption_token: [ | "Hypothesis" | "Variable" | "Axiom" | "Parameter" | "Conjecture" ] assumptions_token: [ | "Hypotheses" | "Variables" | "Axioms" | "Parameters" | "Conjectures" ] inline: [ | "Inline" "(" natural ")" | "Inline" | ] univ_constraint: [ | universe_name [ "<" | "=" | "<=" ] universe_name ] univ_decl: [ | "@{" LIST0 identref [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | bar_cbrace ] ] ] variance: [ | "+" | "=" | "*" ] variance_identref: [ | identref | test_variance_ident variance identref ] cumul_univ_decl: [ | "@{" LIST0 variance_identref [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | bar_cbrace ] ] ] ident_decl: [ | identref OPT univ_decl ] cumul_ident_decl: [ | identref OPT cumul_univ_decl ] finite_token: [ | "Inductive" | "CoInductive" | "Variant" | "Record" | "Structure" | "Class" ] def_body: [ | binders ":=" reduce lconstr | binders ":" lconstr ":=" reduce lconstr | binders ":" lconstr ] reduce: [ | "Eval" red_expr "in" | ] decl_notation: [ | ne_lstring ":=" constr syntax_modifiers OPT [ ":" 
IDENT ] ] decl_sep: [ | "and" ] decl_notations: [ | "where" LIST1 decl_notation SEP decl_sep | ] opt_constructors_or_fields: [ | ":=" constructors_or_record | ] inductive_definition: [ | opt_coercion cumul_ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] opt_constructors_or_fields decl_notations ] constructors_or_record: [ | "|" LIST1 constructor SEP "|" | identref constructor_type "|" LIST1 constructor SEP "|" | identref constructor_type | identref "{" record_fields "}" | "{" record_fields "}" | ] opt_coercion: [ | ">" | ] fix_definition: [ | ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notations ] cofix_definition: [ | ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notations ] scheme: [ | scheme_kind | identref ":=" scheme_kind ] scheme_kind: [ | "Induction" "for" smart_global "Sort" sort_family | "Minimality" "for" smart_global "Sort" sort_family | "Elimination" "for" smart_global "Sort" sort_family | "Case" "for" smart_global "Sort" sort_family | "Equality" "for" smart_global ] record_field: [ | LIST0 quoted_attributes record_binder OPT [ "|" natural ] decl_notations ] record_fields: [ | record_field ";" record_fields | record_field | ] field_body: [ | binders of_type lconstr | binders of_type lconstr ":=" lconstr | binders ":=" lconstr ] record_binder: [ | name | name field_body ] assum_list: [ | LIST1 assum_coe | assumpt ] assum_coe: [ | "(" assumpt ")" ] assumpt: [ | LIST1 ident_decl of_type lconstr ] constructor_type: [ | binders [ of_type lconstr | ] ] constructor: [ | identref constructor_type ] of_type: [ | ":>" | ":" ">" | ":" ] gallina_ext: [ | "Module" export_token identref LIST0 module_binder of_module_type is_module_expr | "Module" "Type" identref LIST0 module_binder check_module_types is_module_type | "Declare" "Module" export_token identref LIST0 module_binder ":" module_type_inl | "Section" identref | "End" identref | "Collection" identref ":=" section_subset_expr | "Require" export_token LIST1 global | "From" global "Require" export_token LIST1 global | "Import" OPT import_categories LIST1 filtered_import | "Export" OPT import_categories LIST1 filtered_import | "Include" module_type_inl LIST0 ext_module_expr | "Include" "Type" module_type_inl LIST0 ext_module_type | "Transparent" LIST1 smart_global | "Opaque" LIST1 smart_global | "Strategy" LIST1 [ strategy_level "[" LIST1 smart_global "]" ] | "Canonical" OPT "Structure" global OPT [ OPT univ_decl def_body ] | "Canonical" OPT "Structure" by_notation | "Coercion" global OPT univ_decl def_body | "Identity" "Coercion" identref ":" class_rawexpr ">->" class_rawexpr | "Coercion" global ":" class_rawexpr ">->" class_rawexpr | "Coercion" by_notation ":" class_rawexpr ">->" class_rawexpr | "Context" LIST1 binder | "Instance" instance_name ":" term200 hint_info [ ":=" "{" record_declaration "}" | ":=" lconstr | ] | "Existing" "Instance" global hint_info | "Existing" "Instances" LIST1 global OPT [ "|" natural ] | "Existing" "Class" global | "Arguments" smart_global LIST0 arg_specs OPT [ "," LIST1 [ LIST0 implicits_alt ] SEP "," ] OPT [ ":" LIST1 args_modifier SEP "," ] | "Implicit" "Type" reserv_list | "Implicit" "Types" reserv_list | "Generalizable" [ "All" "Variables" | "No" "Variables" | [ "Variable" | "Variables" ] LIST1 identref ] | "Export" "Set" setting_name option_setting | "Export" "Unset" setting_name | "Import" "Prenex" "Implicits" (* SSR plugin *) ] import_categories: [ | OPT "-" "(" LIST1 qualid SEP "," ")" ] filtered_import: [ | global | global "(" LIST1 one_import_filter_name 
SEP "," ")" ] one_import_filter_name: [ | global OPT [ "(" ".." ")" ] ] export_token: [ | "Import" | "Export" | ] ext_module_type: [ | "<+" module_type_inl ] ext_module_expr: [ | "<+" module_expr_inl ] check_module_type: [ | "<:" module_type_inl ] check_module_types: [ | LIST0 check_module_type ] of_module_type: [ | ":" module_type_inl | check_module_types ] is_module_type: [ | ":=" module_type_inl LIST0 ext_module_type | ] is_module_expr: [ | ":=" module_expr_inl LIST0 ext_module_expr | ] functor_app_annot: [ | "[" "inline" "at" "level" natural "]" | "[" "no" "inline" "]" | ] module_expr_inl: [ | "!" module_expr | module_expr functor_app_annot ] module_type_inl: [ | "!" module_type | module_type functor_app_annot ] module_binder: [ | "(" export_token LIST1 identref ":" module_type_inl ")" ] module_expr: [ | module_expr_atom | module_expr module_expr_atom ] module_expr_atom: [ | qualid | "(" module_expr ")" ] with_declaration: [ | "Definition" fullyqualid OPT univ_decl ":=" Constr.lconstr | "Module" fullyqualid ":=" qualid ] module_type: [ | qualid | "(" module_type ")" | module_type module_expr_atom | module_type "with" with_declaration ] section_subset_expr: [ | test_only_starredidentrefs LIST0 starredidentref | ssexpr35 ] starredidentref: [ | identref | identref "*" | "Type" | "Type" "*" ] ssexpr35: [ | "-" ssexpr50 | ssexpr50 ] ssexpr50: [ | ssexpr0 "-" ssexpr0 | ssexpr0 "+" ssexpr0 | ssexpr0 ] ssexpr0: [ | starredidentref | "(" test_only_starredidentrefs LIST0 starredidentref ")" | "(" test_only_starredidentrefs LIST0 starredidentref ")" "*" | "(" ssexpr35 ")" | "(" ssexpr35 ")" "*" ] args_modifier: [ | "simpl" "nomatch" | "simpl" "never" | "default" "implicits" | "clear" "implicits" | "clear" "scopes" | "clear" "bidirectionality" "hint" | "rename" | "assert" | "extra" "scopes" | "clear" "scopes" "and" "implicits" | "clear" "implicits" "and" "scopes" ] scope_delimiter: [ | "%" IDENT ] argument_spec: [ | OPT "!" name OPT scope_delimiter ] arg_specs: [ | argument_spec | "/" | "&" | "(" LIST1 argument_spec ")" OPT scope_delimiter | "[" LIST1 argument_spec "]" OPT scope_delimiter | "{" LIST1 argument_spec "}" OPT scope_delimiter ] implicits_alt: [ | name | "[" LIST1 name "]" | "{" LIST1 name "}" ] instance_name: [ | ident_decl binders | ] hint_info: [ | "|" OPT natural OPT constr_pattern | ] reserv_list: [ | LIST1 reserv_tuple | simple_reserv ] reserv_tuple: [ | "(" simple_reserv ")" ] simple_reserv: [ | LIST1 identref ":" lconstr ] query_command: [ | "Eval" red_expr "in" lconstr "." | "Compute" lconstr "." | "Check" lconstr "." | "About" smart_global OPT univ_name_list "." | "SearchPattern" constr_pattern in_or_out_modules "." | "SearchRewrite" constr_pattern in_or_out_modules "." | "Search" search_query search_queries "." 
] printable: [ | "Term" smart_global OPT univ_name_list | "All" | "Section" global | "Grammar" IDENT | "Custom" "Grammar" IDENT | "LoadPath" OPT dirpath | "Libraries" | "ML" "Path" | "ML" "Modules" | "Debug" "GC" | "Graph" | "Classes" | "TypeClasses" | "Instances" smart_global | "Coercions" | "Coercion" "Paths" class_rawexpr class_rawexpr | "Canonical" "Projections" LIST0 smart_global | "Typing" "Flags" | "Tables" | "Options" | "Hint" | "Hint" smart_global | "Hint" "*" | "HintDb" IDENT | "Scopes" | "Scope" IDENT | "Visibility" OPT IDENT | "Implicit" smart_global | [ "Sorted" | ] "Universes" OPT printunivs_subgraph OPT ne_string | "Assumptions" smart_global | "Opaque" "Dependencies" smart_global | "Transparent" "Dependencies" smart_global | "All" "Dependencies" smart_global | "Strategy" smart_global | "Strategies" | "Registered" ] printunivs_subgraph: [ | "Subgraph" "(" LIST0 reference ")" ] class_rawexpr: [ | "Funclass" | "Sortclass" | smart_global ] locatable: [ | smart_global | "Term" smart_global | "File" ne_string | "Library" global | "Module" global ] option_setting: [ | | integer | STRING ] table_value: [ | global | STRING ] setting_name: [ | LIST1 IDENT ] ne_in_or_out_modules: [ | "inside" LIST1 global | "in" LIST1 global | "outside" LIST1 global ] in_or_out_modules: [ | ne_in_or_out_modules | ] comment: [ | constr | STRING | natural ] positive_search_mark: [ | "-" | ] search_query: [ | positive_search_mark search_item | positive_search_mark "[" LIST1 ( LIST1 search_query ) SEP "|" "]" ] search_item: [ | test_id_colon search_where ":" ne_string OPT scope_delimiter | "is" ":" logical_kind | ne_string OPT scope_delimiter | test_id_colon search_where ":" constr_pattern | constr_pattern ] logical_kind: [ | thm_token | assumption_token | "Context" | extended_def_token | "Primitive" ] extended_def_token: [ | def_token | "Coercion" | "Instance" | "Scheme" | "Canonical" | "Field" | "Method" ] search_where: [ | "head" | "hyp" | "concl" | "headhyp" | "headconcl" ] search_queries: [ | ne_in_or_out_modules | search_query search_queries | ] univ_name_list: [ | "@{" LIST0 name "}" ] syntax: [ | "Open" "Scope" IDENT | "Close" "Scope" IDENT | "Delimit" "Scope" IDENT; "with" IDENT | "Undelimit" "Scope" IDENT | "Bind" "Scope" IDENT; "with" LIST1 class_rawexpr | "Infix" ne_lstring ":=" constr syntax_modifiers OPT [ ":" IDENT ] | "Notation" identref LIST0 ident ":=" constr syntax_modifiers | "Notation" lstring ":=" constr syntax_modifiers OPT [ ":" IDENT ] | "Format" "Notation" STRING STRING STRING | "Reserved" "Infix" ne_lstring syntax_modifiers | "Reserved" "Notation" ne_lstring syntax_modifiers ] level: [ | "level" natural | "next" "level" ] syntax_modifier: [ | "at" "level" natural | "in" "custom" IDENT | "in" "custom" IDENT; "at" "level" natural | "left" "associativity" | "right" "associativity" | "no" "associativity" | "only" "printing" | "only" "parsing" | "format" STRING OPT STRING | IDENT; "," LIST1 IDENT SEP "," [ "at" level | "in" "scope" IDENT ] | IDENT; "at" level OPT binder_interp | IDENT; "in" "scope" IDENT | IDENT binder_interp | IDENT explicit_subentry ] syntax_modifiers: [ | "(" LIST1 syntax_modifier SEP "," ")" | ] explicit_subentry: [ | "ident" | "name" | "global" | "bigint" | "binder" | "constr" | "constr" at_level_opt OPT binder_interp | "pattern" | "pattern" "at" "level" natural | "strict" "pattern" | "strict" "pattern" "at" "level" natural | "closed" "binder" | "custom" IDENT at_level_opt OPT binder_interp ] at_level_opt: [ | "at" level | ] binder_interp: [ | "as" "ident" | "as" 
"name" | "as" "pattern" | "as" "strict" "pattern" ] simple_tactic: [ | "btauto" | "congruence" OPT natural | "congruence" OPT natural "with" LIST1 constr | "simple" "congruence" OPT natural | "simple" "congruence" OPT natural "with" LIST1 constr | "f_equal" | "firstorder" OPT tactic firstorder_using | "firstorder" OPT tactic "with" LIST1 preident | "firstorder" OPT tactic firstorder_using "with" LIST1 preident | "gintuition" OPT tactic | "functional" "inversion" quantified_hypothesis OPT reference (* funind plugin *) | "functional" "induction" lconstr fun_ind_using with_names (* funind plugin *) | "soft" "functional" "induction" LIST1 constr fun_ind_using with_names (* funind plugin *) | "reflexivity" | "exact" uconstr | "assumption" | "etransitivity" | "cut" constr | "exact_no_check" constr | "vm_cast_no_check" constr | "native_cast_no_check" constr | "casetype" constr | "elimtype" constr | "lapply" constr | "transitivity" constr | "left" | "eleft" | "left" "with" bindings | "eleft" "with" bindings | "right" | "eright" | "right" "with" bindings | "eright" "with" bindings | "constructor" | "constructor" nat_or_var | "constructor" nat_or_var "with" bindings | "econstructor" | "econstructor" nat_or_var | "econstructor" nat_or_var "with" bindings | "specialize" constr_with_bindings | "specialize" constr_with_bindings "as" simple_intropattern | "symmetry" | "symmetry" "in" in_clause | "split" | "esplit" | "split" "with" bindings | "esplit" "with" bindings | "exists" | "exists" LIST1 bindings SEP "," | "eexists" | "eexists" LIST1 bindings SEP "," | "intros" "until" quantified_hypothesis | "intro" | "intro" ident | "intro" ident "at" "top" | "intro" ident "at" "bottom" | "intro" ident "after" hyp | "intro" ident "before" hyp | "intro" "at" "top" | "intro" "at" "bottom" | "intro" "after" hyp | "intro" "before" hyp | "move" hyp "at" "top" | "move" hyp "at" "bottom" | "move" hyp "after" hyp | "move" hyp "before" hyp | "rename" LIST1 rename SEP "," | "revert" LIST1 hyp | "simple" "induction" quantified_hypothesis | "simple" "destruct" quantified_hypothesis | "admit" | "fix" ident natural | "cofix" ident | "clear" LIST0 hyp | "clear" "-" LIST1 hyp | "clearbody" LIST1 hyp | "generalize" "dependent" constr | "replace" uconstr "with" constr clause by_arg_tac | "replace" "->" uconstr clause | "replace" "<-" uconstr clause | "replace" uconstr clause | "simplify_eq" | "simplify_eq" destruction_arg | "esimplify_eq" | "esimplify_eq" destruction_arg | "discriminate" | "discriminate" destruction_arg | "ediscriminate" | "ediscriminate" destruction_arg | "injection" | "injection" destruction_arg | "einjection" | "einjection" destruction_arg | "injection" "as" LIST0 simple_intropattern | "injection" destruction_arg "as" LIST0 simple_intropattern | "einjection" "as" LIST0 simple_intropattern | "einjection" destruction_arg "as" LIST0 simple_intropattern | "simple" "injection" | "simple" "injection" destruction_arg | "dependent" "rewrite" orient constr | "dependent" "rewrite" orient constr "in" hyp | "cutrewrite" orient constr | "cutrewrite" orient constr "in" hyp | "decompose" "sum" constr | "decompose" "record" constr | "absurd" constr | "contradiction" OPT constr_with_bindings | "autorewrite" "with" LIST1 preident clause | "autorewrite" "with" LIST1 preident clause "using" tactic | "autorewrite" "*" "with" LIST1 preident clause | "autorewrite" "*" "with" LIST1 preident clause "using" tactic | "rewrite" "*" orient uconstr "in" hyp "at" occurrences by_arg_tac | "rewrite" "*" orient uconstr "at" occurrences "in" hyp 
by_arg_tac | "rewrite" "*" orient uconstr "in" hyp by_arg_tac | "rewrite" "*" orient uconstr "at" occurrences by_arg_tac | "rewrite" "*" orient uconstr by_arg_tac | "refine" uconstr | "simple" "refine" uconstr | "notypeclasses" "refine" uconstr | "simple" "notypeclasses" "refine" uconstr | "solve_constraints" | "subst" LIST1 hyp | "subst" | "simple" "subst" | "evar" test_lpar_id_colon "(" ident ":" lconstr ")" | "evar" constr | "instantiate" "(" ident ":=" lglob ")" | "instantiate" "(" integer ":=" lglob ")" hloc | "instantiate" | "stepl" constr "by" tactic | "stepl" constr | "stepr" constr "by" tactic | "stepr" constr | "generalize_eqs" hyp | "dependent" "generalize_eqs" hyp | "generalize_eqs_vars" hyp | "dependent" "generalize_eqs_vars" hyp | "specialize_eqs" hyp | "hresolve_core" "(" ident ":=" constr ")" "at" nat_or_var "in" constr | "hresolve_core" "(" ident ":=" constr ")" "in" constr | "hget_evar" nat_or_var | "destauto" | "destauto" "in" hyp | "transparent_abstract" tactic3 | "transparent_abstract" tactic3 "using" ident | "constr_eq" constr constr | "constr_eq_strict" constr constr | "constr_eq_nounivs" constr constr | "is_evar" constr | "has_evar" constr | "is_var" constr | "is_fix" constr | "is_cofix" constr | "is_ind" constr | "is_constructor" constr | "is_proj" constr | "is_const" constr | "shelve" | "shelve_unifiable" | "unshelve" tactic1 | "give_up" | "cycle" int_or_var | "swap" int_or_var int_or_var | "revgoals" | "guard" test | "decompose" "[" LIST1 constr "]" constr | "optimize_heap" | "with_strategy" strategy_level_or_var "[" LIST1 smart_global "]" tactic3 | "eassumption" | "eexact" constr | "trivial" auto_using hintbases | "info_trivial" auto_using hintbases | "debug" "trivial" auto_using hintbases | "auto" OPT nat_or_var auto_using hintbases | "info_auto" OPT nat_or_var auto_using hintbases | "debug" "auto" OPT nat_or_var auto_using hintbases | "eauto" OPT nat_or_var OPT nat_or_var auto_using hintbases | "debug" "eauto" OPT nat_or_var OPT nat_or_var auto_using hintbases | "info_eauto" OPT nat_or_var OPT nat_or_var auto_using hintbases | "dfs" "eauto" OPT nat_or_var auto_using hintbases | "bfs" "eauto" OPT nat_or_var auto_using hintbases | "autounfold" hintbases clause_dft_concl | "autounfold_one" hintbases "in" hyp | "autounfold_one" hintbases | "unify" constr constr | "unify" constr constr "with" preident | "typeclasses" "eauto" "dfs" OPT nat_or_var "with" LIST1 preident | "typeclasses" "eauto" "bfs" OPT nat_or_var "with" LIST1 preident | "typeclasses" "eauto" "best_effort" OPT nat_or_var "with" LIST1 preident | "typeclasses" "eauto" OPT nat_or_var "with" LIST1 preident | "typeclasses" "eauto" "bfs" OPT nat_or_var | "typeclasses" "eauto" "dfs" OPT nat_or_var | "typeclasses" "eauto" "best_effort" OPT nat_or_var | "typeclasses" "eauto" OPT nat_or_var | "head_of_constr" ident constr | "not_evar" constr | "is_ground" constr | "autoapply" constr "with" preident | "progress_evars" tactic | "decide" "equality" | "compare" constr constr | "rewrite_strat" rewstrategy "in" hyp | "rewrite_strat" rewstrategy | "rewrite_db" preident "in" hyp | "rewrite_db" preident | "substitute" orient glob_constr_with_bindings | "setoid_rewrite" orient glob_constr_with_bindings | "setoid_rewrite" orient glob_constr_with_bindings "in" hyp | "setoid_rewrite" orient glob_constr_with_bindings "at" occurrences | "setoid_rewrite" orient glob_constr_with_bindings "at" occurrences "in" hyp | "setoid_rewrite" orient glob_constr_with_bindings "in" hyp "at" occurrences | "setoid_symmetry" | 
"setoid_symmetry" "in" hyp | "setoid_reflexivity" | "setoid_transitivity" constr | "setoid_etransitivity" | "intros" ne_intropatterns | "intros" | "eintros" ne_intropatterns | "eintros" | "apply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as | "eapply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as | "simple" "apply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as | "simple" "eapply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as | "elim" constr_with_bindings_arg OPT eliminator | "eelim" constr_with_bindings_arg OPT eliminator | "case" induction_clause_list | "ecase" induction_clause_list | "fix" ident natural "with" LIST1 fixdecl | "cofix" ident "with" LIST1 cofixdecl | "pose" bindings_with_parameters | "pose" constr as_name | "epose" bindings_with_parameters | "epose" constr as_name | "set" bindings_with_parameters clause_dft_concl | "set" constr as_name clause_dft_concl | "eset" bindings_with_parameters clause_dft_concl | "eset" constr as_name clause_dft_concl | "remember" constr as_name eqn_ipat clause_dft_all | "eremember" constr as_name eqn_ipat clause_dft_all | "assert" test_lpar_id_coloneq "(" identref ":=" lconstr ")" | "eassert" test_lpar_id_coloneq "(" identref ":=" lconstr ")" | "assert" test_lpar_id_colon "(" identref ":" lconstr ")" by_tactic | "eassert" test_lpar_id_colon "(" identref ":" lconstr ")" by_tactic | "enough" test_lpar_id_colon "(" identref ":" lconstr ")" by_tactic | "eenough" test_lpar_id_colon "(" identref ":" lconstr ")" by_tactic | "assert" constr as_ipat by_tactic | "eassert" constr as_ipat by_tactic | "pose" "proof" test_lpar_id_coloneq "(" identref ":=" lconstr ")" | "epose" "proof" test_lpar_id_coloneq "(" identref ":=" lconstr ")" | "pose" "proof" lconstr as_ipat | "epose" "proof" lconstr as_ipat | "enough" constr as_ipat by_tactic | "eenough" constr as_ipat by_tactic | "generalize" constr | "generalize" constr LIST1 constr | "generalize" constr lookup_at_as_comma occs as_name LIST0 [ "," pattern_occ as_name ] | "induction" induction_clause_list | "einduction" induction_clause_list | "destruct" induction_clause_list | "edestruct" induction_clause_list | "rewrite" LIST1 oriented_rewriter SEP "," clause_dft_concl by_tactic | "erewrite" LIST1 oriented_rewriter SEP "," clause_dft_concl by_tactic | "dependent" [ "simple" "inversion" | "inversion" | "inversion_clear" ] quantified_hypothesis as_or_and_ipat OPT [ "with" constr ] | "simple" "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list | "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list | "inversion_clear" quantified_hypothesis as_or_and_ipat in_hyp_list | "inversion" quantified_hypothesis "using" constr in_hyp_list | "red" clause_dft_concl | "hnf" clause_dft_concl | "simpl" delta_flag OPT ref_or_pattern_occ clause_dft_concl | "cbv" strategy_flag clause_dft_concl | "cbn" strategy_flag clause_dft_concl | "lazy" strategy_flag clause_dft_concl | "compute" delta_flag clause_dft_concl | "vm_compute" OPT ref_or_pattern_occ clause_dft_concl | "native_compute" OPT ref_or_pattern_occ clause_dft_concl | "unfold" LIST1 unfold_occ SEP "," clause_dft_concl | "fold" LIST1 constr clause_dft_concl | "pattern" LIST1 pattern_occ SEP "," clause_dft_concl | "change" conversion clause_dft_concl | "change_no_check" conversion clause_dft_concl | "start" "ltac" "profiling" | "stop" "ltac" "profiling" | "reset" "ltac" "profile" | "show" "ltac" "profile" | "show" "ltac" "profile" "cutoff" integer | "show" "ltac" "profile" string | "restart_timer" OPT string | "finish_timing" OPT string | "finish_timing" 
"(" string ")" OPT string | "psatz_Z" nat_or_var tactic (* micromega plugin *) | "psatz_Z" tactic (* micromega plugin *) | "xlia" tactic (* micromega plugin *) | "xnlia" tactic (* micromega plugin *) | "xnra" tactic (* micromega plugin *) | "xnqa" tactic (* micromega plugin *) | "sos_Z" tactic (* micromega plugin *) | "sos_Q" tactic (* micromega plugin *) | "sos_R" tactic (* micromega plugin *) | "lra_Q" tactic (* micromega plugin *) | "lra_R" tactic (* micromega plugin *) | "psatz_R" nat_or_var tactic (* micromega plugin *) | "psatz_R" tactic (* micromega plugin *) | "psatz_Q" nat_or_var tactic (* micromega plugin *) | "psatz_Q" tactic (* micromega plugin *) | "zify_iter_specs" (* micromega plugin *) | "zify_op" (* micromega plugin *) | "zify_saturate" (* micromega plugin *) | "zify_iter_let" tactic (* micromega plugin *) | "zify_elim_let" (* micromega plugin *) | "nsatz_compute" constr (* nsatz plugin *) | "protect_fv" string "in" ident (* ring plugin *) | "protect_fv" string (* ring plugin *) | "ring_lookup" tactic0 "[" LIST0 constr "]" LIST1 constr (* ring plugin *) | "field_lookup" tactic "[" LIST0 constr "]" LIST1 constr (* ring plugin *) | "rtauto" | "by" ssrhintarg (* SSR plugin *) | "clear" natural (* SSR plugin *) | "move" ssrmovearg ssrrpat (* SSR plugin *) | "move" ssrmovearg ssrclauses (* SSR plugin *) | "move" ssrrpat (* SSR plugin *) | "move" (* SSR plugin *) | "case" ssrcasearg ssrclauses (* SSR plugin *) | "case" (* SSR plugin *) | "elim" ssrarg ssrclauses (* SSR plugin *) | "elim" (* SSR plugin *) | "apply" ssrapplyarg (* SSR plugin *) | "apply" (* SSR plugin *) | "exact" ssrexactarg (* SSR plugin *) | "exact" (* SSR plugin *) | "exact" "<:" lconstr (* SSR plugin *) | "congr" ssrcongrarg (* SSR plugin *) | "ssrinstancesofruleL2R" ssrterm (* SSR plugin *) | "ssrinstancesofruleR2L" ssrterm (* SSR plugin *) | "rewrite" ssrrwargs ssrclauses (* SSR plugin *) | "unlock" ssrunlockargs ssrclauses (* SSR plugin *) | "pose" ssrfixfwd (* SSR plugin *) | "pose" ssrcofixfwd (* SSR plugin *) | "pose" ssrfwdid ssrposefwd (* SSR plugin *) | "set" ssrfwdid ssrsetfwd ssrclauses (* SSR plugin *) | "abstract" ssrdgens (* SSR plugin *) | "have" ssrhavefwdwbinders (* SSR plugin *) | "have" "suff" ssrhpats_nobs ssrhavefwd (* SSR plugin *) | "have" "suffices" ssrhpats_nobs ssrhavefwd (* SSR plugin *) | "suff" "have" ssrhpats_nobs ssrhavefwd (* SSR plugin *) | "suffices" "have" ssrhpats_nobs ssrhavefwd (* SSR plugin *) | "suff" ssrsufffwd (* SSR plugin *) | "suffices" ssrsufffwd (* SSR plugin *) | "wlog" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | "wlog" "suff" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | "wlog" "suffices" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | "without" "loss" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | "without" "loss" "suff" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | "without" "loss" "suffices" ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | "gen" "have" ssrclear ssr_idcomma ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | "generally" "have" ssrclear ssr_idcomma ssrhpats_nobs ssrwlogfwd ssrhint (* SSR plugin *) | "under" ssrrwarg (* SSR plugin *) | "under" ssrrwarg ssrintros_ne (* SSR plugin *) | "under" ssrrwarg ssrintros_ne "do" ssrhint3arg (* SSR plugin *) | "under" ssrrwarg "do" ssrhint3arg (* SSR plugin *) | "ssrinstancesoftpat" G_SSRMATCHING_cpattern (* SSR plugin *) ] mlname: [ | preident (* extraction plugin *) | string (* extraction plugin *) ] int_or_id: [ | preident (* extraction plugin *) | integer (* extraction 
plugin *) ] language: [ | "OCaml" (* extraction plugin *) | "Haskell" (* extraction plugin *) | "Scheme" (* extraction plugin *) | "JSON" (* extraction plugin *) ] firstorder_using: [ | "using" LIST1 reference SEP "," | ] fun_ind_using: [ | "using" constr_with_bindings (* funind plugin *) | (* funind plugin *) ] with_names: [ | "as" simple_intropattern (* funind plugin *) | (* funind plugin *) ] constr_comma_sequence': [ | constr "," constr_comma_sequence' (* funind plugin *) | constr (* funind plugin *) ] auto_using': [ | "using" constr_comma_sequence' (* funind plugin *) | (* funind plugin *) ] function_fix_definition: [ | Vernac.fix_definition (* funind plugin *) ] fun_scheme_arg: [ | identref ":=" "Induction" "for" reference "Sort" sort_family (* funind plugin *) ] orient: [ | "->" | "<-" | ] EXTRAARGS_natural: [ | _natural ] occurrences: [ | LIST1 integer | hyp ] glob: [ | constr ] EXTRAARGS_lconstr: [ | l_constr ] lglob: [ | EXTRAARGS_lconstr ] hloc: [ | | "in" "|-" "*" | "in" ident | "in" "(" "type" "of" ident ")" | "in" "(" "value" "of" ident ")" ] rename: [ | ident "into" ident ] by_arg_tac: [ | "by" tactic3 | ] in_clause: [ | in_clause' | "*" occs | "*" "|-" concl_occ | "|-" concl_occ | LIST1 hypident_occ SEP "," "|-" concl_occ | LIST1 hypident_occ SEP "," ] test_lpar_id_colon: [ | local_test_lpar_id_colon ] EXTRAARGS_strategy_level: [ | strategy_level0 ] strategy_level_or_var: [ | EXTRAARGS_strategy_level | identref ] comparison: [ | "=" | "<" | "<=" | ">" | ">=" ] test: [ | int_or_var comparison int_or_var ] hintbases: [ | "with" "*" | "with" LIST1 preident | ] auto_using: [ | "using" LIST1 uconstr SEP "," | ] hints_path_atom: [ | LIST1 global | "_" ] hints_path: [ | "(" hints_path ")" | hints_path "*" | "emp" | "eps" | hints_path "|" hints_path | hints_path_atom | hints_path hints_path ] opthints: [ | ":" LIST1 preident | ] debug: [ | "debug" | ] eauto_search_strategy_name: [ | "bfs" | "dfs" ] eauto_search_strategy: [ | "(" eauto_search_strategy_name ")" | ] tactic_then_last: [ | "|" LIST0 ( OPT ltac_expr5 ) SEP "|" | ] for_each_goal: [ | ltac_expr5 "|" for_each_goal | ltac_expr5 ".." tactic_then_last | ".." 
tactic_then_last | ltac_expr5 | "|" for_each_goal | ] tactic_then_locality: [ | "[" OPT ">" ] ltac_expr5: [ | binder_tactic | ltac_expr4 ] ltac_expr4: [ | ltac_expr3 ";" binder_tactic | ltac_expr3 ";" ltac_expr3 | ltac_expr3 ";" tactic_then_locality for_each_goal "]" | ltac_expr3 | ltac_expr5 ";" "first" ssr_first_else (* SSR plugin *) | ltac_expr5 ";" "first" ssrseqarg (* SSR plugin *) | ltac_expr5 ";" "last" ssrseqarg (* SSR plugin *) ] ltac_expr3: [ | "try" ltac_expr3 | "do" nat_or_var ltac_expr3 | "timeout" nat_or_var ltac_expr3 | "time" OPT string ltac_expr3 | "repeat" ltac_expr3 | "progress" ltac_expr3 | "once" ltac_expr3 | "exactly_once" ltac_expr3 | "abstract" ltac_expr2 | "abstract" ltac_expr2 "using" ident | "only" selector ":" ltac_expr3 | ltac_expr2 | "do" ssrmmod ssrdotac ssrclauses (* SSR plugin *) | "do" ssrortacarg ssrclauses (* SSR plugin *) | "do" nat_or_var ssrmmod ssrdotac ssrclauses (* SSR plugin *) | "abstract" ssrdgens (* SSR plugin *) ] ltac_expr2: [ | ltac_expr1 "+" binder_tactic | ltac_expr1 "+" ltac_expr2 | "tryif" ltac_expr5 "then" ltac_expr5 "else" ltac_expr2 | ltac_expr1 "||" binder_tactic | ltac_expr1 "||" ltac_expr2 | ltac_expr1 ] ltac_expr1: [ | match_key "goal" "with" match_context_list "end" | match_key "reverse" "goal" "with" match_context_list "end" | match_key ltac_expr5 "with" match_list "end" | "first" "[" LIST0 ltac_expr5 SEP "|" "]" | "solve" "[" LIST0 ltac_expr5 SEP "|" "]" | "idtac" LIST0 message_token | failkw [ nat_or_var | ] LIST0 message_token | simple_tactic | tactic_value | reference LIST0 tactic_arg | ltac_expr0 | ltac_expr5 ssrintros_ne (* SSR plugin *) ] ltac_expr0: [ | "(" ltac_expr5 ")" | "[" ">" for_each_goal "]" | tactic_atom | ssrparentacarg (* SSR plugin *) ] failkw: [ | "fail" | "gfail" ] binder_tactic: [ | "fun" LIST1 input_fun "=>" ltac_expr5 | "let" [ "rec" | ] LIST1 let_clause SEP "with" "in" ltac_expr5 ] tactic_arg: [ | tactic_value | Constr.constr | "()" ] tactic_value: [ | constr_eval | "fresh" LIST0 fresh_id | "type_term" uconstr | "numgoals" ] fresh_id: [ | STRING | qualid ] constr_eval: [ | "eval" red_expr "in" Constr.constr | "context" identref "[" Constr.lconstr "]" | "type" "of" Constr.constr ] constr_may_eval: [ | constr_eval | Constr.constr ] tactic_atom: [ | integer | reference | "()" ] match_key: [ | "match" | "lazymatch" | "multimatch" ] input_fun: [ | "_" | ident ] let_clause: [ | identref ":=" ltac_expr5 | "_" ":=" ltac_expr5 | identref LIST1 input_fun ":=" ltac_expr5 ] match_pattern: [ | "context" OPT Constr.ident "[" Constr.cpattern "]" | Constr.cpattern ] match_hyp: [ | name ":" match_pattern | name ":=" "[" match_pattern "]" ":" match_pattern | name ":=" match_pattern ] match_context_rule: [ | LIST0 match_hyp SEP "," "|-" match_pattern "=>" ltac_expr5 | "[" LIST0 match_hyp SEP "," "|-" match_pattern "]" "=>" ltac_expr5 | "_" "=>" ltac_expr5 ] match_context_list: [ | LIST1 match_context_rule SEP "|" | "|" LIST1 match_context_rule SEP "|" ] match_rule: [ | match_pattern "=>" ltac_expr5 | "_" "=>" ltac_expr5 ] match_list: [ | LIST1 match_rule SEP "|" | "|" LIST1 match_rule SEP "|" ] message_token: [ | identref | STRING | natural ] ltac_def_kind: [ | ":=" | "::=" ] tacdef_body: [ | Constr.global LIST1 input_fun ltac_def_kind ltac_expr5 | Constr.global ltac_def_kind ltac_expr5 ] tactic: [ | ltac_expr5 ] range_selector: [ | natural "-" natural | natural ] range_selector_or_nth: [ | natural "-" natural OPT [ "," LIST1 range_selector SEP "," ] | natural OPT [ "," LIST1 range_selector SEP "," ] ] selector: [ | 
range_selector_or_nth | test_bracket_ident "[" ident "]" ] toplevel_selector: [ | selector ":" | "!" ":" | "all" ":" ] tactic_mode: [ | OPT toplevel_selector G_vernac.query_command | OPT toplevel_selector "{" | OPT ltac_selector OPT ltac_info tactic ltac_use_default | "par" ":" OPT ltac_info tactic ltac_use_default ] ltac_selector: [ | toplevel_selector ] ltac_info: [ | "Info" natural ] ltac_use_default: [ | "." | "..." ] ltac_tactic_level: [ | "(" "at" "level" natural ")" ] ltac_production_sep: [ | "," string ] ltac_production_item: [ | string | ident "(" ident OPT ltac_production_sep ")" | ident ] ltac_tacdef_body: [ | tacdef_body ] withtac: [ | "with" Tactic.tactic | ] Constr.closed_binder: [ | "(" Prim.name ":" Constr.lconstr "|" Constr.lconstr ")" ] glob_constr_with_bindings: [ | constr_with_bindings ] rewstrategy: [ | glob | "<-" constr | "subterms" rewstrategy | "subterm" rewstrategy | "innermost" rewstrategy | "outermost" rewstrategy | "bottomup" rewstrategy | "topdown" rewstrategy | "id" | "fail" | "refl" | "progress" rewstrategy | "try" rewstrategy | "any" rewstrategy | "repeat" rewstrategy | rewstrategy ";" rewstrategy | "(" rewstrategy ")" | "choice" LIST1 rewstrategy | "old_hints" preident | "hints" preident | "terms" LIST0 constr | "eval" red_expr | "fold" constr ] int_or_var: [ | integer | identref ] nat_or_var: [ | natural | identref ] id_or_meta: [ | identref ] open_constr: [ | constr ] uconstr: [ | constr ] destruction_arg: [ | natural | test_lpar_id_rpar constr_with_bindings | constr_with_bindings_arg ] constr_with_bindings_arg: [ | ">" constr_with_bindings | constr_with_bindings ] quantified_hypothesis: [ | ident | natural ] conversion: [ | constr | constr "with" constr | constr "at" occs_nums "with" constr ] occs_nums: [ | LIST1 nat_or_var | "-" LIST1 nat_or_var ] occs: [ | "at" occs_nums | ] pattern_occ: [ | constr occs ] ref_or_pattern_occ: [ | smart_global occs | constr occs ] unfold_occ: [ | smart_global occs ] intropatterns: [ | LIST0 intropattern ] ne_intropatterns: [ | LIST1 intropattern ] or_and_intropattern: [ | "[" LIST1 intropatterns SEP "|" "]" | "()" | "(" simple_intropattern ")" | "(" simple_intropattern "," LIST1 simple_intropattern SEP "," ")" | "(" simple_intropattern "&" LIST1 simple_intropattern SEP "&" ")" ] equality_intropattern: [ | "->" | "<-" | "[=" intropatterns "]" ] naming_intropattern: [ | pattern_ident | "?" 
| ident ] intropattern: [ | simple_intropattern | "*" | "**" ] simple_intropattern: [ | simple_intropattern_closed LIST0 [ "%" term0 ] ] simple_intropattern_closed: [ | or_and_intropattern | equality_intropattern | "_" | naming_intropattern ] simple_binding: [ | "(" identref ":=" lconstr ")" | "(" natural ":=" lconstr ")" ] bindings: [ | test_lpar_idnum_coloneq LIST1 simple_binding | LIST1 constr ] constr_with_bindings: [ | constr with_bindings ] with_bindings: [ | "with" bindings | ] red_flag: [ | "beta" | "iota" | "match" | "fix" | "cofix" | "zeta" | "delta" delta_flag ] delta_flag: [ | "-" "[" LIST1 smart_global "]" | "[" LIST1 smart_global "]" | ] strategy_flag: [ | LIST1 red_flag | delta_flag ] red_expr: [ | "red" | "hnf" | "simpl" delta_flag OPT ref_or_pattern_occ | "cbv" strategy_flag | "cbn" strategy_flag | "lazy" strategy_flag | "compute" delta_flag | "vm_compute" OPT ref_or_pattern_occ | "native_compute" OPT ref_or_pattern_occ | "unfold" LIST1 unfold_occ SEP "," | "fold" LIST1 constr | "pattern" LIST1 pattern_occ SEP "," | IDENT ] hypident: [ | id_or_meta | "(" "type" "of" id_or_meta ")" | "(" "value" "of" id_or_meta ")" | "(" "type" "of" Prim.identref ")" (* SSR plugin *) | "(" "value" "of" Prim.identref ")" (* SSR plugin *) ] hypident_occ: [ | hypident occs ] clause_dft_concl: [ | "in" in_clause | occs | ] clause_dft_all: [ | "in" in_clause | ] opt_clause: [ | "in" in_clause | "at" occs_nums | ] concl_occ: [ | "*" occs | ] in_hyp_list: [ | "in" LIST1 id_or_meta | ] in_hyp_as: [ | "in" LIST1 [ id_or_meta as_ipat ] SEP "," | ] orient_rw: [ | "->" | "<-" | ] simple_binder: [ | name | "(" LIST1 name ":" lconstr ")" ] fixdecl: [ | "(" ident LIST0 simple_binder struct_annot ":" lconstr ")" ] struct_annot: [ | "{" "struct" name "}" | ] cofixdecl: [ | "(" ident LIST0 simple_binder ":" lconstr ")" ] bindings_with_parameters: [ | check_for_coloneq "(" ident LIST0 simple_binder ":=" lconstr ")" ] eliminator: [ | "using" constr_with_bindings ] as_ipat: [ | "as" simple_intropattern | ] or_and_intropattern_loc: [ | or_and_intropattern | identref ] as_or_and_ipat: [ | "as" or_and_intropattern_loc | "as" equality_intropattern | ] eqn_ipat: [ | "eqn" ":" naming_intropattern | ] as_name: [ | "as" ident | ] by_tactic: [ | "by" ltac_expr3 | ] rewriter: [ | "!" constr_with_bindings_arg | [ "?" | LEFTQMARK ] constr_with_bindings_arg | natural "!" constr_with_bindings_arg | natural [ "?" 
| LEFTQMARK ] constr_with_bindings_arg | natural constr_with_bindings_arg | constr_with_bindings_arg ] oriented_rewriter: [ | orient_rw rewriter ] induction_clause: [ | destruction_arg as_or_and_ipat eqn_ipat opt_clause ] induction_clause_list: [ | LIST1 induction_clause SEP "," OPT eliminator opt_clause ] ring_mod: [ | "decidable" constr (* ring plugin *) | "abstract" (* ring plugin *) | "morphism" constr (* ring plugin *) | "constants" "[" tactic "]" (* ring plugin *) | "closed" "[" LIST1 global "]" (* ring plugin *) | "preprocess" "[" tactic "]" (* ring plugin *) | "postprocess" "[" tactic "]" (* ring plugin *) | "setoid" constr constr (* ring plugin *) | "sign" constr (* ring plugin *) | "power" constr "[" LIST1 global "]" (* ring plugin *) | "power_tac" constr "[" tactic "]" (* ring plugin *) | "div" constr (* ring plugin *) ] ring_mods: [ | "(" LIST1 ring_mod SEP "," ")" (* ring plugin *) ] field_mod: [ | ring_mod (* ring plugin *) | "completeness" constr (* ring plugin *) ] field_mods: [ | "(" LIST1 field_mod SEP "," ")" (* ring plugin *) ] ssrtacarg: [ | ltac_expr5 (* SSR plugin *) ] ssrtac3arg: [ | ltac_expr3 (* SSR plugin *) ] ssrtclarg: [ | ssrtacarg (* SSR plugin *) ] ssrhyp: [ | ident (* SSR plugin *) ] ssrhoi_hyp: [ | ident (* SSR plugin *) ] ssrhoi_id: [ | ident (* SSR plugin *) ] ssrsimpl_ne: [ | "//=" (* SSR plugin *) | "/=" (* SSR plugin *) | test_ssrslashnum11 "/" natural "/" natural "=" (* SSR plugin *) | test_ssrslashnum10 "/" natural "/" (* SSR plugin *) | test_ssrslashnum10 "/" natural "=" (* SSR plugin *) | test_ssrslashnum10 "/" natural "/=" (* SSR plugin *) | test_ssrslashnum10 "/" natural "/" "=" (* SSR plugin *) | test_ssrslashnum01 "//" natural "=" (* SSR plugin *) | test_ssrslashnum00 "//" (* SSR plugin *) ] ssrclear_ne: [ | "{" LIST1 ssrhyp "}" (* SSR plugin *) ] ssrclear: [ | ssrclear_ne (* SSR plugin *) | (* SSR plugin *) ] ssrindex: [ ] ssrocc: [ | natural LIST0 natural (* SSR plugin *) | "-" LIST0 natural (* SSR plugin *) | "+" LIST0 natural (* SSR plugin *) ] ssrmmod: [ | "!" (* SSR plugin *) | LEFTQMARK (* SSR plugin *) | "?" (* SSR plugin *) ] ssrmult_ne: [ | natural ssrmmod (* SSR plugin *) | ssrmmod (* SSR plugin *) ] ssrmult: [ | ssrmult_ne (* SSR plugin *) | (* SSR plugin *) ] ssrdocc: [ | "{" ssrocc "}" (* SSR plugin *) | "{" LIST0 ssrhyp "}" (* SSR plugin *) ] ssrterm: [ | ssrtermkind Pcoq.Constr.constr (* SSR plugin *) ] ast_closure_term: [ | term_annotation constr (* SSR plugin *) ] ast_closure_lterm: [ | term_annotation lconstr (* SSR plugin *) ] ssrbwdview: [ | test_not_ssrslashnum "/" Pcoq.Constr.constr (* SSR plugin *) | test_not_ssrslashnum "/" Pcoq.Constr.constr ssrbwdview (* SSR plugin *) ] ssrfwdview: [ | test_not_ssrslashnum "/" ast_closure_term (* SSR plugin *) | test_not_ssrslashnum "/" ast_closure_term ssrfwdview (* SSR plugin *) ] ident_no_do: [ | test_ident_no_do IDENT (* SSR plugin *) ] ssripat: [ | "_" (* SSR plugin *) | "*" (* SSR plugin *) | ">" (* SSR plugin *) | ident_no_do (* SSR plugin *) | "?" 
(* SSR plugin *) | "+" (* SSR plugin *) | "++" (* SSR plugin *) | ssrsimpl_ne (* SSR plugin *) | ssrdocc "->" (* SSR plugin *) | ssrdocc "<-" (* SSR plugin *) | ssrdocc (* SSR plugin *) | "->" (* SSR plugin *) | "<-" (* SSR plugin *) | "-" (* SSR plugin *) | "-/" "=" (* SSR plugin *) | "-/=" (* SSR plugin *) | "-/" "/" (* SSR plugin *) | "-//" (* SSR plugin *) | "-/" integer "/" (* SSR plugin *) | "-/" "/=" (* SSR plugin *) | "-//" "=" (* SSR plugin *) | "-//=" (* SSR plugin *) | "-/" integer "/=" (* SSR plugin *) | "-/" integer "/" integer "=" (* SSR plugin *) | ssrfwdview (* SSR plugin *) | "[" ":" LIST0 ident "]" (* SSR plugin *) | "[:" LIST0 ident "]" (* SSR plugin *) | ssrcpat (* SSR plugin *) ] ssripats: [ | ssripat ssripats (* SSR plugin *) | (* SSR plugin *) ] ssriorpat: [ | ssripats "|" ssriorpat (* SSR plugin *) | ssripats "|-" ">" ssriorpat (* SSR plugin *) | ssripats "|-" ssriorpat (* SSR plugin *) | ssripats "|->" ssriorpat (* SSR plugin *) | ssripats "||" ssriorpat (* SSR plugin *) | ssripats "|||" ssriorpat (* SSR plugin *) | ssripats "||||" ssriorpat (* SSR plugin *) | ssripats (* SSR plugin *) ] ssrcpat: [ | test_nohidden "[" hat "]" (* SSR plugin *) | test_nohidden "[" ssriorpat "]" (* SSR plugin *) | test_nohidden "[=" ssriorpat "]" (* SSR plugin *) ] hat: [ | "^" ident (* SSR plugin *) | "^" "~" ident (* SSR plugin *) | "^" "~" natural (* SSR plugin *) | "^~" ident (* SSR plugin *) | "^~" natural (* SSR plugin *) ] ssripats_ne: [ | ssripat ssripats (* SSR plugin *) ] ssrhpats: [ | ssripats (* SSR plugin *) ] ssrhpats_wtransp: [ | ssripats (* SSR plugin *) | ssripats "@" ssripats (* SSR plugin *) ] ssrhpats_nobs: [ | ssripats (* SSR plugin *) ] ssrrpat: [ | "->" (* SSR plugin *) | "<-" (* SSR plugin *) ] ssrintros_ne: [ | "=>" ssripats_ne (* SSR plugin *) ] ssrintros: [ | ssrintros_ne (* SSR plugin *) | (* SSR plugin *) ] ssrintrosarg: [ ] ssrfwdid: [ | test_ssrfwdid Prim.ident (* SSR plugin *) ] ssrortacs: [ | ssrtacarg "|" ssrortacs (* SSR plugin *) | ssrtacarg "|" (* SSR plugin *) | ssrtacarg (* SSR plugin *) | "|" ssrortacs (* SSR plugin *) | "|" (* SSR plugin *) ] ssrhintarg: [ | "[" "]" (* SSR plugin *) | "[" ssrortacs "]" (* SSR plugin *) | ssrtacarg (* SSR plugin *) ] ssrhint3arg: [ | "[" "]" (* SSR plugin *) | "[" ssrortacs "]" (* SSR plugin *) | ssrtac3arg (* SSR plugin *) ] ssrortacarg: [ | "[" ssrortacs "]" (* SSR plugin *) ] ssrhint: [ | (* SSR plugin *) | "by" ssrhintarg (* SSR plugin *) ] ssrwgen: [ | ssrclear_ne (* SSR plugin *) | ssrhoi_hyp (* SSR plugin *) | "@" ssrhoi_hyp (* SSR plugin *) | "(" ssrhoi_id ":=" lcpattern ")" (* SSR plugin *) | "(" ssrhoi_id ")" (* SSR plugin *) | "(@" ssrhoi_id ":=" lcpattern ")" (* SSR plugin *) | "(" "@" ssrhoi_id ":=" lcpattern ")" (* SSR plugin *) ] ssrclausehyps: [ | ssrwgen "," ssrclausehyps (* SSR plugin *) | ssrwgen ssrclausehyps (* SSR plugin *) | ssrwgen (* SSR plugin *) ] ssrclauses: [ | "in" ssrclausehyps "|-" "*" (* SSR plugin *) | "in" ssrclausehyps "|-" (* SSR plugin *) | "in" ssrclausehyps "*" (* SSR plugin *) | "in" ssrclausehyps (* SSR plugin *) | "in" "|-" "*" (* SSR plugin *) | "in" "*" (* SSR plugin *) | "in" "*" "|-" (* SSR plugin *) | (* SSR plugin *) ] ssrfwd: [ | ":=" ast_closure_lterm (* SSR plugin *) | ":" ast_closure_lterm ":=" ast_closure_lterm (* SSR plugin *) ] ssrbvar: [ | ident (* SSR plugin *) | "_" (* SSR plugin *) ] ssrbinder: [ | ssrbvar (* SSR plugin *) | "(" ssrbvar ")" (* SSR plugin *) | "(" ssrbvar ":" lconstr ")" (* SSR plugin *) | "(" ssrbvar LIST1 ssrbvar ":" lconstr ")" (* SSR 
plugin *) | "(" ssrbvar ":" lconstr ":=" lconstr ")" (* SSR plugin *) | "(" ssrbvar ":=" lconstr ")" (* SSR plugin *) | [ "of" | "&" ] term99 (* SSR plugin *) ] ssrstruct: [ | "{" "struct" ident "}" (* SSR plugin *) | (* SSR plugin *) ] ssrposefwd: [ | LIST0 ssrbinder ssrfwd (* SSR plugin *) ] ssrfixfwd: [ | "fix" ssrbvar LIST0 ssrbinder ssrstruct ssrfwd (* SSR plugin *) ] ssrcofixfwd: [ | "cofix" ssrbvar LIST0 ssrbinder ssrfwd (* SSR plugin *) ] ssrsetfwd: [ | ":" ast_closure_lterm ":=" "{" ssrocc "}" cpattern (* SSR plugin *) | ":" ast_closure_lterm ":=" lcpattern (* SSR plugin *) | ":=" "{" ssrocc "}" cpattern (* SSR plugin *) | ":=" lcpattern (* SSR plugin *) ] ssrhavefwd: [ | ":" ast_closure_lterm ssrhint (* SSR plugin *) | ":" ast_closure_lterm ":=" ast_closure_lterm (* SSR plugin *) | ":" ast_closure_lterm ":=" (* SSR plugin *) | ":=" ast_closure_lterm (* SSR plugin *) ] ssrhavefwdwbinders: [ | ssrhpats_wtransp LIST0 ssrbinder ssrhavefwd (* SSR plugin *) ] ssrdoarg: [ ] ssrseqarg: [ | ssrswap (* SSR plugin *) | ssrseqidx ssrortacarg OPT ssrorelse (* SSR plugin *) | ssrseqidx ssrswap (* SSR plugin *) | ltac_expr3 (* SSR plugin *) ] ssrseqidx: [ | test_ssrseqvar Prim.ident (* SSR plugin *) | Prim.natural (* SSR plugin *) ] ssrswap: [ | "first" (* SSR plugin *) | "last" (* SSR plugin *) ] ssrorelse: [ | "||" ltac_expr2 (* SSR plugin *) ] Prim.ident: [ | IDENT ssr_null_entry (* SSR plugin *) ] ssrparentacarg: [ | "(" ltac_expr5 ")" (* SSR plugin *) ] ssrdotac: [ | ltac_expr3 (* SSR plugin *) | ssrortacarg (* SSR plugin *) ] ssrseqdir: [ ] ssr_first: [ | ssr_first ssrintros_ne (* SSR plugin *) | "[" LIST0 ltac_expr5 SEP "|" "]" (* SSR plugin *) ] ssr_first_else: [ | ssr_first ssrorelse (* SSR plugin *) | ssr_first (* SSR plugin *) ] ssrgen: [ | ssrdocc cpattern (* SSR plugin *) | cpattern (* SSR plugin *) ] ssrdgens_tl: [ | "{" LIST1 ssrhyp "}" cpattern ssrdgens_tl (* SSR plugin *) | "{" LIST1 ssrhyp "}" (* SSR plugin *) | "{" ssrocc "}" cpattern ssrdgens_tl (* SSR plugin *) | "/" ssrdgens_tl (* SSR plugin *) | cpattern ssrdgens_tl (* SSR plugin *) | (* SSR plugin *) ] ssrdgens: [ | ":" ssrgen ssrdgens_tl (* SSR plugin *) ] ssreqid: [ | test_ssreqid ssreqpat (* SSR plugin *) | test_ssreqid (* SSR plugin *) ] ssreqpat: [ | Prim.ident (* SSR plugin *) | "_" (* SSR plugin *) | "?" 
(* SSR plugin *) | "+" (* SSR plugin *) | ssrdocc "->" (* SSR plugin *) | ssrdocc "<-" (* SSR plugin *) | "->" (* SSR plugin *) | "<-" (* SSR plugin *) ] ssrarg: [ | ssrfwdview ssreqid ssrdgens ssrintros (* SSR plugin *) | ssrfwdview ssrclear ssrintros (* SSR plugin *) | ssreqid ssrdgens ssrintros (* SSR plugin *) | ssrclear_ne ssrintros (* SSR plugin *) | ssrintros_ne (* SSR plugin *) ] ssrmovearg: [ | ssrarg (* SSR plugin *) ] ssrcasearg: [ | ssrarg (* SSR plugin *) ] ssragen: [ | "{" LIST1 ssrhyp "}" ssrterm (* SSR plugin *) | ssrterm (* SSR plugin *) ] ssragens: [ | "{" LIST1 ssrhyp "}" ssrterm ssragens (* SSR plugin *) | "{" LIST1 ssrhyp "}" (* SSR plugin *) | ssrterm ssragens (* SSR plugin *) | (* SSR plugin *) ] ssrapplyarg: [ | ":" ssragen ssragens ssrintros (* SSR plugin *) | ssrclear_ne ssrintros (* SSR plugin *) | ssrintros_ne (* SSR plugin *) | ssrbwdview ":" ssragen ssragens ssrintros (* SSR plugin *) | ssrbwdview ssrclear ssrintros (* SSR plugin *) ] ssrexactarg: [ | ":" ssragen ssragens (* SSR plugin *) | ssrbwdview ssrclear (* SSR plugin *) | ssrclear_ne (* SSR plugin *) ] ssrcongrarg: [ | natural constr ssrdgens (* SSR plugin *) | natural constr (* SSR plugin *) | constr ssrdgens (* SSR plugin *) | constr (* SSR plugin *) ] ssrrwocc: [ | "{" LIST0 ssrhyp "}" (* SSR plugin *) | "{" ssrocc "}" (* SSR plugin *) | (* SSR plugin *) ] ssrrule_ne: [ | test_not_ssrslashnum [ "/" ssrterm | ssrterm | ssrsimpl_ne ] (* SSR plugin *) | ssrsimpl_ne (* SSR plugin *) ] ssrrule: [ | ssrrule_ne (* SSR plugin *) | (* SSR plugin *) ] ssrpattern_squarep: [ | "[" rpattern "]" (* SSR plugin *) | (* SSR plugin *) ] ssrpattern_ne_squarep: [ | "[" rpattern "]" (* SSR plugin *) ] ssrrwarg: [ | "-" ssrmult ssrrwocc ssrpattern_squarep ssrrule_ne (* SSR plugin *) | "-/" ssrterm (* SSR plugin *) | ssrmult_ne ssrrwocc ssrpattern_squarep ssrrule_ne (* SSR plugin *) | "{" LIST1 ssrhyp "}" ssrpattern_ne_squarep ssrrule_ne (* SSR plugin *) | "{" LIST1 ssrhyp "}" ssrrule (* SSR plugin *) | "{" ssrocc "}" ssrpattern_squarep ssrrule_ne (* SSR plugin *) | "{" "}" ssrpattern_squarep ssrrule_ne (* SSR plugin *) | ssrpattern_ne_squarep ssrrule_ne (* SSR plugin *) | ssrrule_ne (* SSR plugin *) ] ssrrwargs: [ | test_ssr_rw_syntax LIST1 ssrrwarg (* SSR plugin *) ] ssrunlockarg: [ | "{" ssrocc "}" ssrterm (* SSR plugin *) | ssrterm (* SSR plugin *) ] ssrunlockargs: [ | LIST0 ssrunlockarg (* SSR plugin *) ] ssrsufffwd: [ | ssrhpats LIST0 ssrbinder ":" ast_closure_lterm ssrhint (* SSR plugin *) ] ssrwlogfwd: [ | ":" LIST0 ssrwgen "/" ast_closure_lterm (* SSR plugin *) ] ssr_idcomma: [ | (* SSR plugin *) | test_idcomma [ IDENT | "_" ] "," (* SSR plugin *) ] ssr_rtype: [ | "return" term100 (* SSR plugin *) ] ssr_mpat: [ | pattern200 (* SSR plugin *) ] ssr_dpat: [ | ssr_mpat "in" pattern200 ssr_rtype (* SSR plugin *) | ssr_mpat ssr_rtype (* SSR plugin *) | ssr_mpat (* SSR plugin *) ] ssr_dthen: [ | ssr_dpat "then" lconstr (* SSR plugin *) ] ssr_elsepat: [ | "else" (* SSR plugin *) ] ssr_else: [ | ssr_elsepat lconstr (* SSR plugin *) ] ssrhintref: [ | constr (* SSR plugin *) | constr "|" natural (* SSR plugin *) ] ssrviewpos: [ | "for" "move" "/" (* SSR plugin *) | "for" "apply" "/" (* SSR plugin *) | "for" "apply" "/" "/" (* SSR plugin *) | "for" "apply" "//" (* SSR plugin *) | (* SSR plugin *) ] ssrviewposspc: [ | ssrviewpos (* SSR plugin *) ] rpattern: [ | lconstr (* SSR plugin *) | "in" lconstr (* SSR plugin *) | lconstr "in" lconstr (* SSR plugin *) | "in" lconstr "in" lconstr (* SSR plugin *) | lconstr "in" lconstr 
"in" lconstr (* SSR plugin *) | lconstr "as" lconstr "in" lconstr (* SSR plugin *) ] G_SSRMATCHING_cpattern: [ | "Qed" constr (* SSR plugin *) | ssrtermkind constr (* SSR plugin *) ] lcpattern: [ | "Qed" lconstr (* SSR plugin *) | ssrtermkind lconstr (* SSR plugin *) ] ssrpatternarg: [ | rpattern (* SSR plugin *) ] deprecated_number_modifier: [ | | "(" "warning" "after" bignat ")" | "(" "abstract" "after" bignat ")" ] number_string_mapping: [ | reference "=>" reference | "[" reference "]" "=>" reference ] number_string_via: [ | "via" reference "mapping" "[" LIST1 number_string_mapping SEP "," "]" ] number_modifier: [ | "warning" "after" bignat | "abstract" "after" bignat | number_string_via ] number_options: [ | "(" LIST1 number_modifier SEP "," ")" ] string_option: [ | "(" number_string_via ")" ] tac2pat1: [ | Prim.qualid LIST1 tac2pat0 (* ltac2 plugin *) | Prim.qualid (* ltac2 plugin *) | "[" "]" (* ltac2 plugin *) | tac2pat0 "::" tac2pat0 (* ltac2 plugin *) | tac2pat0 (* ltac2 plugin *) ] tac2pat0: [ | "_" (* ltac2 plugin *) | "()" (* ltac2 plugin *) | Prim.qualid (* ltac2 plugin *) | "(" atomic_tac2pat ")" (* ltac2 plugin *) ] atomic_tac2pat: [ | (* ltac2 plugin *) | tac2pat1 ":" ltac2_type5 (* ltac2 plugin *) | tac2pat1 "," LIST0 tac2pat1 SEP "," (* ltac2 plugin *) | tac2pat1 (* ltac2 plugin *) ] ltac2_expr6: [ | ltac2_expr5 ";" ltac2_expr6 (* ltac2 plugin *) | ltac2_expr5 (* ltac2 plugin *) ] ltac2_expr5: [ | "fun" LIST1 G_LTAC2_input_fun type_cast "=>" ltac2_expr6 (* ltac2 plugin *) | "let" rec_flag LIST1 G_LTAC2_let_clause SEP "with" "in" ltac2_expr6 (* ltac2 plugin *) | "match" ltac2_expr5 "with" G_LTAC2_branches "end" (* ltac2 plugin *) | "if" ltac2_expr5 "then" ltac2_expr5 "else" ltac2_expr5 (* ltac2 plugin *) | ltac2_expr4 (* ltac2 plugin *) ] ltac2_expr4: [ | ltac2_expr3 (* ltac2 plugin *) ] ltac2_expr3: [ | ltac2_expr2 "," LIST1 ltac2_expr2 SEP "," (* ltac2 plugin *) | ltac2_expr2 (* ltac2 plugin *) ] ltac2_expr2: [ | ltac2_expr1 "::" ltac2_expr2 (* ltac2 plugin *) | ltac2_expr1 (* ltac2 plugin *) ] ltac2_expr1: [ | ltac2_expr0 LIST1 ltac2_expr0 (* ltac2 plugin *) | ltac2_expr0 ".(" Prim.qualid ")" (* ltac2 plugin *) | ltac2_expr0 ".(" Prim.qualid ")" ":=" ltac2_expr5 (* ltac2 plugin *) | ltac2_expr0 (* ltac2 plugin *) ] ltac2_expr0: [ | "(" ltac2_expr6 ")" (* ltac2 plugin *) | "(" ltac2_expr6 ":" ltac2_type5 ")" (* ltac2 plugin *) | "()" (* ltac2 plugin *) | "(" ")" (* ltac2 plugin *) | "[" LIST0 ltac2_expr5 SEP ";" "]" (* ltac2 plugin *) | "{" tac2rec_fieldexprs "}" (* ltac2 plugin *) | G_LTAC2_tactic_atom (* ltac2 plugin *) ] G_LTAC2_branches: [ | (* ltac2 plugin *) | "|" LIST1 branch SEP "|" (* ltac2 plugin *) | LIST1 branch SEP "|" (* ltac2 plugin *) ] branch: [ | tac2pat1 "=>" ltac2_expr6 (* ltac2 plugin *) ] rec_flag: [ | "rec" (* ltac2 plugin *) | (* ltac2 plugin *) ] mut_flag: [ | "mutable" (* ltac2 plugin *) | (* ltac2 plugin *) ] ltac2_typevar: [ | "'" Prim.ident (* ltac2 plugin *) ] G_LTAC2_tactic_atom: [ | Prim.integer (* ltac2 plugin *) | Prim.string (* ltac2 plugin *) | Prim.qualid (* ltac2 plugin *) | "@" Prim.ident (* ltac2 plugin *) | "&" lident (* ltac2 plugin *) | "'" Constr.constr (* ltac2 plugin *) | "constr" ":" "(" Constr.lconstr ")" (* ltac2 plugin *) | "open_constr" ":" "(" Constr.lconstr ")" (* ltac2 plugin *) | "ident" ":" "(" lident ")" (* ltac2 plugin *) | "pat" ":" "(" Constr.cpattern ")" (* ltac2 plugin *) | "reference" ":" "(" globref ")" (* ltac2 plugin *) | "ltac1" ":" "(" ltac1_expr_in_env ")" (* ltac2 plugin *) | "ltac1val" ":" "(" 
ltac1_expr_in_env ")" (* ltac2 plugin *) ] ltac1_expr_in_env: [ | test_ltac1_env LIST0 locident "|-" ltac_expr5 (* ltac2 plugin *) | ltac_expr5 (* ltac2 plugin *) ] tac2expr_in_env: [ | test_ltac1_env LIST0 locident "|-" ltac2_expr6 (* ltac2 plugin *) | ltac2_expr6 (* ltac2 plugin *) ] type_cast: [ | (* ltac2 plugin *) | ":" ltac2_type5 (* ltac2 plugin *) ] G_LTAC2_let_clause: [ | let_binder type_cast ":=" ltac2_expr6 (* ltac2 plugin *) ] let_binder: [ | LIST1 G_LTAC2_input_fun (* ltac2 plugin *) ] ltac2_type5: [ | ltac2_type2 "->" ltac2_type5 (* ltac2 plugin *) | ltac2_type2 (* ltac2 plugin *) ] ltac2_type2: [ | ltac2_type1 "*" LIST1 ltac2_type1 SEP "*" (* ltac2 plugin *) | ltac2_type1 (* ltac2 plugin *) ] ltac2_type1: [ | ltac2_type0 Prim.qualid (* ltac2 plugin *) | ltac2_type0 (* ltac2 plugin *) ] ltac2_type0: [ | "(" LIST1 ltac2_type5 SEP "," ")" OPT Prim.qualid (* ltac2 plugin *) | ltac2_typevar (* ltac2 plugin *) | "_" (* ltac2 plugin *) | Prim.qualid (* ltac2 plugin *) ] locident: [ | Prim.ident (* ltac2 plugin *) ] G_LTAC2_binder: [ | "_" (* ltac2 plugin *) | Prim.ident (* ltac2 plugin *) ] G_LTAC2_input_fun: [ | tac2pat0 (* ltac2 plugin *) ] tac2def_body: [ | G_LTAC2_binder LIST0 G_LTAC2_input_fun type_cast ":=" ltac2_expr6 (* ltac2 plugin *) ] tac2def_val: [ | mut_flag rec_flag LIST1 tac2def_body SEP "with" (* ltac2 plugin *) ] tac2def_mut: [ | "Set" Prim.qualid OPT [ "as" locident ] ":=" ltac2_expr6 (* ltac2 plugin *) ] tac2typ_knd: [ | ltac2_type5 (* ltac2 plugin *) | "[" ".." "]" (* ltac2 plugin *) | "[" tac2alg_constructors "]" (* ltac2 plugin *) | "{" tac2rec_fields "}" (* ltac2 plugin *) ] tac2alg_constructors: [ | "|" LIST1 tac2alg_constructor SEP "|" (* ltac2 plugin *) | LIST0 tac2alg_constructor SEP "|" (* ltac2 plugin *) ] tac2alg_constructor: [ | Prim.ident (* ltac2 plugin *) | Prim.ident "(" LIST0 ltac2_type5 SEP "," ")" (* ltac2 plugin *) ] tac2rec_fields: [ | tac2rec_field ";" tac2rec_fields (* ltac2 plugin *) | tac2rec_field ";" (* ltac2 plugin *) | tac2rec_field (* ltac2 plugin *) | (* ltac2 plugin *) ] tac2rec_field: [ | mut_flag Prim.ident ":" ltac2_type5 (* ltac2 plugin *) ] tac2rec_fieldexprs: [ | tac2rec_fieldexpr ";" tac2rec_fieldexprs (* ltac2 plugin *) | tac2rec_fieldexpr ";" (* ltac2 plugin *) | tac2rec_fieldexpr (* ltac2 plugin *) | (* ltac2 plugin *) ] tac2rec_fieldexpr: [ | Prim.qualid ":=" ltac2_expr1 (* ltac2 plugin *) ] tac2typ_prm: [ | (* ltac2 plugin *) | ltac2_typevar (* ltac2 plugin *) | "(" LIST1 ltac2_typevar SEP "," ")" (* ltac2 plugin *) ] tac2typ_def: [ | tac2typ_prm Prim.qualid tac2type_body (* ltac2 plugin *) ] tac2type_body: [ | (* ltac2 plugin *) | ":=" tac2typ_knd (* ltac2 plugin *) | "::=" tac2typ_knd (* ltac2 plugin *) ] tac2def_typ: [ | "Type" rec_flag LIST1 tac2typ_def SEP "with" (* ltac2 plugin *) ] tac2def_ext: [ | "@" "external" locident ":" ltac2_type5 ":=" Prim.string Prim.string (* ltac2 plugin *) ] syn_node: [ | "_" (* ltac2 plugin *) | Prim.ident (* ltac2 plugin *) ] ltac2_scope: [ | Prim.string (* ltac2 plugin *) | Prim.integer (* ltac2 plugin *) | syn_node (* ltac2 plugin *) | syn_node "(" LIST1 ltac2_scope SEP "," ")" (* ltac2 plugin *) ] syn_level: [ | (* ltac2 plugin *) | ":" Prim.natural (* ltac2 plugin *) ] tac2def_syn: [ | "Notation" LIST1 ltac2_scope syn_level ":=" ltac2_expr6 (* ltac2 plugin *) ] lident: [ | Prim.ident (* ltac2 plugin *) ] globref: [ | "&" Prim.ident (* ltac2 plugin *) | Prim.qualid (* ltac2 plugin *) ] anti: [ | "$" Prim.ident (* ltac2 plugin *) ] ident_or_anti: [ | lident (* ltac2 plugin *) | 
"$" Prim.ident (* ltac2 plugin *) ] lnatural: [ | Prim.natural (* ltac2 plugin *) ] q_ident: [ | ident_or_anti (* ltac2 plugin *) ] qhyp: [ | anti (* ltac2 plugin *) | lnatural (* ltac2 plugin *) | lident (* ltac2 plugin *) ] G_LTAC2_simple_binding: [ | "(" qhyp ":=" Constr.lconstr ")" (* ltac2 plugin *) ] G_LTAC2_bindings: [ | test_lpar_idnum_coloneq LIST1 G_LTAC2_simple_binding (* ltac2 plugin *) | LIST1 Constr.constr (* ltac2 plugin *) ] q_bindings: [ | G_LTAC2_bindings (* ltac2 plugin *) ] q_with_bindings: [ | G_LTAC2_with_bindings (* ltac2 plugin *) ] G_LTAC2_intropatterns: [ | LIST0 nonsimple_intropattern (* ltac2 plugin *) ] G_LTAC2_or_and_intropattern: [ | "[" LIST1 G_LTAC2_intropatterns SEP "|" "]" (* ltac2 plugin *) | "()" (* ltac2 plugin *) | "(" G_LTAC2_simple_intropattern ")" (* ltac2 plugin *) | "(" G_LTAC2_simple_intropattern "," LIST1 G_LTAC2_simple_intropattern SEP "," ")" (* ltac2 plugin *) | "(" G_LTAC2_simple_intropattern "&" LIST1 G_LTAC2_simple_intropattern SEP "&" ")" (* ltac2 plugin *) ] G_LTAC2_equality_intropattern: [ | "->" (* ltac2 plugin *) | "<-" (* ltac2 plugin *) | "[=" G_LTAC2_intropatterns "]" (* ltac2 plugin *) ] G_LTAC2_naming_intropattern: [ | LEFTQMARK lident (* ltac2 plugin *) | "?$" lident (* ltac2 plugin *) | "?" (* ltac2 plugin *) | ident_or_anti (* ltac2 plugin *) ] nonsimple_intropattern: [ | G_LTAC2_simple_intropattern (* ltac2 plugin *) | "*" (* ltac2 plugin *) | "**" (* ltac2 plugin *) ] G_LTAC2_simple_intropattern: [ | G_LTAC2_simple_intropattern_closed (* ltac2 plugin *) ] G_LTAC2_simple_intropattern_closed: [ | G_LTAC2_or_and_intropattern (* ltac2 plugin *) | G_LTAC2_equality_intropattern (* ltac2 plugin *) | "_" (* ltac2 plugin *) | G_LTAC2_naming_intropattern (* ltac2 plugin *) ] q_intropatterns: [ | G_LTAC2_intropatterns (* ltac2 plugin *) ] q_intropattern: [ | G_LTAC2_simple_intropattern (* ltac2 plugin *) ] nat_or_anti: [ | lnatural (* ltac2 plugin *) | "$" Prim.ident (* ltac2 plugin *) ] G_LTAC2_eqn_ipat: [ | "eqn" ":" G_LTAC2_naming_intropattern (* ltac2 plugin *) | (* ltac2 plugin *) ] G_LTAC2_with_bindings: [ | "with" G_LTAC2_bindings (* ltac2 plugin *) | (* ltac2 plugin *) ] G_LTAC2_constr_with_bindings: [ | Constr.constr G_LTAC2_with_bindings (* ltac2 plugin *) ] G_LTAC2_destruction_arg: [ | lnatural (* ltac2 plugin *) | lident (* ltac2 plugin *) | G_LTAC2_constr_with_bindings (* ltac2 plugin *) ] q_destruction_arg: [ | G_LTAC2_destruction_arg (* ltac2 plugin *) ] G_LTAC2_as_or_and_ipat: [ | "as" G_LTAC2_or_and_intropattern (* ltac2 plugin *) | (* ltac2 plugin *) ] G_LTAC2_occs_nums: [ | LIST1 nat_or_anti (* ltac2 plugin *) | "-" nat_or_anti LIST0 nat_or_anti (* ltac2 plugin *) ] G_LTAC2_occs: [ | "at" G_LTAC2_occs_nums (* ltac2 plugin *) | (* ltac2 plugin *) ] G_LTAC2_hypident: [ | ident_or_anti (* ltac2 plugin *) | "(" "type" "of" ident_or_anti ")" (* ltac2 plugin *) | "(" "value" "of" ident_or_anti ")" (* ltac2 plugin *) ] G_LTAC2_hypident_occ: [ | G_LTAC2_hypident G_LTAC2_occs (* ltac2 plugin *) ] G_LTAC2_in_clause: [ | "*" G_LTAC2_occs (* ltac2 plugin *) | "*" "|-" G_LTAC2_concl_occ (* ltac2 plugin *) | LIST0 G_LTAC2_hypident_occ SEP "," "|-" G_LTAC2_concl_occ (* ltac2 plugin *) | LIST0 G_LTAC2_hypident_occ SEP "," (* ltac2 plugin *) ] clause: [ | "in" G_LTAC2_in_clause (* ltac2 plugin *) | "at" G_LTAC2_occs_nums (* ltac2 plugin *) ] q_clause: [ | clause (* ltac2 plugin *) ] G_LTAC2_concl_occ: [ | "*" G_LTAC2_occs (* ltac2 plugin *) | (* ltac2 plugin *) ] G_LTAC2_induction_clause: [ | G_LTAC2_destruction_arg 
G_LTAC2_as_or_and_ipat G_LTAC2_eqn_ipat OPT clause (* ltac2 plugin *) ] q_induction_clause: [ | G_LTAC2_induction_clause (* ltac2 plugin *) ] G_LTAC2_conversion: [ | Constr.constr (* ltac2 plugin *) | Constr.constr "with" Constr.constr (* ltac2 plugin *) ] q_conversion: [ | G_LTAC2_conversion (* ltac2 plugin *) ] ltac2_orient: [ | "->" (* ltac2 plugin *) | "<-" (* ltac2 plugin *) | (* ltac2 plugin *) ] G_LTAC2_rewriter: [ | "!" G_LTAC2_constr_with_bindings (* ltac2 plugin *) | [ "?" | LEFTQMARK ] G_LTAC2_constr_with_bindings (* ltac2 plugin *) | lnatural "!" G_LTAC2_constr_with_bindings (* ltac2 plugin *) | lnatural [ "?" | LEFTQMARK ] G_LTAC2_constr_with_bindings (* ltac2 plugin *) | lnatural G_LTAC2_constr_with_bindings (* ltac2 plugin *) | G_LTAC2_constr_with_bindings (* ltac2 plugin *) ] G_LTAC2_oriented_rewriter: [ | ltac2_orient G_LTAC2_rewriter (* ltac2 plugin *) ] q_rewriting: [ | G_LTAC2_oriented_rewriter (* ltac2 plugin *) ] G_LTAC2_tactic_then_last: [ | "|" LIST0 ( OPT ltac2_expr6 ) SEP "|" (* ltac2 plugin *) | (* ltac2 plugin *) ] G_LTAC2_for_each_goal: [ | ltac2_expr6 "|" G_LTAC2_for_each_goal (* ltac2 plugin *) | ltac2_expr6 ".." G_LTAC2_tactic_then_last (* ltac2 plugin *) | ".." G_LTAC2_tactic_then_last (* ltac2 plugin *) | ltac2_expr6 (* ltac2 plugin *) | "|" G_LTAC2_for_each_goal (* ltac2 plugin *) | (* ltac2 plugin *) ] q_dispatch: [ | G_LTAC2_for_each_goal (* ltac2 plugin *) ] q_occurrences: [ | G_LTAC2_occs (* ltac2 plugin *) ] ltac2_red_flag: [ | "beta" (* ltac2 plugin *) | "iota" (* ltac2 plugin *) | "match" (* ltac2 plugin *) | "fix" (* ltac2 plugin *) | "cofix" (* ltac2 plugin *) | "zeta" (* ltac2 plugin *) | "delta" G_LTAC2_delta_flag (* ltac2 plugin *) ] refglobal: [ | "&" Prim.ident (* ltac2 plugin *) | Prim.qualid (* ltac2 plugin *) | "$" Prim.ident (* ltac2 plugin *) ] q_reference: [ | refglobal (* ltac2 plugin *) ] refglobals: [ | LIST1 refglobal (* ltac2 plugin *) ] G_LTAC2_delta_flag: [ | "-" "[" refglobals "]" (* ltac2 plugin *) | "[" refglobals "]" (* ltac2 plugin *) | (* ltac2 plugin *) ] G_LTAC2_strategy_flag: [ | LIST1 ltac2_red_flag (* ltac2 plugin *) | G_LTAC2_delta_flag (* ltac2 plugin *) ] q_strategy_flag: [ | G_LTAC2_strategy_flag (* ltac2 plugin *) ] hintdb: [ | "*" (* ltac2 plugin *) | LIST1 ident_or_anti (* ltac2 plugin *) ] q_hintdb: [ | hintdb (* ltac2 plugin *) ] G_LTAC2_match_pattern: [ | "context" OPT Prim.ident "[" Constr.cpattern "]" (* ltac2 plugin *) | Constr.cpattern (* ltac2 plugin *) ] G_LTAC2_match_rule: [ | G_LTAC2_match_pattern "=>" ltac2_expr6 (* ltac2 plugin *) ] G_LTAC2_match_list: [ | LIST1 G_LTAC2_match_rule SEP "|" (* ltac2 plugin *) | "|" LIST1 G_LTAC2_match_rule SEP "|" (* ltac2 plugin *) ] q_constr_matching: [ | G_LTAC2_match_list (* ltac2 plugin *) ] gmatch_hyp_pattern: [ | Prim.name ":" G_LTAC2_match_pattern (* ltac2 plugin *) ] gmatch_pattern: [ | "[" LIST0 gmatch_hyp_pattern SEP "," "|-" G_LTAC2_match_pattern "]" (* ltac2 plugin *) ] gmatch_rule: [ | gmatch_pattern "=>" ltac2_expr6 (* ltac2 plugin *) ] goal_match_list: [ | LIST1 gmatch_rule SEP "|" (* ltac2 plugin *) | "|" LIST1 gmatch_rule SEP "|" (* ltac2 plugin *) ] q_goal_matching: [ | goal_match_list (* ltac2 plugin *) ] move_location: [ | "at" "top" (* ltac2 plugin *) | "at" "bottom" (* ltac2 plugin *) | "after" ident_or_anti (* ltac2 plugin *) | "before" ident_or_anti (* ltac2 plugin *) ] q_move_location: [ | move_location (* ltac2 plugin *) ] G_LTAC2_as_name: [ | (* ltac2 plugin *) | "as" ident_or_anti (* ltac2 plugin *) ] pose: [ | test_lpar_id_coloneq "(" 
ident_or_anti ":=" Constr.lconstr ")" (* ltac2 plugin *) | Constr.constr G_LTAC2_as_name (* ltac2 plugin *) ] q_pose: [ | pose (* ltac2 plugin *) ] G_LTAC2_as_ipat: [ | "as" G_LTAC2_simple_intropattern (* ltac2 plugin *) | (* ltac2 plugin *) ] G_LTAC2_by_tactic: [ | "by" ltac2_expr6 (* ltac2 plugin *) | (* ltac2 plugin *) ] assertion: [ | test_lpar_id_coloneq "(" ident_or_anti ":=" Constr.lconstr ")" (* ltac2 plugin *) | test_lpar_id_colon "(" ident_or_anti ":" Constr.lconstr ")" G_LTAC2_by_tactic (* ltac2 plugin *) | Constr.constr G_LTAC2_as_ipat G_LTAC2_by_tactic (* ltac2 plugin *) ] q_assert: [ | assertion (* ltac2 plugin *) ] ltac2_entry: [ | tac2def_val (* ltac2 plugin *) | tac2def_typ (* ltac2 plugin *) | tac2def_ext (* ltac2 plugin *) | tac2def_syn (* ltac2 plugin *) | tac2def_mut (* ltac2 plugin *) ] ltac2_expr: [ | _ltac2_expr (* ltac2 plugin *) ] tac2mode: [ | ltac2_expr6 ltac_use_default (* ltac2 plugin *) | G_vernac.query_command (* ltac2 plugin *) ]
coq-8.15.0/doc/tools/docgram/orderedGrammar000066400000000000000000002031441417001151100205610ustar00rootroot00000000000000
(* Defines the order to apply to editedGrammar to get the final grammar for the doc. doc_grammar will modify this file to add/remove nonterminals and productions so that it matches editedGrammar; this rewrite removes comments. Not compiled into Coq *)
DOC_GRAMMAR
term: [ | term_forall_or_fun | term_let | term_if | term_fix | term_cofix | term100 ] term100: [ | term_cast | term10 ] term10: [ | term_application | one_term ] one_term: [ | term_explicit | term1 ] term1: [ | term_projection | term_scope | term0 ] term0: [ | qualid_annotated | sort | primitive_notations | term_evar | term_match | term_record | term_generalizing | "[|" LIST0 term SEP ";" "|" term OPT ( ":" type ) "|]" OPT univ_annot | term_ltac | "(" term ")" ] qualid_annotated: [ | qualid OPT univ_annot ] term_ltac: [ | "ltac" ":" "(" ltac_expr ")" ] term_projection: [ | term0 ".(" qualid OPT univ_annot LIST0 arg ")" | term0 ".(" "@" qualid OPT univ_annot LIST0 ( term1 ) ")" ] term_scope: [ | term0 "%" scope_key ] term_evar: [ | "_" | "?[" ident "]" | "?[" "?" ident "]" | "?" ident OPT ( "@{" LIST1 ( ident ":=" term ) SEP ";" "}" ) ] dangling_pattern_extension_rule: [ | "@" "?" ident LIST1 ident ] term_application: [ | term1 LIST1 arg | "@" qualid_annotated LIST1 term1 ] arg: [ | "(" ident ":=" term ")" | "(" natural ":=" term ")" | term1 ] term_explicit: [ | "@" qualid_annotated ] primitive_notations: [ | number | string ] assumption_token: [ | [ "Axiom" | "Axioms" ] | [ "Conjecture" | "Conjectures" ] | [ "Parameter" | "Parameters" ] | [ "Hypothesis" | "Hypotheses" ] | [ "Variable" | "Variables" ] ] assumpt: [ | LIST1 ident_decl of_type ] ident_decl: [ | ident OPT univ_decl ] of_type: [ | [ ":" | ":>" ] type ] qualid: [ | ident LIST0 field_ident ] field_ident: [ | "." ident ] type: [ | term ] number: [ | OPT "-" decnat OPT ( "." LIST1 [ digit | "_" ] ) OPT ( [ "e" | "E" ] OPT [ "+" | "-" ] decnat ) | OPT "-" hexnat OPT ( "." LIST1 [ hexdigit | "_" ] ) OPT ( [ "p" | "P" ] OPT [ "+" | "-" ] decnat ) ] integer: [ | OPT "-" natural ] natural: [ | bignat ] bigint: [ | OPT "-" bignat ] bignat: [ | [ decnat | hexnat ] ] decnat: [ | digit LIST0 [ digit | "_" ] ] digit: [ | "0" ".." "9" ] hexnat: [ | [ "0x" | "0X" ] hexdigit LIST0 [ hexdigit | "_" ] ] hexdigit: [ | [ "0" ".." "9" | "a" ".." "f" | "A" ".." "F" ] ] ident: [ | first_letter LIST0 subsequent_letter ] first_letter: [ | [ "a" ".." "z" | "A" ".."
"Z" | "_" | unicode_letter ] ] subsequent_letter: [ | [ first_letter | digit | "'" | unicode_id_part ] ] ssrarg: [ | OPT ssrfwdview OPT ssreqpat ssrdgens OPT ssrintros | ssrfwdview OPT ssrclear OPT ssrintros (* SSR plugin *) | ssrclear OPT ssrintros (* SSR plugin *) | ssrintros (* SSR plugin *) ] ssreqpat: [ | ident (* SSR plugin *) | "_" (* SSR plugin *) | "?" (* SSR plugin *) | "+" (* SSR plugin *) | ssrdocc "->" (* SSR plugin *) | ssrdocc "<-" (* SSR plugin *) | "->" (* SSR plugin *) | "<-" (* SSR plugin *) ] ssrapplyarg: [ | ssrclear OPT ssrintros (* SSR plugin *) | ssrintros (* SSR plugin *) | OPT ssrbwdview ":" ssragen OPT ssragens OPT ssrintros (* SSR plugin *) | ssrbwdview OPT ssrclear OPT ssrintros (* SSR plugin *) ] ssragen: [ | OPT ( "{" LIST1 ident "}" ) term (* SSR plugin *) ] ssragens: [ | "{" LIST1 ident "}" term OPT ssragens (* SSR plugin *) | "{" LIST1 ident "}" (* SSR plugin *) | term OPT ssragens (* SSR plugin *) ] ssrintros: [ | "=>" ssripats (* SSR plugin *) ] ssrbwdview: [ | "/" term (* SSR plugin *) | "/" term ssrbwdview (* SSR plugin *) ] ssrdgens: [ | ":" ssrgen OPT ( "/" ssrgen ) (* SSR plugin *) ] ssrgen: [ | cpattern LIST0 [ LIST1 ident | cpattern ] (* SSR plugin *) ] rewrite_item: [ | "-" OPT mult OPT occ_or_clear OPT ssrpattern_squarep r_item (* SSR plugin *) | mult OPT occ_or_clear OPT ssrpattern_squarep r_item (* SSR plugin *) | "-/" term (* SSR plugin *) | OPT ( OPT ( "{" LIST1 ident "}" ) ssrpattern_squarep ) r_item (* SSR plugin *) | "{" LIST1 ident "}" OPT r_item (* SSR plugin *) | "{" OPT ssr_occurrences "}" OPT ssrpattern_squarep r_item (* SSR plugin *) ] occ_or_clear: [ | clear_switch | "{" ssr_occurrences "}" (* SSR plugin *) ] clear_switch: [ | "{" LIST0 ident "}" ] ssr_occurrences: [ | [ natural | "+" | "-" ] LIST0 natural (* SSR plugin *) ] r_item: [ | [ OPT "/" term | s_item ] (* SSR plugin *) ] ssrpattern_squarep: [ | "[" rewrite_pattern "]" (* SSR plugin *) ] rewrite_pattern: [ | OPT ( OPT ( OPT ( OPT term "in" ) term ) "in" ) term (* SSR plugin *) | term "as" term "in" term (* SSR plugin *) ] ssr_in: [ | "in" ssrclausehyps OPT "|-" OPT "*" (* SSR plugin *) | "in" [ "*" | "*" "|-" | "|-" "*" ] (* SSR plugin *) ] ssrclausehyps: [ | gen_item LIST0 ( OPT "," gen_item ) (* SSR plugin *) ] gen_item: [ | ssrclear (* SSR plugin *) | OPT "@" ident (* SSR plugin *) | "(" ident OPT ( ":=" lcpattern ) ")" (* SSR plugin *) | "(@" ident ":=" lcpattern ")" (* SSR plugin *) ] ssrclear: [ | "{" LIST1 ident "}" (* SSR plugin *) ] lcpattern: [ | term ] ssrsufffwd: [ | OPT ssripats LIST0 ssrbinder ":" term OPT ( "by" ssrhintarg ) (* SSR plugin *) ] ssrviewpos: [ | "for" "move" "/" (* SSR plugin *) | "for" "apply" "/" (* SSR plugin *) | "for" "apply" "//" (* SSR plugin *) ] ssr_one_term_pattern: [ | one_term (* SSR plugin *) ] where: [ | "at" "top" | "at" "bottom" | "after" ident | "before" ident ] add_zify: [ | [ "InjTyp" | "BinOp" | "UnOp" | "CstOp" | "BinRel" | "UnOpSpec" | "BinOpSpec" ] (* Micromega plugin *) | [ "PropOp" | "PropBinOp" | "PropUOp" | "Saturate" ] (* Micromega plugin *) ] show_zify: [ | [ "InjTyp" | "BinOp" | "UnOp" | "CstOp" | "BinRel" | "UnOpSpec" | "BinOpSpec" | "Spec" ] (* Micromega plugin *) ] REACHABLE: [ | command | simple_tactic | NOTINRSTS ] NOTINRSTS: [ | simple_tactic | REACHABLE | NOTINRSTS | l1_tactic | l3_tactic | l2_tactic | binder_tactic | value_tactic | ltac2_entry | q_intropatterns | q_intropattern | q_ident | q_destruction_arg | q_with_bindings | q_bindings | q_reductions | q_reference | q_clause | q_occurrences | 
q_induction_clause | q_conversion | q_rewriting | q_dispatch | q_hintdb | q_move_location | q_pose | q_assert | q_constr_matching | q_goal_matching ] document: [ | LIST0 sentence ] nonterminal: [ ] sentence: [ | OPT attributes command "." | OPT attributes OPT ( natural ":" ) query_command "." | OPT attributes OPT ( toplevel_selector ":" ) ltac_expr [ "." | "..." ] | control_command ] control_command: [ ] query_command: [ ] attributes: [ | LIST0 ( "#[" LIST0 attribute SEP "," "]" ) LIST0 legacy_attr ] attribute: [ | ident OPT attr_value ] attr_value: [ | "=" string | "=" ident | "(" LIST0 attribute SEP "," ")" ] legacy_attr: [ | [ "Local" | "Global" ] | [ "Polymorphic" | "Monomorphic" ] | [ "Cumulative" | "NonCumulative" ] | "Private" | "Program" ] sort: [ | "Set" | "Prop" | "SProp" | "Type" | "Type" "@{" "_" "}" | "Type" "@{" universe "}" ] universe: [ | "max" "(" LIST1 universe_expr SEP "," ")" | universe_expr ] universe_expr: [ | universe_name OPT ( "+" natural ) ] universe_name: [ | qualid | "Set" | "Prop" ] univ_annot: [ | "@{" LIST0 universe_level "}" ] universe_level: [ | "Set" | "Prop" | "Type" | "_" | qualid ] univ_decl: [ | "@{" LIST0 ident OPT "+" OPT [ "|" LIST0 univ_constraint SEP "," OPT "+" ] "}" ] cumul_univ_decl: [ | "@{" LIST0 ( OPT [ "+" | "=" | "*" ] ident ) OPT "+" OPT [ "|" LIST0 univ_constraint SEP "," OPT "+" ] "}" ] univ_constraint: [ | universe_name [ "<" | "=" | "<=" ] universe_name ] term_fix: [ | "let" "fix" fix_decl "in" term | "fix" fix_decl OPT ( LIST1 ( "with" fix_decl ) "for" ident ) ] fix_decl: [ | ident LIST0 binder OPT fixannot OPT ( ":" type ) ":=" term ] fixannot: [ | "{" "struct" ident "}" | "{" "wf" one_term ident "}" | "{" "measure" one_term OPT ident OPT one_term "}" ] term_cofix: [ | "let" "cofix" cofix_body "in" term | "cofix" cofix_body OPT ( LIST1 ( "with" cofix_body ) "for" ident ) ] cofix_body: [ | ident LIST0 binder OPT ( ":" type ) ":=" term ] term_if: [ | "if" term OPT [ OPT [ "as" name ] "return" term100 ] "then" term "else" term ] ssr_dpat: [ | pattern OPT ( OPT ( "in" pattern ) "return" term100 ) (* SSR plugin *) ] term_let: [ | "let" name OPT ( ":" type ) ":=" term "in" term | "let" name LIST1 binder OPT ( ":" type ) ":=" term "in" term | destructuring_let ] destructuring_let: [ | "let" "(" LIST0 name SEP "," ")" OPT [ OPT [ "as" name ] "return" term100 ] ":=" term "in" term | "let" "'" pattern ":=" term OPT ( "return" term100 ) "in" term | "let" "'" pattern "in" pattern ":=" term "return" term100 "in" term ] term_forall_or_fun: [ | "forall" open_binders "," type | "fun" open_binders "=>" term ] open_binders: [ | LIST1 name ":" type | LIST1 binder ] name: [ | "_" | ident ] binder: [ | name | "(" LIST1 name ":" type ")" | "(" name OPT ( ":" type ) ":=" term ")" | implicit_binders | generalizing_binder | "(" name ":" type "|" term ")" | "'" pattern0 ] one_open_binder: [ | name | name ":" term | one_closed_binder ] one_closed_binder: [ | "(" name ":" term ")" | "{" name "}" | "{" name ":" term "}" | "[" name "]" | "[" name ":" term "]" | "'" pattern0 ] implicit_binders: [ | "{" LIST1 name OPT ( ":" type ) "}" | "[" LIST1 name OPT ( ":" type ) "]" ] generalizing_binder: [ | "`(" LIST1 typeclass_constraint SEP "," ")" | "`{" LIST1 typeclass_constraint SEP "," "}" | "`[" LIST1 typeclass_constraint SEP "," "]" ] typeclass_constraint: [ | OPT "!" term | "{" name "}" ":" OPT "!" term | name ":" OPT "!" 
term ] term_generalizing: [ | "`{" term "}" | "`(" term ")" ] term_cast: [ | term10 ":" type | term10 "<:" type | term10 "<<:" type ] term_match: [ | "match" LIST1 case_item SEP "," OPT ( "return" term100 ) "with" OPT "|" LIST0 eqn SEP "|" "end" ] case_item: [ | term100 OPT ( "as" name ) OPT [ "in" pattern ] ] eqn: [ | LIST1 [ LIST1 pattern SEP "," ] SEP "|" "=>" term ] pattern: [ | pattern10 ":" term | pattern10 ] pattern10: [ | pattern1 "as" name | pattern1 LIST0 pattern1 | "@" qualid LIST0 pattern1 ] pattern1: [ | pattern0 "%" scope_key | pattern0 ] pattern0: [ | qualid | "{|" LIST0 ( qualid ":=" pattern ) "|}" | "_" | "(" LIST1 pattern SEP "|" ")" | number | string ] vernac_aux: [ | command "." | tactic_mode "." | subprf ] subprf: [ | "{" ] fix_definition: [ | ident_decl LIST0 binder OPT fixannot OPT ( ":" type ) OPT [ ":=" term ] OPT decl_notations ] thm_token: [ | "Theorem" | "Lemma" | "Fact" | "Remark" | "Corollary" | "Proposition" | "Property" ] def_body: [ | LIST0 binder OPT ( ":" type ) ":=" OPT reduce term | LIST0 binder ":" type ] reduce: [ | "Eval" red_expr "in" ] red_expr: [ | "lazy" OPT reductions | "cbv" OPT reductions | "compute" OPT delta_reductions | "vm_compute" OPT [ reference_occs | pattern_occs ] | "native_compute" OPT [ reference_occs | pattern_occs ] | "red" | "hnf" | "simpl" OPT delta_reductions OPT [ reference_occs | pattern_occs ] | "cbn" OPT reductions | "unfold" LIST1 reference_occs SEP "," | "fold" LIST1 one_term | "pattern" LIST1 pattern_occs SEP "," | ident ] reductions: [ | LIST1 reduction | delta_reductions ] reduction: [ | "beta" | "delta" OPT delta_reductions | "match" | "fix" | "cofix" | "iota" | "zeta" ] delta_reductions: [ | OPT "-" "[" LIST1 reference "]" ] reference_occs: [ | reference OPT ( "at" occs_nums ) ] pattern_occs: [ | one_term OPT ( "at" occs_nums ) ] variant_definition: [ | ident_decl LIST0 binder OPT [ "|" LIST0 binder ] OPT [ ":" type ] ":=" OPT "|" LIST1 constructor SEP "|" OPT decl_notations ] singleton_class_definition: [ | OPT ">" ident_decl LIST0 binder OPT [ ":" sort ] ":=" constructor ] record_definition: [ | OPT ">" ident_decl LIST0 binder OPT [ ":" sort ] OPT ( ":=" OPT ident "{" LIST0 record_field SEP ";" OPT ";" "}" ) ] record_field: [ | LIST0 ( "#[" LIST0 attribute SEP "," "]" ) name OPT field_body OPT [ "|" natural ] OPT decl_notations ] field_body: [ | LIST0 binder of_type | LIST0 binder of_type ":=" term | LIST0 binder ":=" term ] term_record: [ | "{|" LIST0 field_def SEP ";" OPT ";" "|}" ] field_def: [ | qualid LIST0 binder ":=" term ] inductive_definition: [ | OPT ">" ident OPT cumul_univ_decl LIST0 binder OPT [ "|" LIST0 binder ] OPT [ ":" type ] OPT ( ":=" OPT constructors_or_record ) OPT decl_notations ] constructors_or_record: [ | OPT "|" LIST1 constructor SEP "|" | OPT ident "{" LIST0 record_field SEP ";" OPT ";" "}" ] constructor: [ | ident LIST0 binder OPT of_type ] import_categories: [ | OPT "-" "(" LIST1 qualid SEP "," ")" ] filtered_import: [ | qualid OPT [ "(" LIST1 ( qualid OPT [ "(" ".." ")" ] ) SEP "," ")" ] ] cofix_definition: [ | ident_decl LIST0 binder OPT ( ":" type ) OPT [ ":=" term ] OPT decl_notations ] scheme_kind: [ | "Equality" "for" reference | [ "Induction" | "Minimality" | "Elimination" | "Case" ] "for" reference "Sort" sort_family ] sort_family: [ | "Set" | "Prop" | "SProp" | "Type" ] hint_info: [ | "|" OPT natural OPT one_pattern ] one_pattern: [ | one_term ] module_binder: [ | "(" OPT [ "Import" | "Export" ] LIST1 ident ":" module_type_inl ")" ] module_type_inl: [ | "!" 
module_type | module_type OPT functor_app_annot ] functor_app_annot: [ | "[" "inline" "at" "level" natural "]" | "[" "no" "inline" "]" ] module_type: [ | qualid | "(" module_type ")" | module_type module_expr_atom | module_type "with" with_declaration ] with_declaration: [ | "Definition" qualid OPT univ_decl ":=" term | "Module" qualid ":=" qualid ] module_expr_atom: [ | qualid | "(" LIST1 module_expr_atom ")" ] of_module_type: [ | ":" module_type_inl | LIST0 ( "<:" module_type_inl ) ] module_expr_inl: [ | "!" LIST1 module_expr_atom | LIST1 module_expr_atom OPT functor_app_annot ] reference: [ | qualid | string OPT [ "%" scope_key ] ] argument_spec: [ | OPT "!" name OPT ( "%" scope_key ) ] arg_specs: [ | argument_spec | "/" | "&" | "(" LIST1 argument_spec ")" OPT ( "%" scope_key ) | "[" LIST1 argument_spec "]" OPT ( "%" scope_key ) | "{" LIST1 argument_spec "}" OPT ( "%" scope_key ) ] implicits_alt: [ | name | "[" LIST1 name "]" | "{" LIST1 name "}" ] args_modifier: [ | "simpl" "nomatch" | "simpl" "never" | "default" "implicits" | "clear" "implicits" | "clear" "scopes" | "clear" "bidirectionality" "hint" | "rename" | "assert" | "extra" "scopes" | "clear" "scopes" "and" "implicits" | "clear" "implicits" "and" "scopes" ] scope: [ | scope_name | scope_key ] scope_name: [ | ident ] scope_key: [ | ident ] strategy_level: [ | "opaque" | integer | "expand" | "transparent" ] strategy_level_or_var: [ | strategy_level | ident ] reserv_list: [ | LIST1 ( "(" simple_reserv ")" ) | simple_reserv ] simple_reserv: [ | LIST1 ident ":" type ] command: [ | "Goal" type | "Pwd" | "Cd" OPT string | "Load" OPT "Verbose" [ string | ident ] | "Declare" "ML" "Module" LIST1 string | "Locate" reference | "Locate" "Term" reference | "Locate" "Module" qualid | "Info" natural ltac_expr | "Add" "Zify" add_zify qualid (* Micromega plugin *) | "Show" "Zify" show_zify (* Micromega plugin *) | "Locate" "Ltac" qualid | "Locate" "Library" qualid | "Locate" "File" string | "Add" "LoadPath" string "as" dirpath | "Add" "Rec" "LoadPath" string "as" dirpath | "Remove" "LoadPath" string | "Type" term | "Print" "All" | "Print" "Section" qualid | "Print" "Grammar" ident | "Print" "Custom" "Grammar" ident | "Print" "LoadPath" OPT dirpath | "Print" "Libraries" | "Print" "ML" "Path" | "Print" "ML" "Modules" | "Print" "Debug" "GC" | "Print" "Graph" | "Print" "Classes" | "Print" "TypeClasses" | "Print" "Instances" reference | "Print" "Coercions" | "Print" "Coercion" "Paths" class class | "Print" "Canonical" "Projections" LIST0 reference | "Print" "Typing" "Flags" | "Print" "Tables" | "Print" "Options" | "Print" "Hint" OPT [ "*" | reference ] | "Print" "HintDb" ident | "Print" "Scopes" | "Print" "Scope" scope_name | "Print" "Visibility" OPT scope_name | "Print" "Implicit" reference | "Print" OPT "Sorted" "Universes" OPT ( "Subgraph" "(" LIST0 qualid ")" ) OPT string | "Print" "Assumptions" reference | "Print" "Opaque" "Dependencies" reference | "Print" "Transparent" "Dependencies" reference | "Print" "All" "Dependencies" reference | "Print" "Strategy" reference | "Print" "Strategies" | "Print" "Registered" | "Print" OPT "Term" reference OPT univ_name_list | "Print" "Module" "Type" qualid | "Print" "Module" qualid | "Print" "Namespace" dirpath | "Inspect" natural | "Add" "ML" "Path" string | OPT "Export" "Set" setting_name | "Print" "Table" setting_name | "Add" setting_name LIST1 [ qualid | string ] | "Test" setting_name OPT ( "for" LIST1 [ qualid | string ] ) | "Remove" setting_name LIST1 [ qualid | string ] | "Reset" "Initial" | "Reset" 
ident | "Back" OPT natural | "Debug" [ "On" | "Off" ] | "Declare" "Reduction" ident ":=" red_expr | "Declare" "Custom" "Entry" ident | "Derive" ident "SuchThat" one_term "As" ident (* derive plugin *) | "Extraction" qualid (* extraction plugin *) | "Recursive" "Extraction" LIST1 qualid (* extraction plugin *) | "Extraction" string LIST1 qualid (* extraction plugin *) | "Extraction" "TestCompile" LIST1 qualid (* extraction plugin *) | "Separate" "Extraction" LIST1 qualid (* extraction plugin *) | "Extraction" "Library" ident (* extraction plugin *) | "Recursive" "Extraction" "Library" ident (* extraction plugin *) | "Extraction" "Language" language (* extraction plugin *) | "Extraction" "Inline" LIST1 qualid (* extraction plugin *) | "Extraction" "NoInline" LIST1 qualid (* extraction plugin *) | "Print" "Extraction" "Inline" (* extraction plugin *) | "Reset" "Extraction" "Inline" (* extraction plugin *) | "Extraction" "Implicit" qualid "[" LIST0 [ ident | integer ] "]" (* extraction plugin *) | "Extraction" "Blacklist" LIST1 ident (* extraction plugin *) | "Print" "Extraction" "Blacklist" (* extraction plugin *) | "Reset" "Extraction" "Blacklist" (* extraction plugin *) | "Extract" "Constant" qualid LIST0 string "=>" [ ident | string ] (* extraction plugin *) | "Extract" "Inlined" "Constant" qualid "=>" [ ident | string ] (* extraction plugin *) | "Extract" "Inductive" qualid "=>" [ ident | string ] "[" LIST0 [ ident | string ] "]" OPT string (* extraction plugin *) | "Show" "Extraction" (* extraction plugin *) | "Proof" | "Proof" "Mode" string | "Proof" term | "Abort" OPT [ "All" | ident ] | "Admitted" | "Qed" | "Save" ident | "Defined" OPT ident | "Restart" | "Undo" OPT ( OPT "To" natural ) | "Focus" OPT natural | "Unfocus" | "Unfocused" | "Show" OPT [ ident | natural ] | "Show" "Existentials" | "Show" "Universes" | "Show" "Conjectures" | "Show" "Proof" OPT ( "Diffs" OPT "removed" ) | "Show" "Intro" | "Show" "Intros" | "Show" "Match" qualid | "Guarded" | "Create" "HintDb" ident OPT "discriminated" | "Remove" "Hints" LIST1 qualid OPT ( ":" LIST1 ident ) | "Comments" LIST0 [ one_term | string | natural ] | "Declare" "Instance" ident_decl LIST0 binder ":" term OPT hint_info | "Declare" "Scope" scope_name | "Obligation" natural OPT ( "of" ident ) OPT ( ":" type OPT ( "with" ltac_expr ) ) | "Next" "Obligation" OPT ( "of" ident ) OPT ( "with" ltac_expr ) | "Solve" "Obligation" natural OPT ( "of" ident ) "with" ltac_expr | "Solve" "Obligations" OPT ( "of" ident ) OPT ( "with" ltac_expr ) | "Solve" "All" "Obligations" OPT ( "with" ltac_expr ) | "Admit" "Obligations" OPT ( "of" ident ) | "Obligation" "Tactic" ":=" ltac_expr | "Show" "Obligation" "Tactic" | "Obligations" OPT ( "of" ident ) | "Preterm" OPT ( "of" ident ) | "Add" "Relation" one_term one_term OPT ( "reflexivity" "proved" "by" one_term ) OPT ( "symmetry" "proved" "by" one_term ) OPT ( "transitivity" "proved" "by" one_term ) "as" ident | "Add" "Parametric" "Relation" LIST0 binder ":" one_term one_term OPT ( "reflexivity" "proved" "by" one_term ) OPT ( "symmetry" "proved" "by" one_term ) OPT ( "transitivity" "proved" "by" one_term ) "as" ident | "Add" "Setoid" one_term one_term one_term "as" ident | "Add" "Parametric" "Setoid" LIST0 binder ":" one_term one_term one_term "as" ident | "Add" "Morphism" one_term ":" ident | "Declare" "Morphism" one_term ":" ident | "Add" "Morphism" one_term "with" "signature" term "as" ident | "Add" "Parametric" "Morphism" LIST0 binder ":" one_term "with" "signature" term "as" ident | "Unshelve" | "Declare" 
"Equivalent" "Keys" one_term one_term | "Print" "Equivalent" "Keys" | "Optimize" "Proof" | "Optimize" "Heap" | "infoH" ltac_expr | "Reset" "Ltac" "Profile" | "Show" "Ltac" "Profile" OPT [ "CutOff" integer | string ] | "Show" "Lia" "Profile" (* micromega plugin *) | "Add" "Ring" ident ":" one_term OPT ( "(" LIST1 ring_mod SEP "," ")" ) (* ring plugin *) | "Print" "Rings" (* ring plugin *) | "Add" "Field" ident ":" one_term OPT ( "(" LIST1 field_mod SEP "," ")" ) (* ring plugin *) | "Print" "Fields" (* ring plugin *) | "Hint" "Cut" "[" hints_regexp "]" OPT ( ":" LIST1 ident ) | "Prenex" "Implicits" LIST1 qualid (* SSR plugin *) | "Print" "Hint" "View" OPT ssrviewpos (* SSR plugin *) | "Hint" "View" OPT ssrviewpos LIST1 ( one_term OPT ( "|" natural ) ) (* SSR plugin *) | "Typeclasses" "Transparent" LIST1 qualid | "Typeclasses" "Opaque" LIST1 qualid | "Typeclasses" "eauto" ":=" OPT "debug" OPT ( "(" [ "bfs" | "dfs" ] ")" ) OPT natural | "Proof" "with" ltac_expr OPT [ "using" section_var_expr ] | "Proof" "using" section_var_expr OPT [ "with" ltac_expr ] | "Tactic" "Notation" OPT ( "(" "at" "level" natural ")" ) LIST1 ltac_production_item ":=" ltac_expr | "Print" "Rewrite" "HintDb" ident | "Print" "Ltac" qualid | "Ltac" tacdef_body LIST0 ( "with" tacdef_body ) | "Print" "Ltac" "Signatures" | "Set" "Firstorder" "Solver" ltac_expr | "Print" "Firstorder" "Solver" | "Function" fix_definition LIST0 ( "with" fix_definition ) | "Functional" "Scheme" func_scheme_def LIST0 ( "with" func_scheme_def ) | "Functional" "Case" func_scheme_def (* funind plugin *) | "Generate" "graph" "for" qualid (* funind plugin *) | "Hint" "Rewrite" OPT [ "->" | "<-" ] LIST1 one_term OPT ( "using" ltac_expr ) OPT ( ":" LIST0 ident ) | "Derive" "Inversion_clear" ident "with" one_term OPT ( "Sort" sort_family ) | "Derive" "Inversion" ident "with" one_term OPT ( "Sort" sort_family ) | "Derive" "Dependent" "Inversion" ident "with" one_term "Sort" sort_family | "Derive" "Dependent" "Inversion_clear" ident "with" one_term "Sort" sort_family | "Declare" "Left" "Step" one_term | "Declare" "Right" "Step" one_term | "Number" "Notation" qualid qualid qualid OPT ( "(" LIST1 number_modifier SEP "," ")" ) ":" scope_name | "String" "Notation" qualid qualid qualid OPT ( "(" number_string_via ")" ) ":" scope_name | "SubClass" ident_decl def_body | thm_token ident_decl LIST0 binder ":" type LIST0 [ "with" ident_decl LIST0 binder ":" type ] | assumption_token OPT ( "Inline" OPT ( "(" natural ")" ) ) [ LIST1 ( "(" assumpt ")" ) | assumpt ] | [ "Definition" | "Example" ] ident_decl def_body | "Let" ident_decl def_body | "Inductive" inductive_definition LIST0 ( "with" inductive_definition ) | "Fixpoint" fix_definition LIST0 ( "with" fix_definition ) | "Let" "Fixpoint" fix_definition LIST0 ( "with" fix_definition ) | "CoFixpoint" cofix_definition LIST0 ( "with" cofix_definition ) | "Let" "CoFixpoint" cofix_definition LIST0 ( "with" cofix_definition ) | "Scheme" OPT ( ident ":=" ) scheme_kind LIST0 ( "with" OPT ( ident ":=" ) scheme_kind ) | "Combined" "Scheme" ident "from" LIST1 ident SEP "," | "Register" qualid "as" qualid | "Register" "Inline" qualid | "Primitive" ident_decl OPT [ ":" term ] ":=" "#" ident | "Universe" LIST1 ident | "Universes" LIST1 ident | "Constraint" LIST1 univ_constraint SEP "," | "CoInductive" inductive_definition LIST0 ( "with" inductive_definition ) | "Variant" variant_definition LIST0 ( "with" variant_definition ) | [ "Record" | "Structure" ] record_definition LIST0 ( "with" record_definition ) | "Class" 
record_definition | "Class" singleton_class_definition | "Module" OPT [ "Import" | "Export" ] ident LIST0 module_binder OPT of_module_type OPT ( ":=" LIST1 module_expr_inl SEP "<+" ) | "Module" "Type" ident LIST0 module_binder LIST0 ( "<:" module_type_inl ) OPT ( ":=" LIST1 module_type_inl SEP "<+" ) | "Declare" "Module" OPT [ "Import" | "Export" ] ident LIST0 module_binder ":" module_type_inl | "Section" ident | "End" ident | "Collection" ident ":=" section_var_expr | OPT [ "From" dirpath ] "Require" OPT [ "Import" | "Export" ] LIST1 qualid | "Import" OPT import_categories LIST1 filtered_import | "Export" OPT import_categories LIST1 filtered_import | "Include" module_type_inl LIST0 ( "<+" module_expr_inl ) | "Include" "Type" LIST1 module_type_inl SEP "<+" | "Transparent" LIST1 reference | "Opaque" LIST1 reference | "Strategy" LIST1 [ strategy_level "[" LIST1 reference "]" ] | "Canonical" OPT "Structure" ident_decl def_body | "Canonical" OPT "Structure" reference | "Coercion" ident OPT univ_decl def_body | "Identity" "Coercion" ident ":" class ">->" class | "Coercion" reference ":" class ">->" class | "Context" LIST1 binder | "Instance" OPT ( ident_decl LIST0 binder ) ":" type OPT hint_info OPT [ ":=" "{" LIST0 field_def "}" | ":=" term ] | "Existing" "Instance" qualid OPT hint_info | "Existing" "Instances" LIST1 qualid OPT [ "|" natural ] | "Existing" "Class" qualid | "Arguments" reference LIST0 arg_specs LIST0 [ "," LIST0 implicits_alt ] OPT [ ":" LIST1 args_modifier SEP "," ] | "Implicit" [ "Type" | "Types" ] reserv_list | "Generalizable" [ [ "Variable" | "Variables" ] LIST1 ident | "All" "Variables" | "No" "Variables" ] | "Set" setting_name OPT [ integer | string ] | "Unset" setting_name | "Import" "Prenex" "Implicits" (* SSR plugin *) | "Open" "Scope" scope | "Close" "Scope" scope | "Delimit" "Scope" scope_name "with" scope_key | "Undelimit" "Scope" scope_name | "Bind" "Scope" scope_name "with" LIST1 class | "Infix" string ":=" one_term OPT ( "(" LIST1 syntax_modifier SEP "," ")" ) OPT [ ":" scope_name ] | "Notation" ident LIST0 ident ":=" one_term OPT ( "(" LIST1 syntax_modifier SEP "," ")" ) | "Notation" string ":=" one_term OPT ( "(" LIST1 syntax_modifier SEP "," ")" ) OPT [ ":" scope_name ] | "Format" "Notation" string string string | "Reserved" "Infix" string OPT ( "(" LIST1 syntax_modifier SEP "," ")" ) | "Reserved" "Notation" string OPT ( "(" LIST1 syntax_modifier SEP "," ")" ) | "Eval" red_expr "in" term | "Compute" term | "Check" term | "About" reference OPT univ_name_list | "SearchPattern" one_pattern OPT ( [ "inside" | "in" | "outside" ] LIST1 qualid ) | "SearchRewrite" one_pattern OPT ( [ "inside" | "in" | "outside" ] LIST1 qualid ) | "Search" LIST1 ( search_query ) OPT ( [ "inside" | "in" | "outside" ] LIST1 qualid ) | "Ltac2" OPT "mutable" OPT "rec" tac2def_body LIST0 ( "with" tac2def_body ) | "Ltac2" "Type" OPT "rec" tac2typ_def LIST0 ( "with" tac2typ_def ) | "Ltac2" "@" "external" ident ":" ltac2_type ":=" string string | "Ltac2" "Notation" LIST1 ltac2_scope OPT ( ":" natural ) ":=" ltac2_expr | "Ltac2" "Set" qualid OPT [ "as" ident ] ":=" ltac2_expr | "Ltac2" "Notation" [ string | lident ] ":=" ltac2_expr (* Ltac2 plugin *) | "Ltac2" "Eval" ltac2_expr (* ltac2 plugin *) | "Print" "Ltac2" qualid (* ltac2 plugin *) | "Hint" "Resolve" LIST1 [ qualid | one_term ] OPT hint_info OPT ( ":" LIST1 ident ) | "Hint" "Resolve" [ "->" | "<-" ] LIST1 qualid OPT natural OPT ( ":" LIST1 ident ) | "Hint" "Immediate" LIST1 [ qualid | one_term ] OPT ( ":" LIST1 ident ) | "Hint" [ 
"Constants" | "Variables" ] [ "Transparent" | "Opaque" ] OPT ( ":" LIST1 ident ) | "Hint" [ "Transparent" | "Opaque" ] LIST1 qualid OPT ( ":" LIST1 ident ) | "Hint" "Mode" qualid LIST1 [ "+" | "!" | "-" ] OPT ( ":" LIST1 ident ) | "Hint" "Unfold" LIST1 qualid OPT ( ":" LIST1 ident ) | "Hint" "Constructors" LIST1 qualid OPT ( ":" LIST1 ident ) | "Hint" "Extern" natural OPT one_pattern "=>" ltac_expr OPT ( ":" LIST1 ident ) | "Time" sentence | "Redirect" string sentence | "Timeout" natural sentence | "Fail" sentence | "Succeed" sentence | "Drop" | "Quit" | "BackTo" natural | "Show" "Goal" natural "at" natural ] section_var_expr: [ | LIST0 starred_ident_ref | OPT "-" section_var_expr50 ] section_var_expr50: [ | section_var_expr0 "-" section_var_expr0 | section_var_expr0 "+" section_var_expr0 | section_var_expr0 ] section_var_expr0: [ | starred_ident_ref | "(" section_var_expr ")" OPT "*" ] starred_ident_ref: [ | ident OPT "*" | "Type" OPT "*" | "All" ] dirpath: [ | LIST0 ( ident "." ) ident ] setting_name: [ | LIST1 ident ] search_query: [ | search_item | "-" search_query | "[" LIST1 ( LIST1 search_query ) SEP "|" "]" ] search_item: [ | OPT ( [ "head" | "hyp" | "concl" | "headhyp" | "headconcl" ] ":" ) string OPT ( "%" scope_key ) | OPT ( [ "head" | "hyp" | "concl" | "headhyp" | "headconcl" ] ":" ) one_pattern | "is" ":" logical_kind ] logical_kind: [ | [ thm_token | assumption_token ] | [ "Definition" | "Example" | "Context" | "Primitive" ] | [ "Coercion" | "Instance" | "Scheme" | "Canonical" | "SubClass" ] | [ "Field" | "Method" ] ] univ_name_list: [ | "@{" LIST0 name "}" ] tacdef_body: [ | qualid LIST0 name [ ":=" | "::=" ] ltac_expr ] ltac_production_item: [ | string | ident OPT ( "(" ident OPT ( "," string ) ")" ) ] tac2expr_in_env: [ | LIST0 ident "|-" ltac2_expr (* ltac2 plugin *) | ltac2_expr (* ltac2 plugin *) ] ltac2_type: [ | ltac2_type2 "->" ltac2_type (* ltac2 plugin *) | ltac2_type2 (* ltac2 plugin *) ] ltac2_type2: [ | ltac2_type1 "*" LIST1 ltac2_type1 SEP "*" (* ltac2 plugin *) | ltac2_type1 (* ltac2 plugin *) ] ltac2_type1: [ | ltac2_type0 qualid (* ltac2 plugin *) | ltac2_type0 (* ltac2 plugin *) ] ltac2_type0: [ | "(" LIST1 ltac2_type SEP "," ")" OPT qualid (* ltac2 plugin *) | ltac2_typevar (* ltac2 plugin *) | "_" (* ltac2 plugin *) | qualid (* ltac2 plugin *) ] ltac2_typevar: [ | "'" ident (* ltac2 plugin *) ] lident: [ | ident (* ltac2 plugin *) ] occurrences: [ | "at" occs_nums | "in" goal_occurrences ] simple_occurrences: [ | occurrences ] occs_nums: [ | OPT "-" LIST1 nat_or_var ] nat_or_var: [ | [ natural | ident ] ] goal_occurrences: [ | LIST1 hyp_occs SEP "," OPT ( "|-" OPT concl_occs ) | "*" "|-" OPT concl_occs | "|-" OPT concl_occs | OPT concl_occs ] hyp_occs: [ | hypident OPT ( "at" occs_nums ) ] hypident: [ | ident | "(" "type" "of" ident ")" | "(" "value" "of" ident ")" ] concl_occs: [ | "*" OPT ( "at" occs_nums ) ] q_intropatterns: [ | ltac2_intropatterns (* ltac2 plugin *) ] ltac2_intropatterns: [ | LIST0 nonsimple_intropattern (* ltac2 plugin *) ] nonsimple_intropattern: [ | "*" (* ltac2 plugin *) | "**" (* ltac2 plugin *) | ltac2_simple_intropattern (* ltac2 plugin *) ] q_intropattern: [ | ltac2_simple_intropattern (* ltac2 plugin *) ] ltac2_simple_intropattern: [ | ltac2_naming_intropattern (* ltac2 plugin *) | "_" (* ltac2 plugin *) | ltac2_or_and_intropattern (* ltac2 plugin *) | ltac2_equality_intropattern (* ltac2 plugin *) ] ltac2_or_and_intropattern: [ | "[" LIST1 ltac2_intropatterns SEP "|" "]" (* ltac2 plugin *) | "()" (* ltac2 plugin *) | "(" 
LIST1 ltac2_simple_intropattern SEP "," ")" (* Ltac2 plugin *) | "(" LIST1 ltac2_simple_intropattern SEP "&" ")" (* Ltac2 plugin *) ] ltac2_equality_intropattern: [ | "->" (* ltac2 plugin *) | "<-" (* ltac2 plugin *) | "[=" ltac2_intropatterns "]" (* ltac2 plugin *) ] ltac2_naming_intropattern: [ | "?" lident (* ltac2 plugin *) | "?$" lident (* ltac2 plugin *) | "?" (* ltac2 plugin *) | ident_or_anti (* ltac2 plugin *) ] q_ident: [ | ident_or_anti (* ltac2 plugin *) ] ident_or_anti: [ | lident (* ltac2 plugin *) | "$" ident (* ltac2 plugin *) ] q_destruction_arg: [ | ltac2_destruction_arg (* ltac2 plugin *) ] ltac2_destruction_arg: [ | natural (* ltac2 plugin *) | lident (* ltac2 plugin *) | ltac2_constr_with_bindings (* ltac2 plugin *) ] ltac2_constr_with_bindings: [ | term OPT ( "with" ltac2_bindings ) (* ltac2 plugin *) ] q_bindings: [ | ltac2_bindings (* ltac2 plugin *) ] q_with_bindings: [ | OPT ( "with" ltac2_bindings ) (* ltac2 plugin *) ] ltac2_bindings: [ | LIST1 ltac2_simple_binding (* ltac2 plugin *) | LIST1 term (* ltac2 plugin *) ] ltac2_simple_binding: [ | "(" qhyp ":=" term ")" (* ltac2 plugin *) ] qhyp: [ | "$" ident (* ltac2 plugin *) | natural (* ltac2 plugin *) | lident (* ltac2 plugin *) ] language: [ | "OCaml" (* extraction plugin *) | "Haskell" (* extraction plugin *) | "Scheme" (* extraction plugin *) | "JSON" (* extraction plugin *) ] ring_mod: [ | "decidable" one_term (* ring plugin *) | "abstract" (* ring plugin *) | "morphism" one_term (* ring plugin *) | "constants" "[" ltac_expr "]" (* ring plugin *) | "preprocess" "[" ltac_expr "]" (* ring plugin *) | "postprocess" "[" ltac_expr "]" (* ring plugin *) | "setoid" one_term one_term (* ring plugin *) | "sign" one_term (* ring plugin *) | "power" one_term "[" LIST1 qualid "]" (* ring plugin *) | "power_tac" one_term "[" ltac_expr "]" (* ring plugin *) | "div" one_term (* ring plugin *) | "closed" "[" LIST1 qualid "]" (* ring plugin *) ] field_mod: [ | ring_mod (* ring plugin *) | "completeness" one_term (* ring plugin *) ] ssrmmod: [ | "!" (* SSR plugin *) | "?" (* SSR plugin *) ] mult: [ | OPT natural ssrmmod (* SSR plugin *) ] ssrwlogfwd: [ | ":" LIST0 gen_item "/" term (* SSR plugin *) ] ssrhintarg: [ | "[" OPT ssrortacs "]" (* SSR plugin *) | ltac_expr (* SSR plugin *) ] ssrortacs: [ | OPT ltac_expr "|" OPT ssrortacs | ltac_expr (* SSR plugin *) ] ssrhint3arg: [ | "[" OPT ssrortacs "]" (* SSR plugin *) | ltac_expr3 (* SSR plugin *) ] ssrdefbody: [ | OPT ( ":" term ) ":=" term (* SSR plugin *) ] i_item: [ | "_" (* SSR plugin *) | "*" (* SSR plugin *) | ">" (* SSR plugin *) | ident | "?" 
(* SSR plugin *) | "+" (* SSR plugin *) | "++" (* SSR plugin *) | s_item (* SSR plugin *) | ssrdocc OPT [ "->" | "<-" ] (* SSR plugin *) | "-" (* SSR plugin *) | "-/=" (* SSR plugin *) | "-//" (* SSR plugin *) | "-//=" (* SSR plugin *) | "-/" integer [ "/=" | "/" | "/" integer "=" ] (* SSR plugin *) | ssrfwdview (* SSR plugin *) | "[:" LIST0 ident "]" (* SSR plugin *) | ssrblockpat (* SSR plugin *) ] ssrhpats_wtransp: [ | OPT ssripats (* SSR plugin *) | OPT ssripats "@" OPT ssripats (* SSR plugin *) ] ssripats: [ | LIST1 i_item (* SSR plugin *) ] s_item: [ | "//" (* SSR plugin *) | "/=" (* SSR plugin *) | "//=" (* SSR plugin *) | "/" natural "/" natural "=" (* SSR plugin *) | "/" natural "/=" (* SSR plugin *) ] ssrdocc: [ | "{" ssr_occurrences "}" (* SSR plugin *) | "{" LIST0 ident "}" (* SSR plugin *) ] ssrfwdview: [ | LIST1 ( "/" one_term ) (* SSR plugin *) ] hat: [ | "^" ident (* SSR plugin *) | "^~" ident (* SSR plugin *) | "^~" natural (* SSR plugin *) ] ssriorpat: [ | ssripats OPT ( [ "|" | "|-" ] ssriorpat ) (* SSR plugin *) ] ssrblockpat: [ | "[" hat "]" (* SSR plugin *) | "[" ssriorpat "]" (* SSR plugin *) | "[=" ssriorpat "]" (* SSR plugin *) ] ssrbinder: [ | ssrbvar (* SSR plugin *) | "(" LIST1 ssrbvar ":" term ")" (* SSR plugin *) | "(" ssrbvar OPT ( ":" term ) OPT ( ":=" term ) ")" (* SSR plugin *) | "of" term10 (* SSR plugin *) | "&" term10 (* SSR plugin *) ] ssrbvar: [ | ident (* SSR plugin *) | "_" (* SSR plugin *) ] ssrhavefwd: [ | ":" term OPT ( "by" ssrhintarg ) (* SSR plugin *) | ":" term ":=" OPT term (* SSR plugin *) ] deprecated_number_modifier: [ | "(" "warning" "after" bignat ")" | "(" "abstract" "after" bignat ")" ] number_modifier: [ | "warning" "after" bignat | "abstract" "after" bignat | number_string_via ] number_string_via: [ | "via" qualid "mapping" "[" LIST1 [ qualid "=>" qualid | "[" qualid "]" "=>" qualid ] SEP "," "]" ] hints_regexp: [ | LIST1 qualid | "_" | hints_regexp "|" hints_regexp | hints_regexp hints_regexp | hints_regexp "*" | "emp" | "eps" | "(" hints_regexp ")" ] class: [ | "Funclass" | "Sortclass" | reference ] syntax_modifier: [ | "at" "level" natural | "in" "custom" ident OPT ( "at" "level" natural ) | LIST1 ident SEP "," [ "at" level | "in" "scope" ident ] | ident "at" level OPT binder_interp | ident explicit_subentry | ident binder_interp | "left" "associativity" | "right" "associativity" | "no" "associativity" | "only" "parsing" | "only" "printing" | "format" string OPT string ] explicit_subentry: [ | "ident" | "name" | "global" | "bigint" | "strict" "pattern" OPT ( "at" "level" natural ) | "binder" | "closed" "binder" | "constr" OPT ( "at" level ) OPT binder_interp | "custom" ident OPT ( "at" level ) OPT binder_interp | "pattern" OPT ( "at" "level" natural ) ] binder_interp: [ | "as" "ident" | "as" "name" | "as" "pattern" | "as" "strict" "pattern" ] level: [ | "level" natural | "next" "level" ] decl_notations: [ | "where" decl_notation LIST0 ( "and" decl_notation ) ] decl_notation: [ | string ":=" one_term OPT ( "(" LIST1 syntax_modifier SEP "," ")" ) OPT [ ":" scope_name ] ] simple_tactic: [ | "reflexivity" | "exact" one_term | "assumption" | "etransitivity" | "cut" one_term | "exact_no_check" one_term | "vm_cast_no_check" one_term | "native_cast_no_check" one_term | "casetype" one_term | "elimtype" one_term | "lapply" one_term | "transitivity" one_term | "left" OPT ( "with" bindings ) | "eleft" OPT ( "with" bindings ) | "right" OPT ( "with" bindings ) | "eright" OPT ( "with" bindings ) | "constructor" OPT nat_or_var OPT ( "with" 
bindings ) | "econstructor" OPT ( nat_or_var OPT ( "with" bindings ) ) | "specialize" one_term OPT ( "with" bindings ) OPT ( "as" simple_intropattern ) | "symmetry" OPT simple_occurrences | "split" OPT ( "with" bindings ) | "esplit" OPT ( "with" bindings ) | "exists" LIST0 bindings SEP "," | "eexists" LIST0 bindings SEP "," | "intros" "until" [ ident | natural ] | "intro" OPT ident OPT where | "move" ident OPT where | "rename" LIST1 ( ident "into" ident ) SEP "," | "revert" LIST1 ident | "simple" "induction" [ ident | natural ] | "simple" "destruct" [ ident | natural ] | "admit" | "clear" LIST0 ident | "clear" "-" LIST1 ident | "clearbody" LIST1 ident | "generalize" "dependent" one_term | "replace" one_term "with" one_term OPT occurrences OPT ( "by" ltac_expr3 ) | "simplify_eq" OPT induction_arg | "esimplify_eq" OPT induction_arg | "discriminate" OPT induction_arg | "ediscriminate" OPT induction_arg | "injection" OPT induction_arg OPT ( "as" LIST0 simple_intropattern ) | "einjection" OPT induction_arg OPT ( "as" LIST0 simple_intropattern ) | "simple" "injection" OPT induction_arg | "replace" OPT [ "->" | "<-" ] one_term OPT occurrences | "typeclasses" "eauto" OPT [ "bfs" | "dfs" | "best_effort" ] OPT nat_or_var OPT ( "with" LIST1 ident ) | "setoid_replace" one_term "with" one_term OPT ( "using" "relation" one_term ) OPT ( "in" ident ) OPT ( "at" LIST1 int_or_var ) OPT ( "by" ltac_expr3 ) | OPT ( [ natural | "[" ident "]" ] ":" ) "{" | bullet | "}" | "try" ltac_expr3 | "do" nat_or_var ltac_expr3 | "timeout" nat_or_var ltac_expr3 | "time" OPT string ltac_expr3 | "repeat" ltac_expr3 | "progress" ltac_expr3 | "once" ltac_expr3 | "exactly_once" ltac_expr3 | "abstract" ltac_expr2 OPT ( "using" ident ) | "only" selector ":" ltac_expr3 | "tryif" ltac_expr "then" ltac_expr "else" ltac_expr2 | "first" "[" LIST0 ltac_expr SEP "|" "]" | "solve" "[" LIST0 ltac_expr SEP "|" "]" | "idtac" LIST0 [ ident | string | natural ] | [ "fail" | "gfail" ] OPT nat_or_var LIST0 [ ident | string | natural ] | ltac_expr ssrintros (* SSR plugin *) | "fun" LIST1 name "=>" ltac_expr | "eval" red_expr "in" term | "context" ident "[" term "]" | "type" "of" term | "fresh" LIST0 [ string | qualid ] | "type_term" one_term | "numgoals" | "uconstr" ":" "(" term ")" | "fun" LIST1 name "=>" ltac_expr | "let" OPT "rec" let_clause LIST0 ( "with" let_clause ) "in" ltac_expr | ltac_expr3 ";" [ ltac_expr3 | binder_tactic ] | ltac_expr3 ";" "[" for_each_goal "]" | ltac_expr1 "+" [ ltac_expr2 | binder_tactic ] | ltac_expr1 "||" [ ltac_expr2 | binder_tactic ] | "[>" for_each_goal "]" | toplevel_selector ":" ltac_expr | ltac2_match_key ltac2_expr "with" ltac2_match_list "end" | ltac2_match_key OPT "reverse" "goal" "with" goal_match_list "end" | "case_eq" one_term | "dependent" "rewrite" OPT [ "->" | "<-" ] one_term OPT ( "in" ident ) | "cutrewrite" OPT [ "->" | "<-" ] one_term OPT ( "in" ident ) | "decompose" "sum" one_term | "decompose" "record" one_term | "absurd" one_term | "contradiction" OPT ( one_term OPT ( "with" bindings ) ) | "autorewrite" OPT "*" "with" LIST1 ident OPT occurrences OPT ( "using" ltac_expr ) | "rewrite" "*" OPT [ "->" | "<-" ] one_term OPT ( "in" ident ) OPT ( "at" rewrite_occs ) OPT ( "by" ltac_expr3 ) | "rewrite" "*" OPT [ "->" | "<-" ] one_term "at" rewrite_occs "in" ident OPT ( "by" ltac_expr3 ) | "refine" one_term | "simple" "refine" one_term | "notypeclasses" "refine" one_term | "simple" "notypeclasses" "refine" one_term | "solve_constraints" | "subst" LIST0 ident | "simple" "subst" | "evar" "(" ident ":" 
term ")" | "evar" one_term | "instantiate" "(" ident ":=" term ")" | "instantiate" "(" integer ":=" term ")" OPT hloc | "instantiate" | "stepl" one_term OPT ( "by" ltac_expr ) | "stepr" one_term OPT ( "by" ltac_expr ) | "generalize_eqs" ident | "dependent" "generalize_eqs" ident | "generalize_eqs_vars" ident | "dependent" "generalize_eqs_vars" ident | "specialize_eqs" ident | "hresolve_core" "(" ident ":=" one_term ")" OPT ( "at" nat_or_var ) "in" one_term | "hget_evar" nat_or_var | "destauto" OPT ( "in" ident ) | "transparent_abstract" ltac_expr3 OPT ( "using" ident ) | "constr_eq" one_term one_term | "constr_eq_strict" one_term one_term | "constr_eq_nounivs" one_term one_term | "is_evar" one_term | "has_evar" one_term | "is_var" one_term | "is_fix" one_term | "is_cofix" one_term | "is_ind" one_term | "is_constructor" one_term | "is_proj" one_term | "is_const" one_term | "shelve" | "shelve_unifiable" | "unshelve" ltac_expr1 | "give_up" | "cycle" int_or_var | "swap" int_or_var int_or_var | "revgoals" | "guard" int_or_var comparison int_or_var | "decompose" "[" LIST1 one_term "]" one_term | "optimize_heap" | "with_strategy" strategy_level_or_var "[" LIST1 reference "]" ltac_expr3 | "start" "ltac" "profiling" | "stop" "ltac" "profiling" | "reset" "ltac" "profile" | "show" "ltac" "profile" OPT [ "cutoff" integer | string ] | "restart_timer" OPT string | "finish_timing" OPT ( "(" string ")" ) OPT string | "eassumption" | "eexact" one_term | "trivial" OPT auto_using OPT hintbases | "info_trivial" OPT auto_using OPT hintbases | "debug" "trivial" OPT auto_using OPT hintbases | "auto" OPT nat_or_var OPT auto_using OPT hintbases | "info_auto" OPT nat_or_var OPT auto_using OPT hintbases | "debug" "auto" OPT nat_or_var OPT auto_using OPT hintbases | "eauto" OPT nat_or_var OPT auto_using OPT hintbases | "debug" "eauto" OPT nat_or_var OPT auto_using OPT hintbases | "info_eauto" OPT nat_or_var OPT auto_using OPT hintbases | "dfs" "eauto" OPT nat_or_var OPT auto_using OPT hintbases | "bfs" "eauto" OPT nat_or_var OPT auto_using OPT hintbases | "autounfold" OPT hintbases OPT simple_occurrences | "autounfold_one" OPT hintbases OPT ( "in" ident ) | "unify" one_term one_term OPT ( "with" ident ) | "typeclasses" "eauto" "best_effort" OPT nat_or_var "with" LIST1 ident | "typeclasses" "eauto" "best_effort" OPT nat_or_var | "head_of_constr" ident one_term | "not_evar" one_term | "is_ground" one_term | "autoapply" one_term "with" ident | "progress_evars" ltac_expr | "rewrite_strat" rewstrategy OPT ( "in" ident ) | "rewrite_db" ident OPT ( "in" ident ) | "substitute" OPT [ "->" | "<-" ] one_term OPT ( "with" bindings ) | "setoid_rewrite" OPT [ "->" | "<-" ] one_term OPT ( "with" bindings ) OPT ( "at" rewrite_occs ) OPT ( "in" ident ) | "setoid_rewrite" OPT [ "->" | "<-" ] one_term OPT ( "with" bindings ) "in" ident "at" rewrite_occs | "setoid_symmetry" OPT ( "in" ident ) | "setoid_reflexivity" | "setoid_transitivity" one_term | "setoid_etransitivity" | "intros" LIST0 intropattern | "eintros" LIST0 intropattern | "decide" "equality" | "compare" one_term one_term | "apply" LIST1 one_term_with_bindings SEP "," OPT in_hyp_as | "eapply" LIST1 one_term_with_bindings SEP "," OPT in_hyp_as | "simple" "apply" LIST1 one_term_with_bindings SEP "," OPT in_hyp_as | "simple" "eapply" LIST1 one_term_with_bindings SEP "," OPT in_hyp_as | "elim" one_term_with_bindings OPT ( "using" one_term OPT ( "with" bindings ) ) | "eelim" one_term_with_bindings OPT ( "using" one_term OPT ( "with" bindings ) ) | "case" LIST1 induction_clause SEP 
"," OPT induction_principle | "ecase" LIST1 induction_clause SEP "," OPT induction_principle | "fix" ident natural OPT ( "with" LIST1 ( "(" ident LIST0 simple_binder OPT ( "{" "struct" name "}" ) ":" type ")" ) ) | "cofix" ident OPT ( "with" LIST1 ( "(" ident LIST0 simple_binder ":" type ")" ) ) | "pose" bindings_with_parameters | "pose" one_term OPT as_name | "epose" bindings_with_parameters | "epose" one_term OPT as_name | "set" bindings_with_parameters OPT occurrences | "set" one_term OPT as_name OPT occurrences | "eset" bindings_with_parameters OPT occurrences | "eset" one_term OPT as_name OPT occurrences | "remember" one_term OPT as_name OPT ( "eqn" ":" naming_intropattern ) OPT ( "in" goal_occurrences ) | "eremember" one_term OPT as_name OPT ( "eqn" ":" naming_intropattern ) OPT ( "in" goal_occurrences ) | "assert" "(" ident ":=" term ")" | "eassert" "(" ident ":=" term ")" | "assert" "(" ident ":" term ")" OPT ( "by" ltac_expr3 ) | "eassert" "(" ident ":" term ")" OPT ( "by" ltac_expr3 ) | "enough" "(" ident ":" term ")" OPT ( "by" ltac_expr3 ) | "eenough" "(" ident ":" term ")" OPT ( "by" ltac_expr3 ) | "assert" one_term OPT as_ipat OPT ( "by" ltac_expr3 ) | "eassert" one_term OPT as_ipat OPT ( "by" ltac_expr3 ) | "pose" "proof" "(" ident ":=" term ")" | "epose" "proof" "(" ident ":=" term ")" | "pose" "proof" term OPT as_ipat | "epose" "proof" term OPT as_ipat | "enough" one_term OPT as_ipat OPT ( "by" ltac_expr3 ) | "eenough" one_term OPT as_ipat OPT ( "by" ltac_expr3 ) | "generalize" one_term OPT ( LIST1 one_term ) | "generalize" one_term OPT ( "at" occs_nums ) OPT as_name LIST0 [ "," pattern_occs OPT as_name ] | "induction" LIST1 induction_clause SEP "," OPT induction_principle | "einduction" LIST1 induction_clause SEP "," OPT induction_principle | "destruct" LIST1 induction_clause SEP "," OPT induction_principle | "edestruct" LIST1 induction_clause SEP "," OPT induction_principle | "rewrite" LIST1 oriented_rewriter SEP "," OPT occurrences OPT ( "by" ltac_expr3 ) | "erewrite" LIST1 oriented_rewriter SEP "," OPT occurrences OPT ( "by" ltac_expr3 ) | "simple" "inversion" [ ident | natural ] OPT ( "as" or_and_intropattern ) OPT ( "in" LIST1 ident ) | "inversion" [ ident | natural ] OPT ( "as" or_and_intropattern ) OPT ( "in" LIST1 ident ) | "inversion_clear" [ ident | natural ] OPT ( "as" or_and_intropattern ) OPT ( "in" LIST1 ident ) | "inversion" [ ident | natural ] "using" one_term OPT ( "in" LIST1 ident ) | "red" simple_occurrences | "hnf" simple_occurrences | "simpl" OPT delta_reductions OPT [ reference_occs | pattern_occs ] simple_occurrences | "cbv" OPT reductions simple_occurrences | "cbn" OPT reductions simple_occurrences | "lazy" OPT reductions simple_occurrences | "compute" OPT delta_reductions simple_occurrences | "vm_compute" OPT [ reference_occs | pattern_occs ] OPT occurrences | "native_compute" OPT [ reference_occs | pattern_occs ] OPT occurrences | "unfold" LIST1 reference_occs SEP "," OPT occurrences | "fold" LIST1 one_term simple_occurrences | "pattern" LIST1 pattern_occs SEP "," OPT occurrences | "change" OPT ( one_term OPT ( "at" occs_nums ) "with" ) one_term OPT occurrences | "change_no_check" OPT ( one_term OPT ( "at" occs_nums ) "with" ) one_term OPT occurrences | "btauto" | "rtauto" | "congruence" OPT natural OPT ( "with" LIST1 one_term ) | "simple" "congruence" OPT natural OPT ( "with" LIST1 one_term ) | "f_equal" | "firstorder" OPT ltac_expr OPT ( "using" LIST1 qualid SEP "," ) OPT ( "with" LIST1 ident ) | "gintuition" OPT ltac_expr | "functional" 
"inversion" [ ident | natural ] OPT qualid (* funind plugin *) | "functional" "induction" term OPT ( "using" one_term OPT ( "with" bindings ) ) OPT ( "as" simple_intropattern ) (* funind plugin *) | "soft" "functional" "induction" LIST1 one_term OPT ( "using" one_term OPT ( "with" bindings ) ) OPT ( "as" simple_intropattern ) (* funind plugin *) | "psatz_Z" OPT nat_or_var ltac_expr | "xlia" ltac_expr (* micromega plugin *) | "xnlia" ltac_expr (* micromega plugin *) | "xnra" ltac_expr (* micromega plugin *) | "xnqa" ltac_expr (* micromega plugin *) | "sos_Z" ltac_expr (* micromega plugin *) | "sos_Q" ltac_expr (* micromega plugin *) | "sos_R" ltac_expr (* micromega plugin *) | "lra_Q" ltac_expr (* micromega plugin *) | "lra_R" ltac_expr (* micromega plugin *) | "psatz_R" OPT nat_or_var ltac_expr | "psatz_Q" OPT nat_or_var ltac_expr | "zify_iter_specs" (* micromega plugin *) | "zify_op" (* micromega plugin *) | "zify_saturate" (* micromega plugin *) | "zify_iter_let" ltac_expr (* micromega plugin *) | "zify_elim_let" (* micromega plugin *) | "nsatz_compute" one_term (* nsatz plugin *) | "protect_fv" string OPT ( "in" ident ) | "ring_lookup" ltac_expr0 "[" LIST0 one_term "]" LIST1 one_term (* ring plugin *) | "field_lookup" ltac_expr "[" LIST0 one_term "]" LIST1 one_term (* ring plugin *) | "ring_lookup" ltac_expr0 "[" LIST0 one_term "]" LIST1 one_term (* ring plugin *) | "field_lookup" ltac_expr "[" LIST0 one_term "]" LIST1 one_term (* ring plugin *) | "by" ssrhintarg (* SSR plugin *) | "clear" natural (* SSR plugin *) | "move" OPT ( OPT ssrarg [ "->" | "<-" ] ) (* SSR plugin *) | "move" ssrarg OPT ssr_in (* SSR plugin *) | "case" OPT ( ssrarg OPT ssr_in ) (* SSR plugin *) | "elim" OPT ( ssrarg OPT ssr_in ) (* SSR plugin *) | "apply" OPT ssrapplyarg (* SSR plugin *) | "exact" [ ":" ssragen OPT ssragens | ssrbwdview OPT ssrclear | ssrclear ] (* SSR plugin *) | "exact" (* SSR plugin *) | "exact" "<:" term (* SSR plugin *) | "congr" OPT natural one_term OPT ssrdgens (* SSR plugin *) | "ssrinstancesofruleL2R" term (* SSR plugin *) | "ssrinstancesofruleR2L" term (* SSR plugin *) | "rewrite" LIST1 rewrite_item OPT ssr_in (* SSR plugin *) | "unlock" LIST0 ( OPT ( "{" ssr_occurrences "}" ) term ) OPT ssr_in (* SSR plugin *) | "pose" "fix" ssrbvar LIST0 ssrbinder OPT ( "{" "struct" ident "}" ) ssrdefbody (* SSR plugin *) | "pose" "cofix" ssrbvar LIST0 ssrbinder ssrdefbody (* SSR plugin *) | "pose" ident LIST0 ssrbinder ssrdefbody (* SSR plugin *) | "set" ident OPT ( ":" term ) ":=" [ "{" ssr_occurrences "}" cpattern | lcpattern ] OPT ssr_in (* SSR plugin *) | "abstract" ssrdgens (* SSR plugin *) | "have" ssrhpats_wtransp LIST0 ssrbinder ssrhavefwd (* SSR plugin *) | "have" [ "suff" | "suffices" ] OPT ssripats ssrhavefwd (* SSR plugin *) | [ "suff" | "suffices" ] OPT ( "have" OPT ssripats ) ssrhavefwd (* SSR plugin *) | [ "suff" | "suffices" ] ssrsufffwd (* SSR plugin *) | [ "wlog" | "without loss" ] OPT [ "suff" | "suffices" ] OPT ssripats ssrwlogfwd OPT ( "by" ssrhintarg ) (* SSR plugin *) | [ "gen" | "generally" ] "have" OPT ssrclear OPT ( [ ident | "_" ] "," ) OPT ssripats ssrwlogfwd OPT ( "by" ssrhintarg ) (* SSR plugin *) | "under" rewrite_item OPT ssrintros OPT ( "do" ssrhint3arg ) (* SSR plugin *) | "ssrinstancesoftpat" ssr_one_term_pattern (* SSR plugin *) | ltac_expr ";" "first" ssr_first_else (* SSR plugin *) | ltac_expr ";" "first" ssrseqarg (* SSR plugin *) | ltac_expr ";" "last" ssrseqarg (* SSR plugin *) | match_key OPT "reverse" "goal" "with" OPT "|" LIST1 ( goal_pattern "=>" ltac_expr 
) SEP "|" "end" | match_key ltac_expr "with" OPT "|" LIST1 ( match_pattern "=>" ltac_expr ) SEP "|" "end" | "dependent" "inversion" [ ident | natural ] OPT ( "as" or_and_intropattern ) OPT [ "with" one_term ] | "dependent" "simple" "inversion" [ ident | natural ] OPT ( "as" or_and_intropattern ) OPT [ "with" one_term ] | "dependent" "inversion_clear" [ ident | natural ] OPT ( "as" or_and_intropattern ) OPT [ "with" one_term ] | "classical_left" | "classical_right" | "contradict" ident | "dintuition" OPT ltac_expr | "discrR" | "dtauto" | "easy" | "exfalso" | "inversion_sigma" OPT ( ident OPT ( "as" simple_intropattern ) ) | "lia" | "lra" | "nia" | "now_show" one_term | "nra" | "over" (* SSR plugin *) | "split_Rabs" | "split_Rmult" | "tauto" | "time_constr" ltac_expr | "zify" | "assert_fails" ltac_expr3 | "assert_succeeds" ltac_expr3 | "dependent" "destruction" ident OPT ( "generalizing" LIST1 ident ) OPT ( "using" one_term ) | "dependent" "induction" ident OPT ( [ "generalizing" | "in" ] LIST1 ident ) OPT ( "using" one_term ) | "field" OPT ( "[" LIST1 one_term "]" ) | "field_simplify" OPT ( "[" LIST1 one_term "]" ) LIST1 one_term OPT ( "in" ident ) | "field_simplify_eq" OPT ( "[" LIST1 one_term "]" ) OPT ( "in" ident ) | "intuition" OPT ltac_expr | "now" ltac_expr | "nsatz" OPT ( "with" "radicalmax" ":=" one_term "strategy" ":=" one_term "parameters" ":=" one_term "variables" ":=" one_term ) | "psatz" one_term OPT nat_or_var | "ring" OPT ( "[" LIST1 one_term "]" ) | "ring_simplify" OPT ( "[" LIST1 one_term "]" ) LIST1 one_term OPT ( "in" ident ) | "match" ltac2_expr5 "with" OPT ltac2_branches "end" | "if" ltac2_expr5 "then" ltac2_expr5 "else" ltac2_expr5 | qualid LIST1 tactic_arg ] hloc: [ | "in" "|-" "*" | "in" ident | "in" "(" "type" "of" ident ")" | "in" "(" "value" "of" ident ")" ] as_ipat: [ | "as" simple_intropattern ] as_name: [ | "as" ident ] oriented_rewriter: [ | OPT [ "->" | "<-" ] OPT natural OPT [ "?" | "!" ] one_term_with_bindings ] one_term_with_bindings: [ | OPT ">" one_term OPT ( "with" bindings ) ] induction_clause: [ | induction_arg OPT ( "as" or_and_intropattern ) OPT ( "eqn" ":" naming_intropattern ) OPT occurrences ] induction_arg: [ | one_term_with_bindings | natural ] induction_principle: [ | "using" one_term OPT ( "with" bindings ) OPT occurrences ] auto_using: [ | "using" LIST1 one_term SEP "," ] hintbases: [ | "with" "*" | "with" LIST1 ident ] intropattern: [ | "*" | "**" | simple_intropattern ] simple_intropattern: [ | simple_intropattern_closed LIST0 [ "%" term0 ] ] simple_intropattern_closed: [ | naming_intropattern | "_" | or_and_intropattern | equality_intropattern ] naming_intropattern: [ | ident | "?" | "?" 
ident ] or_and_intropattern: [ | "[" LIST0 ( LIST0 intropattern ) SEP "|" "]" | "(" LIST0 simple_intropattern SEP "," ")" | "(" simple_intropattern "&" LIST1 simple_intropattern SEP "&" ")" ] equality_intropattern: [ | "->" | "<-" | "[=" LIST0 intropattern "]" ] bindings: [ | LIST1 ( "(" [ ident | natural ] ":=" term ")" ) | LIST1 one_term ] int_or_var: [ | [ integer | ident ] ] comparison: [ | "=" | "<" | "<=" | ">" | ">=" ] bindings_with_parameters: [ | "(" ident LIST0 simple_binder ":=" term ")" ] q_clause: [ | ltac2_clause (* Ltac2 plugin *) ] ltac2_clause: [ | "in" ltac2_in_clause (* ltac2 plugin *) | "at" ltac2_occs_nums (* ltac2 plugin *) ] ltac2_in_clause: [ | "*" OPT ltac2_occs (* ltac2 plugin *) | "*" "|-" OPT ltac2_concl_occ (* ltac2 plugin *) | LIST0 ltac2_hypident_occ SEP "," OPT ( "|-" OPT ltac2_concl_occ ) (* Ltac2 plugin *) ] q_occurrences: [ | OPT ltac2_occs (* ltac2 plugin *) ] ltac2_occs: [ | "at" ltac2_occs_nums (* ltac2 plugin *) ] ltac2_occs_nums: [ | OPT "-" LIST1 [ natural (* ltac2 plugin *) | "$" ident ] (* Ltac2 plugin *) ] ltac2_concl_occ: [ | "*" OPT ltac2_occs (* ltac2 plugin *) ] ltac2_hypident_occ: [ | ltac2_hypident OPT ltac2_occs (* ltac2 plugin *) ] ltac2_hypident: [ | ident_or_anti (* ltac2 plugin *) | "(" "type" "of" ident_or_anti ")" (* ltac2 plugin *) | "(" "value" "of" ident_or_anti ")" (* ltac2 plugin *) ] q_induction_clause: [ | ltac2_induction_clause (* ltac2 plugin *) ] ltac2_induction_clause: [ | ltac2_destruction_arg OPT ltac2_as_or_and_ipat OPT ltac2_eqn_ipat OPT ltac2_clause (* Ltac2 plugin *) ] ltac2_as_or_and_ipat: [ | "as" ltac2_or_and_intropattern (* ltac2 plugin *) ] ltac2_eqn_ipat: [ | "eqn" ":" ltac2_naming_intropattern (* ltac2 plugin *) ] q_conversion: [ | ltac2_conversion (* ltac2 plugin *) ] ltac2_conversion: [ | term (* ltac2 plugin *) | term "with" term (* ltac2 plugin *) ] q_rewriting: [ | ltac2_oriented_rewriter (* ltac2 plugin *) ] ltac2_oriented_rewriter: [ | OPT [ "->" | "<-" ] ltac2_rewriter (* ltac2 plugin *) ] ltac2_rewriter: [ | OPT natural OPT [ "?" | "!" ] ltac2_constr_with_bindings ] q_dispatch: [ | ltac2_for_each_goal (* ltac2 plugin *) ] ltac2_for_each_goal: [ | ltac2_goal_tactics (* Ltac2 plugin *) | OPT ( ltac2_goal_tactics "|" ) OPT ltac2_expr ".." OPT ( "|" ltac2_goal_tactics ) (* Ltac2 plugin *) ] ltac2_goal_tactics: [ | LIST0 ( OPT ltac2_expr ) SEP "|" (* Ltac2 plugin *) ] q_reductions: [ | ltac2_reductions (* ltac2 plugin *) ] ltac2_reductions: [ | LIST1 ltac2_red_flag (* ltac2 plugin *) | OPT ltac2_delta_reductions (* ltac2 plugin *) ] ltac2_red_flag: [ | "beta" (* ltac2 plugin *) | "iota" (* ltac2 plugin *) | "match" (* ltac2 plugin *) | "fix" (* ltac2 plugin *) | "cofix" (* ltac2 plugin *) | "zeta" (* ltac2 plugin *) | "delta" OPT ltac2_delta_reductions (* ltac2 plugin *) ] ltac2_delta_reductions: [ | OPT "-" "[" LIST1 refglobal "]" ] q_reference: [ | refglobal (* ltac2 plugin *) ] refglobal: [ | "&" ident (* ltac2 plugin *) | qualid (* ltac2 plugin *) | "$" ident (* ltac2 plugin *) ] q_hintdb: [ | hintdb (* ltac2 plugin *) ] hintdb: [ | "*" (* ltac2 plugin *) | LIST1 ident_or_anti (* ltac2 plugin *) ] q_constr_matching: [ | ltac2_match_list (* ltac2 plugin *) ] ltac2_match_key: [ | "lazy_match!" | "match!" | "multi_match!" 
] ltac2_match_list: [ | OPT "|" LIST1 ltac2_match_rule SEP "|" ] ltac2_match_rule: [ | ltac2_match_pattern "=>" ltac2_expr (* ltac2 plugin *) ] ltac2_match_pattern: [ | cpattern (* ltac2 plugin *) | "context" OPT ident "[" cpattern "]" (* ltac2 plugin *) ] q_goal_matching: [ | goal_match_list (* ltac2 plugin *) ] goal_match_list: [ | OPT "|" LIST1 gmatch_rule SEP "|" ] gmatch_rule: [ | gmatch_pattern "=>" ltac2_expr (* ltac2 plugin *) ] gmatch_pattern: [ | "[" LIST0 gmatch_hyp_pattern SEP "," "|-" ltac2_match_pattern "]" (* ltac2 plugin *) ] gmatch_hyp_pattern: [ | name ":" ltac2_match_pattern (* ltac2 plugin *) ] q_move_location: [ | move_location (* ltac2 plugin *) ] move_location: [ | "at" "top" (* ltac2 plugin *) | "at" "bottom" (* ltac2 plugin *) | "after" ident_or_anti (* ltac2 plugin *) | "before" ident_or_anti (* ltac2 plugin *) ] q_pose: [ | pose (* ltac2 plugin *) ] pose: [ | "(" ident_or_anti ":=" term ")" (* ltac2 plugin *) | term OPT ltac2_as_name (* ltac2 plugin *) ] ltac2_as_name: [ | "as" ident_or_anti (* ltac2 plugin *) ] q_assert: [ | assertion (* ltac2 plugin *) ] assertion: [ | "(" ident_or_anti ":=" term ")" (* ltac2 plugin *) | "(" ident_or_anti ":" term ")" OPT ltac2_by_tactic (* ltac2 plugin *) | term OPT ltac2_as_ipat OPT ltac2_by_tactic (* ltac2 plugin *) ] ltac2_as_ipat: [ | "as" ltac2_simple_intropattern (* ltac2 plugin *) ] ltac2_by_tactic: [ | "by" ltac2_expr (* ltac2 plugin *) ] ltac2_entry: [ ] tac2def_body: [ | [ "_" | ident ] LIST0 tac2pat0 OPT ( ":" ltac2_type ) ":=" ltac2_expr (* ltac2 plugin *) ] tac2typ_def: [ | OPT tac2typ_prm qualid OPT ( [ ":=" | "::=" ] tac2typ_knd ) (* ltac2 plugin *) ] tac2typ_prm: [ | ltac2_typevar (* ltac2 plugin *) | "(" LIST1 ltac2_typevar SEP "," ")" (* ltac2 plugin *) ] tac2typ_knd: [ | ltac2_type (* ltac2 plugin *) | "[" OPT ( OPT "|" LIST1 tac2alg_constructor SEP "|" ) "]" (* ltac2 plugin *) | "[" ".." 
"]" (* ltac2 plugin *) | "{" OPT ( LIST1 tac2rec_field SEP ";" OPT ";" ) "}" (* ltac2 plugin *) ] tac2alg_constructor: [ | ident (* ltac2 plugin *) | ident "(" LIST0 ltac2_type SEP "," ")" (* ltac2 plugin *) ] tac2rec_field: [ | OPT "mutable" ident ":" ltac2_type (* ltac2 plugin *) ] ltac2_scope: [ | string (* ltac2 plugin *) | integer (* ltac2 plugin *) | name (* Ltac2 plugin *) | name "(" LIST1 ltac2_scope SEP "," ")" (* Ltac2 plugin *) ] ltac2_expr: [ | ltac2_expr5 ";" ltac2_expr (* ltac2 plugin *) | ltac2_expr5 (* ltac2 plugin *) ] ltac2_expr5: [ | "fun" LIST1 tac2pat0 OPT ( ":" ltac2_type ) "=>" ltac2_expr (* ltac2 plugin *) | "let" OPT "rec" ltac2_let_clause LIST0 ( "with" ltac2_let_clause ) "in" ltac2_expr (* Ltac2 plugin *) | ltac2_expr3 (* ltac2 plugin *) ] ltac2_let_clause: [ | LIST1 tac2pat0 OPT ( ":" ltac2_type ) ":=" ltac2_expr (* ltac2 plugin *) ] ltac2_expr3: [ | LIST1 ltac2_expr2 SEP "," (* Ltac2 plugin *) ] ltac2_expr2: [ | ltac2_expr1 "::" ltac2_expr2 (* ltac2 plugin *) | ltac2_expr1 (* ltac2 plugin *) ] ltac2_expr1: [ | ltac2_expr0 LIST1 ltac2_expr0 (* ltac2 plugin *) | ltac2_expr0 ".(" qualid ")" (* ltac2 plugin *) | ltac2_expr0 ".(" qualid ")" ":=" ltac2_expr5 (* ltac2 plugin *) | ltac2_expr0 (* ltac2 plugin *) ] tac2rec_fieldexpr: [ | qualid ":=" ltac2_expr1 (* ltac2 plugin *) ] ltac2_expr0: [ | "(" ltac2_expr ")" (* ltac2 plugin *) | "(" ltac2_expr ":" ltac2_type ")" (* ltac2 plugin *) | "()" (* ltac2 plugin *) | "[" LIST0 ltac2_expr5 SEP ";" "]" (* ltac2 plugin *) | "{" OPT ( LIST1 tac2rec_fieldexpr OPT ";" ) "}" (* ltac2 plugin *) | ltac2_tactic_atom (* ltac2 plugin *) ] ltac2_tactic_atom: [ | integer (* ltac2 plugin *) | string (* ltac2 plugin *) | qualid (* ltac2 plugin *) | "@" ident (* ltac2 plugin *) | "&" lident (* ltac2 plugin *) | "'" term (* ltac2 plugin *) | ltac2_quotations ] ltac2_quotations: [ | "ident" ":" "(" lident ")" | "constr" ":" "(" term ")" | "open_constr" ":" "(" term ")" | "pat" ":" "(" cpattern ")" | "reference" ":" "(" [ "&" ident | qualid ] ")" | "ltac1" ":" "(" ltac1_expr_in_env ")" | "ltac1val" ":" "(" ltac1_expr_in_env ")" ] ltac1_expr_in_env: [ | ltac_expr (* ltac2 plugin *) | LIST0 ident "|-" ltac_expr (* ltac2 plugin *) ] ltac2_branches: [ | OPT "|" LIST1 ( tac2pat1 "=>" ltac2_expr ) SEP "|" ] tac2pat1: [ | qualid LIST1 tac2pat0 (* ltac2 plugin *) | qualid (* ltac2 plugin *) | "[" "]" (* ltac2 plugin *) | tac2pat0 "::" tac2pat0 (* ltac2 plugin *) | tac2pat0 (* ltac2 plugin *) ] tac2pat0: [ | "_" (* ltac2 plugin *) | "()" (* ltac2 plugin *) | qualid (* ltac2 plugin *) | "(" OPT atomic_tac2pat ")" (* ltac2 plugin *) ] atomic_tac2pat: [ | tac2pat1 ":" ltac2_type (* ltac2 plugin *) | tac2pat1 "," LIST0 tac2pat1 SEP "," (* ltac2 plugin *) | tac2pat1 (* ltac2 plugin *) ] tac2mode: [ | ltac2_expr [ "." | "..." 
] (* ltac2 plugin *) | "Eval" red_expr "in" term | "Compute" term | "Check" term | "About" reference OPT univ_name_list | "SearchPattern" one_pattern OPT ( [ "inside" | "in" | "outside" ] LIST1 qualid ) | "SearchRewrite" one_pattern OPT ( [ "inside" | "in" | "outside" ] LIST1 qualid ) | "Search" LIST1 ( search_query ) OPT ( [ "inside" | "in" | "outside" ] LIST1 qualid ) ] in_hyp_as: [ | "in" LIST1 [ ident OPT as_ipat ] SEP "," ] simple_binder: [ | name | "(" LIST1 name ":" term ")" ] func_scheme_def: [ | ident ":=" "Induction" "for" qualid "Sort" sort_family (* funind plugin *) ] rewrite_occs: [ | LIST1 integer | ident ] rewstrategy: [ | one_term | "<-" one_term | "fail" | "id" | "refl" | "progress" rewstrategy | "try" rewstrategy | rewstrategy ";" rewstrategy | "choice" LIST1 rewstrategy | "repeat" rewstrategy | "any" rewstrategy | "subterm" rewstrategy | "subterms" rewstrategy | "innermost" rewstrategy | "outermost" rewstrategy | "bottomup" rewstrategy | "topdown" rewstrategy | "hints" ident | "terms" LIST0 one_term | "eval" red_expr | "fold" one_term | "(" rewstrategy ")" | "old_hints" ident ] l3_tactic: [ ] l2_tactic: [ ] l1_tactic: [ ] binder_tactic: [ ] value_tactic: [ ] syn_value: [ | ident ":" "(" nonterminal ")" ] ltac_expr: [ | [ ltac_expr4 | binder_tactic ] ] ltac_expr4: [ | ltac_expr3 ";" [ ltac_expr3 | binder_tactic ] | ltac_expr3 ";" "[" for_each_goal "]" | ltac_expr3 ] ltac_expr3: [ | l3_tactic | ltac_expr2 ] ltac_expr2: [ | ltac_expr1 "+" [ ltac_expr2 | binder_tactic ] | ltac_expr1 "||" [ ltac_expr2 | binder_tactic ] | l2_tactic | ltac_expr1 ] ltac_expr1: [ | tactic_value | qualid LIST1 tactic_arg | l1_tactic | ltac_expr0 ] tactic_value: [ | [ value_tactic | syn_value ] ] tactic_arg: [ | tactic_value | term | "()" ] ltac_expr0: [ | "(" ltac_expr ")" | "[>" for_each_goal "]" | tactic_atom ] tactic_atom: [ | integer | qualid | "()" ] ssrseqarg: [ | ssrseqidx "[" ssrortacs "]" OPT ssrorelse (* SSR plugin *) | OPT ssrseqidx ssrswap (* SSR plugin *) | ltac_expr3 (* SSR plugin *) ] ssrseqidx: [ | ident (* SSR plugin *) | natural (* SSR plugin *) ] ssrorelse: [ | "||" ltac_expr2 (* SSR plugin *) ] ssrswap: [ | "first" (* SSR plugin *) | "last" (* SSR plugin *) ] ssr_first_else: [ | ssr_first OPT ssrorelse (* SSR plugin *) ] ssr_first: [ | "[" LIST0 ltac_expr SEP "|" "]" LIST0 ssrintros (* SSR plugin *) ] let_clause: [ | name ":=" ltac_expr | ident LIST1 name ":=" ltac_expr ] for_each_goal: [ | goal_tactics | OPT ( goal_tactics "|" ) OPT ltac_expr ".." OPT ( "|" goal_tactics ) ] goal_tactics: [ | LIST0 ( OPT ltac_expr ) SEP "|" ] toplevel_selector: [ | selector | "all" | "!" | "par" ] selector: [ | LIST1 range_selector SEP "," | "[" ident "]" ] range_selector: [ | natural "-" natural | natural ] match_key: [ | "lazymatch" | "match" | "multimatch" ] match_pattern: [ | cpattern | "context" OPT ident "[" cpattern "]" ] cpattern: [ | term ] goal_pattern: [ | LIST0 match_hyp SEP "," "|-" match_pattern | "[" LIST0 match_hyp SEP "," "|-" match_pattern "]" | "_" ] match_hyp: [ | name ":" match_pattern | name ":=" match_pattern | name ":=" "[" match_pattern "]" ":" match_pattern ] coq-8.15.0/doc/tools/latex_filter000077500000000000000000000015201417001151100166710ustar00rootroot00000000000000#!/bin/sh # First argument is the number of lines to treat # Second argument is optional and, if it is "no", overfull are not displayed i=$1 nooverfull=$2 error=0 verbose=0 chapter="" file="" while : ; do read -r line; case $line in "! 
"*) echo $line $file; error=1 verbose=1 ;; "LaTeX Font Info"*|"LaTeX Info"*|"Underfull "*) verbose=0 ;; "Overfull "*) verbose=0 if [ "$nooverfull" != "no" ]; then echo $line $file; fi ;; "LaTeX "*) verbose=0 echo $line $chapter ;; "["*|"Chapter "*) verbose=0 ;; "(./"*) file="(file `echo $line | cut -b 4- | cut -d' ' -f 1`)" verbose=0 ;; *) if [ $verbose = 1 ]; then echo $line; fi esac; if [ "$i" = "0" ]; then break; else i=`expr $i - 1`; fi; done exit $error coq-8.15.0/doc/tools/show_latex_messages000077500000000000000000000002751417001151100202610ustar00rootroot00000000000000#!/bin/sh tooldir=$(dirname $0) if [ "$1" = "-no-overfull" ]; then cat $2 | "$tooldir"/latex_filter `cat $2 | wc -l` no else cat $1 | "$tooldir"/latex_filter `cat $1 | wc -l` yes fi coq-8.15.0/dune000066400000000000000000000025351417001151100132410ustar00rootroot00000000000000; Default flags for all Coq libraries. (env (dev (flags :standard -rectypes -w -9-27+40+60-70 \ -short-paths)) (release (flags :standard -rectypes) (ocamlopt_flags :standard -O3 -unbox-closures)) (ireport (flags :standard -rectypes -w -9-27-40+60-70) (ocamlopt_flags :standard -O3 -unbox-closures -inlining-report))) ; Information about flags for release mode: ; ; In #9665 we tried to add (c_flags -O3) to the release setup, ; unfortunately the resulting VM seems to be slower [5% slower on ; fourcolor, thus we keep the default C flags for now, which seem to ; be -O2. ; The _ profile could help factoring the above, however it doesn't ; seem to work like we'd expect/like: ; ; (_ (flags :standard -rectypes))) (alias (name default) (deps coq-core.install coq-stdlib.install coqide-server.install coqide.install)) (install (section lib) (package coq-core) (files revision)) (rule (targets revision) (mode fallback) (deps (:rev-script dev/tools/make_git_revision.sh)) (action (with-stdout-to revision (bash %{rev-script})))) ; Use summary.log as the target (alias (name runtest) (package coqide-server) (deps test-suite/summary.log)) ; For make compat (alias (name all-src) (deps (source_tree user-contrib) (source_tree theories) (source_tree plugins))) ; (dirs (:standard _build_ci)) (documentation (package coq)) coq-8.15.0/dune-project000066400000000000000000000113261417001151100147030ustar00rootroot00000000000000(lang dune 2.5) (name coq) (using coq 0.2) (formatting (enabled_for ocaml)) ; Pending on dune 2.8 as to avoid bug with dune subst ; see https://github.com/ocaml/dune/pull/3879 and ; https://github.com/ocaml/dune/pull/3879 ; (generate_opam_files true) (license LGPL-2.1-only) (maintainers "The Coq development team ") (authors "The Coq development team, INRIA, CNRS, and contributors") ; This generates bug-reports and dev-repo (source (github coq/coq)) (homepage https://coq.inria.fr/) (documentation "https://coq.github.io/doc/") (version dev) ; Note that we use coq.opam.template to have dune add the correct opam ; prefix for configure (package (name coq-core) (depends (ocaml (>= 4.05.0)) (ocamlfind (>= 1.8.1)) (zarith (>= 1.10)) (ounit2 :with-test)) (synopsis "The Coq Proof Assistant -- Core Binaries and Tools") (description "Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. Typical applications include the certification of properties of programming languages (e.g. 
the CompCert compiler certification project, or the Bedrock verified low-level programming library), the formalization of mathematics (e.g. the full formalization of the Feit-Thompson theorem or homotopy type theory) and teaching. This package includes the Coq core binaries, plugins, and tools, but not the vernacular standard library. Note that in this setup, Coq needs to be started with the -boot and -noinit options, as will otherwise fail to find the regular Coq prelude, now living in the coq-stdlib package.")) (package (name coq-stdlib) (depends (coq-core (= :version))) (synopsis "The Coq Proof Assistant -- Standard Library") (description "Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. Typical applications include the certification of properties of programming languages (e.g. the CompCert compiler certification project, or the Bedrock verified low-level programming library), the formalization of mathematics (e.g. the full formalization of the Feit-Thompson theorem or homotopy type theory) and teaching. This package includes the Coq Standard Library, that is to say, the set of modules usually bound to the Coq.* namespace.")) (package (name coqide-server) (depends (coq-core (= :version))) (synopsis "The Coq Proof Assistant, XML protocol server") (description "Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. This package provides the `coqidetop` language server, an implementation of Coq's [XML protocol](https://github.com/coq/coq/blob/master/dev/doc/xml-protocol.md) which allows clients, such as CoqIDE, to interact with Coq in a structured way.")) (package (name coqide) (depends (coqide-server (= :version))) (synopsis "The Coq Proof Assistant --- GTK3 IDE") (description "Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. This package provides the CoqIDE, a graphical user interface for the development of interactive proofs.")) (package (name coq-doc) (license "OPL-1.0") (depends (dune (and :build (>= 2.5.0))) (conf-python-3 :build) (coq (and :build (= :version)))) (synopsis "The Coq Proof Assistant --- Reference Manual") (description "Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. This package provides the Coq Reference Manual.")) (package (name coq) (depends (coq-core (= :version)) (coq-stdlib (= :version))) (synopsis "The Coq Proof Assistant") (description "Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs. Typical applications include the certification of properties of programming languages (e.g. the CompCert compiler certification project, or the Bedrock verified low-level programming library), the formalization of mathematics (e.g. 
the full formalization of the Feit-Thompson theorem or homotopy type theory) and teaching.")) coq-8.15.0/engine/000077500000000000000000000000001417001151100136235ustar00rootroot00000000000000coq-8.15.0/engine/dune000066400000000000000000000002761417001151100145060ustar00rootroot00000000000000(library (name engine) (synopsis "Coq's Tactic Engine") (public_name coq-core.engine) (wrapped false) ; until ocaml/dune#4892 fixed ; (private_modules univSubst) (libraries library)) coq-8.15.0/engine/eConstr.ml000066400000000000000000001007651417001151100156030ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Id.equal id id' | _ -> false let isRelN sigma n c = match kind sigma c with Rel n' -> Int.equal n n' | _ -> false let isRef sigma c = match kind sigma c with | Const _ | Ind _ | Construct _ | Var _ -> true | _ -> false let isRefX sigma x c = let open GlobRef in match x, kind sigma c with | ConstRef c, Const (c', _) -> Constant.CanOrd.equal c c' | IndRef i, Ind (i', _) -> Ind.CanOrd.equal i i' | ConstructRef i, Construct (i', _) -> Construct.CanOrd.equal i i' | VarRef id, Var id' -> Id.equal id id' | _ -> false let destRel sigma c = match kind sigma c with | Rel p -> p | _ -> raise DestKO let destVar sigma c = match kind sigma c with | Var p -> p | _ -> raise DestKO let destInd sigma c = match kind sigma c with | Ind p -> p | _ -> raise DestKO let destEvar sigma c = match kind sigma c with | Evar p -> p | _ -> raise DestKO let destMeta sigma c = match kind sigma c with | Meta p -> p | _ -> raise DestKO let destSort sigma c = match kind sigma c with | Sort p -> p | _ -> raise DestKO let destCast sigma c = match kind sigma c with | Cast (c, k, t) -> (c, k, t) | _ -> raise DestKO let destApp sigma c = match kind sigma c with | App (f, a) -> (f, a) | _ -> raise DestKO let destLambda sigma c = match kind sigma c with | Lambda (na, t, c) -> (na, t, c) | _ -> raise DestKO let destLetIn sigma c = match kind sigma c with | LetIn (na, b, t, c) -> (na, b, t, c) | _ -> raise DestKO let destProd sigma c = match kind sigma c with | Prod (na, t, c) -> (na, t, c) | _ -> raise DestKO let destConst sigma c = match kind sigma c with | Const p -> p | _ -> raise DestKO let destConstruct sigma c = match kind sigma c with | Construct p -> p | _ -> raise DestKO let destFix sigma c = match kind sigma c with | Fix p -> p | _ -> raise DestKO let destCoFix sigma c = match kind sigma c with | CoFix p -> p | _ -> raise DestKO let destCase sigma c = match kind sigma c with | Case (ci, u, pms, t, iv, c, p) -> (ci, u, pms, t, iv, c, p) | _ -> raise DestKO let destProj sigma c = match kind sigma c with | Proj (p, c) -> (p, c) | _ -> raise DestKO let destRef sigma c = let open GlobRef in match kind sigma c with | Var x -> VarRef x, EInstance.empty | Const (c,u) -> ConstRef c, u | Ind (ind,u) -> IndRef ind, u | Construct (c,u) -> ConstructRef c, u | _ -> raise DestKO let decompose_app sigma c = match kind sigma c with | App (f,cl) -> (f, Array.to_list cl) | _ -> (c,[]) let decompose_lam sigma c = let rec lamdec_rec l c = match kind sigma c with | Lambda (x,t,c) -> lamdec_rec ((x,t)::l) c | Cast (c,_,_) -> lamdec_rec l c | _ -> l,c in lamdec_rec [] c let decompose_lam_assum sigma c = let open Rel.Declaration in let rec lamdec_rec l c = match kind sigma c with | Lambda (x,t,c) -> lamdec_rec (Context.Rel.add (LocalAssum (x,t)) l) c | LetIn (x,b,t,c) -> lamdec_rec 
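(* Added illustration (not part of the original source): the destructors and
   [decompose_app] above are the usual way to inspect a term without assuming
   it has been evar-normalized first.  A minimal client-side sketch; the
   helper name [is_applied_to] is hypothetical and assumes the coq-core
   libraries are in scope.
   {[
     let is_applied_to sigma (gr : Names.GlobRef.t) (c : EConstr.t) : bool =
       (* [decompose_app] never fails: a non-application comes back with an
          empty argument list, so the match below covers both cases. *)
       let head, args = EConstr.decompose_app sigma c in
       match args with
       | [] -> false
       | _ :: _ -> EConstr.isRefX sigma gr head
   ]} *)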
(Context.Rel.add (LocalDef (x,b,t)) l) c | Cast (c,_,_) -> lamdec_rec l c | _ -> l,c in lamdec_rec Context.Rel.empty c let decompose_lam_n_assum sigma n c = let open Rel.Declaration in if n < 0 then user_err Pp.(str "decompose_lam_n_assum: integer parameter must be positive"); let rec lamdec_rec l n c = if Int.equal n 0 then l,c else match kind sigma c with | Lambda (x,t,c) -> lamdec_rec (Context.Rel.add (LocalAssum (x,t)) l) (n-1) c | LetIn (x,b,t,c) -> lamdec_rec (Context.Rel.add (LocalDef (x,b,t)) l) n c | Cast (c,_,_) -> lamdec_rec l n c | c -> user_err Pp.(str "decompose_lam_n_assum: not enough abstractions") in lamdec_rec Context.Rel.empty n c let decompose_lam_n_decls sigma n = let open Rel.Declaration in if n < 0 then user_err Pp.(str "decompose_lam_n_decls: integer parameter must be positive"); let rec lamdec_rec l n c = if Int.equal n 0 then l,c else match kind sigma c with | Lambda (x,t,c) -> lamdec_rec (Context.Rel.add (LocalAssum (x,t)) l) (n-1) c | LetIn (x,b,t,c) -> lamdec_rec (Context.Rel.add (LocalDef (x,b,t)) l) (n-1) c | Cast (c,_,_) -> lamdec_rec l n c | c -> user_err Pp.(str "decompose_lam_n_decls: not enough abstractions") in lamdec_rec Context.Rel.empty n let lamn n env b = let rec lamrec = function | (0, env, b) -> b | (n, ((v,t)::l), b) -> lamrec (n-1, l, mkLambda (v,t,b)) | _ -> assert false in lamrec (n,env,b) let compose_lam l b = lamn (List.length l) l b let rec to_lambda sigma n prod = if Int.equal n 0 then prod else match kind sigma prod with | Prod (na,ty,bd) -> mkLambda (na,ty,to_lambda sigma (n-1) bd) | Cast (c,_,_) -> to_lambda sigma n c | _ -> user_err (Pp.mt ()) let decompose_prod sigma c = let rec proddec_rec l c = match kind sigma c with | Prod (x,t,c) -> proddec_rec ((x,t)::l) c | Cast (c,_,_) -> proddec_rec l c | _ -> l,c in proddec_rec [] c let decompose_prod_assum sigma c = let open Rel.Declaration in let rec proddec_rec l c = match kind sigma c with | Prod (x,t,c) -> proddec_rec (Context.Rel.add (LocalAssum (x,t)) l) c | LetIn (x,b,t,c) -> proddec_rec (Context.Rel.add (LocalDef (x,b,t)) l) c | Cast (c,_,_) -> proddec_rec l c | _ -> l,c in proddec_rec Context.Rel.empty c let decompose_prod_n_assum sigma n c = let open Rel.Declaration in if n < 0 then user_err Pp.(str "decompose_prod_n_assum: integer parameter must be positive"); let rec prodec_rec l n c = if Int.equal n 0 then l,c else match kind sigma c with | Prod (x,t,c) -> prodec_rec (Context.Rel.add (LocalAssum (x,t)) l) (n-1) c | LetIn (x,b,t,c) -> prodec_rec (Context.Rel.add (LocalDef (x,b,t)) l) (n-1) c | Cast (c,_,_) -> prodec_rec l n c | c -> user_err Pp.(str "decompose_prod_n_assum: not enough assumptions") in prodec_rec Context.Rel.empty n c let existential_type = Evd.existential_type let lift n c = of_constr (Vars.lift n (unsafe_to_constr c)) let of_branches : Constr.case_branch array -> case_branch array = match Evd.MiniEConstr.unsafe_eq with | Refl -> fun x -> x let unsafe_to_branches : case_branch array -> Constr.case_branch array = match Evd.MiniEConstr.unsafe_eq with | Refl -> fun x -> x let of_return : Constr.case_return -> case_return = match Evd.MiniEConstr.unsafe_eq with | Refl -> fun x -> x let unsafe_to_return : case_return -> Constr.case_return = match Evd.MiniEConstr.unsafe_eq with | Refl -> fun x -> x let map_branches f br = let f c = unsafe_to_constr (f (of_constr c)) in of_branches (Constr.map_branches f (unsafe_to_branches br)) let map_return_predicate f p = let f c = unsafe_to_constr (f (of_constr c)) in of_return (Constr.map_return_predicate f (unsafe_to_return p)) 
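(* Added illustration (not part of the original source): the [decompose_*]
   functions above mirror their [Constr] counterparts but go through the evar
   map, so defined evars and casts are seen through.  A small sketch of client
   code; [count_leading_binders] is a hypothetical helper.
   {[
     let count_leading_binders sigma (t : EConstr.types) : int =
       (* [decompose_prod_assum] collects the leading products and let-ins of
          [t] into a rel_context; its length is the number of binders visible
          up to the current evar instantiation. *)
       let ctx, _concl = EConstr.decompose_prod_assum sigma t in
       List.length ctx
   ]} *)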
let map sigma f c = let f c = unsafe_to_constr (f (of_constr c)) in of_constr (Constr.map f (unsafe_to_constr (whd_evar sigma c))) let map_with_binders sigma g f l c = let f l c = unsafe_to_constr (f l (of_constr c)) in of_constr (Constr.map_with_binders g f l (unsafe_to_constr (whd_evar sigma c))) let iter sigma f c = let f c = f (of_constr c) in Constr.iter f (unsafe_to_constr (whd_evar sigma c)) let expand_case env _sigma (ci, u, pms, p, iv, c, bl) = let u = EInstance.unsafe_to_instance u in let pms = unsafe_to_constr_array pms in let p = unsafe_to_return p in let iv = unsafe_to_case_invert iv in let c = unsafe_to_constr c in let bl = unsafe_to_branches bl in let (ci, p, iv, c, bl) = Inductive.expand_case env (ci, u, pms, p, iv, c, bl) in let p = of_constr p in let c = of_constr c in let iv = of_case_invert iv in let bl = of_constr_array bl in (ci, p, iv, c, bl) let annotate_case env sigma (ci, u, pms, p, iv, c, bl as case) = let (_, p, _, _, bl) = expand_case env sigma case in let p = (* Too bad we need to fetch this data in the environment, should be in the case_info instead. *) let (_, mip) = Inductive.lookup_mind_specif env ci.ci_ind in decompose_lam_n_decls sigma (mip.Declarations.mind_nrealdecls + 1) p in let mk_br c n = decompose_lam_n_decls sigma n c in let bl = Array.map2 mk_br bl ci.ci_cstr_ndecls in (ci, u, pms, p, iv, c, bl) let expand_branch env _sigma u pms (ind, i) (nas, _br) = let open Declarations in let u = EInstance.unsafe_to_instance u in let pms = unsafe_to_constr_array pms in let (mib, mip) = Inductive.lookup_mind_specif env ind in let paramdecl = Vars.subst_instance_context u mib.mind_params_ctxt in let paramsubst = Vars.subst_of_rel_context_instance paramdecl pms in let (ctx, _) = mip.mind_nf_lc.(i - 1) in let (ctx, _) = List.chop mip.mind_consnrealdecls.(i - 1) ctx in let ans = Inductive.instantiate_context u paramsubst nas ctx in let ans : rel_context = match Evd.MiniEConstr.unsafe_eq with Refl -> ans in ans let contract_case env _sigma (ci, p, iv, c, bl) = let p = unsafe_to_constr p in let iv = unsafe_to_case_invert iv in let c = unsafe_to_constr c in let bl = unsafe_to_constr_array bl in let (ci, u, pms, p, iv, c, bl) = Inductive.contract_case env (ci, p, iv, c, bl) in let u = EInstance.make u in let pms = of_constr_array pms in let p = of_return p in let iv = of_case_invert iv in let c = of_constr c in let bl = of_branches bl in (ci, u, pms, p, iv, c, bl) let iter_with_full_binders env sigma g f n c = let open Context.Rel.Declaration in match kind sigma c with | (Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _ | Construct _ | Int _ | Float _) -> () | Cast (c,_,t) -> f n c; f n t | Prod (na,t,c) -> f n t; f (g (LocalAssum (na, t)) n) c | Lambda (na,t,c) -> f n t; f (g (LocalAssum (na, t)) n) c | LetIn (na,b,t,c) -> f n b; f n t; f (g (LocalDef (na, b, t)) n) c | App (c,l) -> f n c; Array.Fun1.iter f n l | Evar (_,l) -> List.iter (fun c -> f n c) l | Case (ci,u,pms,p,iv,c,bl) -> let (ci, _, pms, p, iv, c, bl) = annotate_case env sigma (ci, u, pms, p, iv, c, bl) in let f_ctx (ctx, c) = f (List.fold_right g ctx n) c in Array.Fun1.iter f n pms; f_ctx p; iter_invert (f n) iv; f n c; Array.iter f_ctx bl | Proj (p,c) -> f n c | Fix (_,(lna,tl,bl)) -> Array.iter (f n) tl; let n' = Array.fold_left2_i (fun i n na t -> g (LocalAssum (na, lift i t)) n) n lna tl in Array.iter (f n') bl | CoFix (_,(lna,tl,bl)) -> Array.iter (f n) tl; let n' = Array.fold_left2_i (fun i n na t -> g (LocalAssum (na,lift i t)) n) n lna tl in Array.iter (f n') bl | Array (_u,t,def,ty) -> 
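(* Added illustration (not part of the original source): [map_with_binders]
   threads an arbitrary accumulator through binders, which is enough to write
   relocation-style traversals by hand.  The sketch below is a hypothetical
   reimplementation of lifting, shown only to make the calling convention
   concrete; real code should keep using [Vars.lift] and [Vars.liftn].
   {[
     (* Bump every de Bruijn index strictly greater than [k] by [n]. *)
     let rec lift_above sigma n k (c : EConstr.t) : EConstr.t =
       match EConstr.kind sigma c with
       | Constr.Rel i -> if i > k then EConstr.mkRel (i + n) else c
       | _ -> EConstr.map_with_binders sigma succ (lift_above sigma n) k c
   ]} *)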
Array.Fun1.iter f n t; f n def; f n ty let iter_with_binders sigma g f n c = let f l c = f l (of_constr c) in Constr.iter_with_binders g f n (unsafe_to_constr (whd_evar sigma c)) let fold sigma f acc c = let f acc c = f acc (of_constr c) in Constr.fold f acc (unsafe_to_constr (whd_evar sigma c)) let fold_with_binders sigma g f acc e c = let f e acc c = f e acc (of_constr c) in Constr.fold_constr_with_binders g f acc e (unsafe_to_constr (whd_evar sigma c)) let compare_gen k eq_inst eq_sort eq_constr nargs c1 c2 = (c1 == c2) || Constr.compare_head_gen_with k k eq_inst eq_sort eq_constr nargs c1 c2 let eq_constr sigma c1 c2 = let kind c = kind sigma c in let eq_inst _ i1 i2 = EInstance.equal sigma i1 i2 in let eq_sorts s1 s2 = ESorts.equal sigma s1 s2 in let rec eq_constr nargs c1 c2 = compare_gen kind eq_inst eq_sorts eq_constr nargs c1 c2 in eq_constr 0 c1 c2 let eq_constr_nounivs sigma c1 c2 = let kind c = kind sigma c in let rec eq_constr nargs c1 c2 = compare_gen kind (fun _ _ _ -> true) (fun _ _ -> true) eq_constr nargs c1 c2 in eq_constr 0 c1 c2 let compare_constr sigma cmp c1 c2 = let kind c = kind sigma c in let eq_inst _ i1 i2 = EInstance.equal sigma i1 i2 in let eq_sorts s1 s2 = ESorts.equal sigma s1 s2 in let cmp nargs c1 c2 = cmp c1 c2 in compare_gen kind eq_inst eq_sorts cmp 0 c1 c2 let compare_cumulative_instances cv_pb nargs_ok variances u u' cstrs = let open UnivProblem in if not nargs_ok then enforce_eq_instances_univs false u u' cstrs else CArray.fold_left3 (fun cstrs v u u' -> let open Univ.Variance in match v with | Irrelevant -> Set.add (UWeak (u,u')) cstrs | Covariant -> let u = Univ.Universe.make u in let u' = Univ.Universe.make u' in (match cv_pb with | Reduction.CONV -> Set.add (UEq (u,u')) cstrs | Reduction.CUMUL -> Set.add (ULe (u,u')) cstrs) | Invariant -> let u = Univ.Universe.make u in let u' = Univ.Universe.make u' in Set.add (UEq (u,u')) cstrs) cstrs variances (Univ.Instance.to_array u) (Univ.Instance.to_array u') let cmp_inductives cv_pb (mind,ind as spec) nargs u1 u2 cstrs = let open UnivProblem in match mind.Declarations.mind_variance with | None -> enforce_eq_instances_univs false u1 u2 cstrs | Some variances -> let num_param_arity = Reduction.inductive_cumulativity_arguments spec in compare_cumulative_instances cv_pb (Int.equal num_param_arity nargs) variances u1 u2 cstrs let cmp_constructors (mind, ind, cns as spec) nargs u1 u2 cstrs = let open UnivProblem in match mind.Declarations.mind_variance with | None -> enforce_eq_instances_univs false u1 u2 cstrs | Some _ -> let num_cnstr_args = Reduction.constructor_cumulativity_arguments spec in if not (Int.equal num_cnstr_args nargs) then enforce_eq_instances_univs false u1 u2 cstrs else Array.fold_left2 (fun cstrs u1 u2 -> UnivProblem.(Set.add (UWeak (u1,u2)) cstrs)) cstrs (Univ.Instance.to_array u1) (Univ.Instance.to_array u2) let eq_universes env sigma cstrs cv_pb refargs l l' = if EInstance.is_empty l then (assert (EInstance.is_empty l'); true) else let l = EInstance.kind sigma l and l' = EInstance.kind sigma l' in let open GlobRef in let open UnivProblem in match refargs with | Some (ConstRef c, 1) when Environ.is_array_type env c -> cstrs := compare_cumulative_instances cv_pb true [|Univ.Variance.Irrelevant|] l l' !cstrs; true | None | Some (ConstRef _, _) -> cstrs := enforce_eq_instances_univs true l l' !cstrs; true | Some (VarRef _, _) -> assert false (* variables don't have instances *) | Some (IndRef ind, nargs) -> let mind = Environ.lookup_mind (fst ind) env in cstrs := cmp_inductives cv_pb 
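(* Added illustration (not part of the original source): [fold] only visits
   the immediate subterms, so recursive analyses tie the knot explicitly.
   Hypothetical helper counting the evar nodes still visible after
   [kind]-normalization, i.e. undefined evars, including occurrences inside
   evar instances.
   {[
     let rec count_undefined_evar_nodes sigma acc (c : EConstr.t) : int =
       let acc = match EConstr.kind sigma c with
         | Constr.Evar _ -> acc + 1
         | _ -> acc
       in
       EConstr.fold sigma (count_undefined_evar_nodes sigma) acc c
   ]} *)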
(mind,snd ind) nargs l l' !cstrs; true | Some (ConstructRef ((mi,ind),ctor), nargs) -> let mind = Environ.lookup_mind mi env in cstrs := cmp_constructors (mind,ind,ctor) nargs l l' !cstrs; true let test_constr_universes env sigma leq ?(nargs=0) m n = let open UnivProblem in let kind c = kind sigma c in if m == n then Some Set.empty else let cstrs = ref Set.empty in let cv_pb = if leq then Reduction.CUMUL else Reduction.CONV in let eq_universes refargs l l' = eq_universes env sigma cstrs Reduction.CONV refargs l l' and leq_universes refargs l l' = eq_universes env sigma cstrs cv_pb refargs l l' in let eq_sorts s1 s2 = let s1 = ESorts.kind sigma s1 in let s2 = ESorts.kind sigma s2 in if Sorts.equal s1 s2 then true else (cstrs := Set.add (UEq (Sorts.univ_of_sort s1,Sorts.univ_of_sort s2)) !cstrs; true) in let leq_sorts s1 s2 = let s1 = ESorts.kind sigma s1 in let s2 = ESorts.kind sigma s2 in if Sorts.equal s1 s2 then true else (cstrs := Set.add (ULe (Sorts.univ_of_sort s1,Sorts.univ_of_sort s2)) !cstrs; true) in let rec eq_constr' nargs m n = compare_gen kind eq_universes eq_sorts eq_constr' nargs m n in let res = if leq then let rec compare_leq nargs m n = Constr.compare_head_gen_leq_with kind kind leq_universes leq_sorts eq_constr' leq_constr' nargs m n and leq_constr' nargs m n = m == n || compare_leq nargs m n in compare_leq nargs m n else Constr.compare_head_gen_with kind kind eq_universes eq_sorts eq_constr' nargs m n in if res then Some !cstrs else None let eq_constr_universes env sigma ?nargs m n = test_constr_universes env sigma false ?nargs m n let leq_constr_universes env sigma ?nargs m n = test_constr_universes env sigma true ?nargs m n let compare_head_gen_proj env sigma equ eqs eqc' nargs m n = let kind c = kind sigma c in match kind m, kind n with | Proj (p, c), App (f, args) | App (f, args), Proj (p, c) -> (match kind f with | Const (p', u) when Environ.QConstant.equal env (Projection.constant p) p' -> let npars = Projection.npars p in if Array.length args == npars + 1 then eqc' 0 c args.(npars) else false | _ -> false) | _ -> Constr.compare_head_gen_with kind kind equ eqs eqc' nargs m n let eq_constr_universes_proj env sigma m n = let open UnivProblem in if m == n then Some Set.empty else let cstrs = ref Set.empty in let eq_universes ref l l' = eq_universes env sigma cstrs Reduction.CONV ref l l' in let eq_sorts s1 s2 = let s1 = ESorts.kind sigma s1 in let s2 = ESorts.kind sigma s2 in if Sorts.equal s1 s2 then true else (cstrs := Set.add (UEq (Sorts.univ_of_sort s1, Sorts.univ_of_sort s2)) !cstrs; true) in let rec eq_constr' nargs m n = m == n || compare_head_gen_proj env sigma eq_universes eq_sorts eq_constr' nargs m n in let res = eq_constr' 0 m n in if res then Some !cstrs else None let universes_of_constr sigma c = let open Univ in let rec aux s c = match kind sigma c with | Const (c, u) -> Level.Set.fold Level.Set.add (Instance.levels (EInstance.kind sigma u)) s | Ind ((mind,_), u) | Construct (((mind,_),_), u) -> Level.Set.fold Level.Set.add (Instance.levels (EInstance.kind sigma u)) s | Sort u -> let sort = ESorts.kind sigma u in if Sorts.is_small sort then s else let u = Sorts.univ_of_sort sort in Level.Set.fold Level.Set.add (Universe.levels u) s | Evar (k, args) -> let concl = Evd.evar_concl (Evd.find sigma k) in fold sigma aux (aux s concl) c | Array (u,_,_,_) -> let s = Level.Set.fold Level.Set.add (Instance.levels (EInstance.kind sigma u)) s in fold sigma aux s c | Case (_,u,_,_,_,_,_) -> let s = Level.Set.fold Level.Set.add (Instance.levels (EInstance.kind sigma 
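(* Added illustration (not part of the original source): the
   [eq_constr_universes]/[leq_constr_universes] family commits to no universe
   constraint by itself; it returns the set of problems that would make the
   comparison succeed.  The sketch assumes the usual
   [Evd.add_universe_constraints] entry point for recording them; the helper
   name [record_syntactic_eq] is hypothetical, and the call may raise if the
   accumulated constraints become inconsistent.
   {[
     let record_syntactic_eq env sigma (a : EConstr.t) (b : EConstr.t) =
       match EConstr.eq_constr_universes env sigma a b with
       | None -> None
       | Some problems -> Some (Evd.add_universe_constraints sigma problems)
   ]} *)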
u)) s in fold sigma aux s c | _ -> fold sigma aux s c in aux Level.Set.empty c open Context open Environ let cast_list : type a b. (a,b) eq -> a list -> b list = fun Refl x -> x let cast_list_snd : type a b. (a,b) eq -> ('c * a) list -> ('c * b) list = fun Refl x -> x let cast_vect : type a b. (a,b) eq -> a array -> b array = fun Refl x -> x let cast_rel_decl : type a b. (a,b) eq -> (a, a) Rel.Declaration.pt -> (b, b) Rel.Declaration.pt = fun Refl x -> x let cast_rel_context : type a b. (a,b) eq -> (a, a) Rel.pt -> (b, b) Rel.pt = fun Refl x -> x let cast_rec_decl : type a b. (a,b) eq -> (a, a) Constr.prec_declaration -> (b, b) Constr.prec_declaration = fun Refl x -> x let cast_named_decl : type a b. (a,b) eq -> (a, a) Named.Declaration.pt -> (b, b) Named.Declaration.pt = fun Refl x -> x let cast_named_context : type a b. (a,b) eq -> (a, a) Named.pt -> (b, b) Named.pt = fun Refl x -> x module Vars = struct exception LocalOccur let to_constr = unsafe_to_constr let to_rel_decl = unsafe_to_rel_decl type instance = t array type instance_list = t list type substl = t list (** Operations that commute with evar-normalization *) let lift = lift let liftn n m c = of_constr (Vars.liftn n m (to_constr c)) let substnl subst n c = of_constr (Vars.substnl (cast_list unsafe_eq subst) n (to_constr c)) let substl subst c = of_constr (Vars.substl (cast_list unsafe_eq subst) (to_constr c)) let subst1 c r = of_constr (Vars.subst1 (to_constr c) (to_constr r)) let substnl_decl subst n d = of_rel_decl (Vars.substnl_decl (cast_list unsafe_eq subst) n (to_rel_decl d)) let substl_decl subst d = of_rel_decl (Vars.substl_decl (cast_list unsafe_eq subst) (to_rel_decl d)) let subst1_decl c d = of_rel_decl (Vars.subst1_decl (to_constr c) (to_rel_decl d)) let replace_vars subst c = of_constr (Vars.replace_vars (cast_list_snd unsafe_eq subst) (to_constr c)) let substn_vars n subst c = of_constr (Vars.substn_vars n subst (to_constr c)) let subst_vars subst c = of_constr (Vars.subst_vars subst (to_constr c)) let subst_var subst c = of_constr (Vars.subst_var subst (to_constr c)) let subst_univs_level_constr subst c = of_constr (Vars.subst_univs_level_constr subst (to_constr c)) let subst_instance_context subst ctx = cast_rel_context (sym unsafe_eq) (Vars.subst_instance_context subst (cast_rel_context unsafe_eq ctx)) let subst_instance_constr subst c = of_constr (Vars.subst_instance_constr subst (to_constr c)) (** Operations that dot NOT commute with evar-normalization *) let noccurn sigma n term = let rec occur_rec n c = match kind sigma c with | Rel m -> if Int.equal m n then raise LocalOccur | _ -> iter_with_binders sigma succ occur_rec n c in try occur_rec n term; true with LocalOccur -> false let noccur_between sigma n m term = let rec occur_rec n c = match kind sigma c with | Rel p -> if n<=p && p iter_with_binders sigma succ occur_rec n c in try occur_rec n term; true with LocalOccur -> false let closedn sigma n c = let rec closed_rec n c = match kind sigma c with | Rel m -> if m>n then raise LocalOccur | _ -> iter_with_binders sigma succ closed_rec n c in try closed_rec n c; true with LocalOccur -> false let closed0 sigma c = closedn sigma 0 c let subst_of_rel_context_instance ctx subst = cast_list (sym unsafe_eq) (Vars.subst_of_rel_context_instance (cast_rel_context unsafe_eq ctx) (cast_vect unsafe_eq subst)) let subst_of_rel_context_instance_list ctx subst = cast_list (sym unsafe_eq) (Vars.subst_of_rel_context_instance_list (cast_rel_context unsafe_eq ctx) (cast_list unsafe_eq subst)) let liftn_rel_context n k ctx = 
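(* Added illustration (not part of the original source): the substitution
   primitives of [Vars] are what a hand-written reduction step looks like.
   Hypothetical helper performing a single head beta contraction, leaving the
   term unchanged when there is no redex.
   {[
     let beta_step sigma (c : EConstr.t) : EConstr.t =
       match EConstr.kind sigma c with
       | Constr.App (f, args) when Array.length args > 0 ->
         begin match EConstr.kind sigma f with
         | Constr.Lambda (_na, _ty, body) ->
           (* substitute the first argument for Rel 1 in the body, then
              re-apply the remaining arguments, if any *)
           let head = EConstr.Vars.subst1 args.(0) body in
           let rest = Array.sub args 1 (Array.length args - 1) in
           if Array.length rest = 0 then head else EConstr.mkApp (head, rest)
         | _ -> c
         end
       | _ -> c
   ]} *)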
cast_rel_context (sym unsafe_eq) (Vars.liftn_rel_context n k (cast_rel_context unsafe_eq ctx)) let lift_rel_context n ctx = cast_rel_context (sym unsafe_eq) (Vars.lift_rel_context n (cast_rel_context unsafe_eq ctx)) let substnl_rel_context subst n ctx = cast_rel_context (sym unsafe_eq) (Vars.substnl_rel_context (cast_list unsafe_eq subst) n (cast_rel_context unsafe_eq ctx)) let substl_rel_context subst ctx = cast_rel_context (sym unsafe_eq) (Vars.substl_rel_context (cast_list unsafe_eq subst) (cast_rel_context unsafe_eq ctx)) let smash_rel_context ctx = cast_rel_context (sym unsafe_eq) (Vars.smash_rel_context (cast_rel_context unsafe_eq ctx)) let esubst : (int -> 'a -> t) -> 'a Esubst.subs -> t -> t = match unsafe_eq with | Refl -> Vars.esubst type substituend = Vars.substituend let make_substituend c = Vars.make_substituend (unsafe_to_constr c) let lift_substituend n s = of_constr (Vars.lift_substituend n s) end let rec isArity sigma c = match kind sigma c with | Prod (_,_,c) -> isArity sigma c | LetIn (_,b,_,c) -> isArity sigma (Vars.subst1 b c) | Cast (c,_,_) -> isArity sigma c | Sort _ -> true | _ -> false type arity = rel_context * ESorts.t let destArity sigma = let open Context.Rel.Declaration in let rec prodec_rec l c = match kind sigma c with | Prod (x,t,c) -> prodec_rec (LocalAssum (x,t) :: l) c | LetIn (x,b,t,c) -> prodec_rec (LocalDef (x,b,t) :: l) c | Cast (c,_,_) -> prodec_rec l c | Sort s -> l,s | _ -> anomaly ~label:"destArity" (Pp.str "not an arity.") in prodec_rec [] let mkProd_or_LetIn decl c = let open Context.Rel.Declaration in match decl with | LocalAssum (na,t) -> mkProd (na, t, c) | LocalDef (na,b,t) -> mkLetIn (na, b, t, c) let mkLambda_or_LetIn decl c = let open Context.Rel.Declaration in match decl with | LocalAssum (na,t) -> mkLambda (na, t, c) | LocalDef (na,b,t) -> mkLetIn (na, b, t, c) let mkNamedProd id typ c = mkProd (map_annot Name.mk_name id, typ, Vars.subst_var id.binder_name c) let mkNamedLambda id typ c = mkLambda (map_annot Name.mk_name id, typ, Vars.subst_var id.binder_name c) let mkNamedLetIn id c1 t c2 = mkLetIn (map_annot Name.mk_name id, c1, t, Vars.subst_var id.binder_name c2) let mkNamedProd_or_LetIn decl c = let open Context.Named.Declaration in match decl with | LocalAssum (id,t) -> mkNamedProd id t c | LocalDef (id,b,t) -> mkNamedLetIn id b t c let mkNamedLambda_or_LetIn decl c = let open Context.Named.Declaration in match decl with | LocalAssum (id,t) -> mkNamedLambda id t c | LocalDef (id,b,t) -> mkNamedLetIn id b t c let it_mkProd_or_LetIn t ctx = List.fold_left (fun c d -> mkProd_or_LetIn d c) t ctx let it_mkLambda_or_LetIn t ctx = List.fold_left (fun c d -> mkLambda_or_LetIn d c) t ctx let push_rel d e = push_rel (cast_rel_decl unsafe_eq d) e let push_rel_context d e = push_rel_context (cast_rel_context unsafe_eq d) e let push_rec_types d e = push_rec_types (cast_rec_decl unsafe_eq d) e let push_named d e = push_named (cast_named_decl unsafe_eq d) e let push_named_context d e = push_named_context (cast_named_context unsafe_eq d) e let push_named_context_val d e = push_named_context_val (cast_named_decl unsafe_eq d) e let rel_context e = cast_rel_context (sym unsafe_eq) (rel_context e) let named_context e = cast_named_context (sym unsafe_eq) (named_context e) let val_of_named_context e = val_of_named_context (cast_named_context unsafe_eq e) let named_context_of_val e = cast_named_context (sym unsafe_eq) (named_context_of_val e) let of_existential : Constr.existential -> existential = let gen : type a b. 
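(* Added illustration (not part of the original source): the smart
   constructors above build well-scoped terms directly, with binder
   annotations coming from [Context.make_annot].  Hypothetical helper
   building the identity function on a given type together with its type.
   {[
     let identity_on (a : EConstr.types) : EConstr.t * EConstr.types =
       let x = Context.make_annot Names.Name.Anonymous Sorts.Relevant in
       let fn = EConstr.mkLambda (x, a, EConstr.mkRel 1) in
       (* the codomain lives under one extra binder, hence the lift *)
       let ty = EConstr.mkProd (x, a, EConstr.Vars.lift 1 a) in
       (fn, ty)
   ]} *)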
(a,b) eq -> 'c * b list -> 'c * a list = fun Refl x -> x in gen unsafe_eq let lookup_rel i e = cast_rel_decl (sym unsafe_eq) (lookup_rel i e) let lookup_named n e = cast_named_decl (sym unsafe_eq) (lookup_named n e) let lookup_named_val n e = cast_named_decl (sym unsafe_eq) (lookup_named_ctxt n e) let map_rel_context_in_env f env sign = let rec aux env acc = function | d::sign -> aux (push_rel d env) (Context.Rel.Declaration.map_constr (f env) d :: acc) sign | [] -> acc in aux env [] (List.rev sign) let match_named_context_val : named_context_val -> (named_declaration * lazy_val * named_context_val) option = match unsafe_eq with | Refl -> match_named_context_val let identity_subst_val : named_context_val -> t list = match unsafe_eq with Refl -> fun ctx -> ctx.env_named_var let fresh_global ?loc ?rigid ?names env sigma reference = let (evd,t) = Evd.fresh_global ?loc ?rigid ?names env sigma reference in evd, t let is_global = isRefX (** Kind of type *) type kind_of_type = | SortType of ESorts.t | CastType of types * t | ProdType of Name.t Context.binder_annot * t * t | LetInType of Name.t Context.binder_annot * t * t * t | AtomicType of t * t array let kind_of_type sigma t = match kind sigma t with | Sort s -> SortType s | Cast (c,_,t) -> CastType (c, t) | Prod (na,t,c) -> ProdType (na, t, c) | LetIn (na,b,t,c) -> LetInType (na, b, t, c) | App (c,l) -> AtomicType (c, l) | (Rel _ | Meta _ | Var _ | Evar _ | Const _ | Proj _ | Case _ | Fix _ | CoFix _ | Ind _) -> AtomicType (t,[||]) | (Lambda _ | Construct _ | Int _ | Float _ | Array _) -> failwith "Not a type" module Unsafe = struct let to_sorts = ESorts.unsafe_to_sorts let to_instance = EInstance.unsafe_to_instance let to_constr = unsafe_to_constr let to_constr_array = unsafe_to_constr_array let to_rel_decl = unsafe_to_rel_decl let to_named_decl = unsafe_to_named_decl let to_named_context = let gen : type a b. (a, b) eq -> (a,a) Context.Named.pt -> (b,b) Context.Named.pt = fun Refl x -> x in gen unsafe_eq let to_case_invert = unsafe_to_case_invert let eq = unsafe_eq end coq-8.15.0/engine/eConstr.mli000066400000000000000000000367131417001151100157550ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t (** Turn a sort into an up-to sort. *) val kind : Evd.evar_map -> t -> Sorts.t (** Returns the view into the current sort. Note that the kind of a variable may change if the unification state of the evar map changes. *) end module EInstance : sig type t (** Type of universe instances up-to universe unification. Similar to {ESorts.t} for {Univ.Instance.t}. *) val make : Univ.Instance.t -> t val kind : Evd.evar_map -> t -> Univ.Instance.t val empty : t val is_empty : t -> bool end type case_invert = t pcase_invert type case = (t, t, EInstance.t) pcase type 'a puniverses = 'a * EInstance.t (** {5 Destructors} *) val kind : Evd.evar_map -> t -> (t, t, ESorts.t, EInstance.t) Constr.kind_of_term (** Same as {!Constr.kind} except that it expands evars and normalizes universes on the fly. *) val kind_upto : Evd.evar_map -> Constr.t -> (Constr.t, Constr.t, Sorts.t, Univ.Instance.t) Constr.kind_of_term val to_constr : ?abort_on_undefined_evars:bool -> Evd.evar_map -> t -> Constr.t (** Returns the evar-normal form of the argument. Note that this function is supposed to be called when the original term has not more free-evars anymore. 
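Added illustration (not part of the original docstring): a hypothetical
wrapper combining this function with [to_constr_opt] below, falling back to
the permissive behaviour when undefined evars remain.
{[
  let export sigma (c : EConstr.t) : Constr.t =
    match EConstr.to_constr_opt sigma c with
    | Some t -> t
    | None -> EConstr.to_constr ~abort_on_undefined_evars:false sigma c
]}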
If you need compatibility with the old semantics, set [abort_on_undefined_evars] to [false]. For getting the evar-normal form of a term with evars see {!Evarutil.nf_evar}. *) val to_constr_opt : Evd.evar_map -> t -> Constr.t option (** Same as [to_constr], but returns [None] if some unresolved evars remain *) type kind_of_type = | SortType of ESorts.t | CastType of types * t | ProdType of Name.t Context.binder_annot * t * t | LetInType of Name.t Context.binder_annot * t * t * t | AtomicType of t * t array val kind_of_type : Evd.evar_map -> t -> kind_of_type (** {5 Constructors} *) val of_kind : (t, t, ESorts.t, EInstance.t) Constr.kind_of_term -> t (** Construct a term from a view. *) val of_constr : Constr.t -> t (** Translate a kernel term into an incomplete term in O(1). *) (** {5 Insensitive primitives} Evar-insensitive versions of the corresponding functions. See the {!Constr} module for more information. *) (** {6 Constructors} *) val mkRel : int -> t val mkVar : Id.t -> t val mkMeta : metavariable -> t val mkEvar : t pexistential -> t val mkSort : Sorts.t -> t val mkSProp : t val mkProp : t val mkSet : t val mkType : Univ.Universe.t -> t val mkCast : t * cast_kind * t -> t val mkProd : Name.t Context.binder_annot * t * t -> t val mkLambda : Name.t Context.binder_annot * t * t -> t val mkLetIn : Name.t Context.binder_annot * t * t * t -> t val mkApp : t * t array -> t val mkConst : Constant.t -> t val mkConstU : Constant.t * EInstance.t -> t val mkProj : (Projection.t * t) -> t val mkInd : inductive -> t val mkIndU : inductive * EInstance.t -> t val mkConstruct : constructor -> t val mkConstructU : constructor * EInstance.t -> t val mkConstructUi : (inductive * EInstance.t) * int -> t val mkCase : case -> t val mkFix : (t, t) pfixpoint -> t val mkCoFix : (t, t) pcofixpoint -> t val mkArrow : t -> Sorts.relevance -> t -> t val mkArrowR : t -> t -> t val mkInt : Uint63.t -> t val mkFloat : Float64.t -> t val mkArray : EInstance.t * t array * t * t -> t val mkRef : GlobRef.t * EInstance.t -> t val type1 : t val applist : t * t list -> t val applistc : t -> t list -> t val mkProd_or_LetIn : rel_declaration -> t -> t val mkLambda_or_LetIn : rel_declaration -> t -> t val it_mkProd_or_LetIn : t -> rel_context -> t val it_mkLambda_or_LetIn : t -> rel_context -> t val mkNamedLambda : Id.t Context.binder_annot -> types -> constr -> constr val mkNamedLetIn : Id.t Context.binder_annot -> constr -> types -> constr -> constr val mkNamedProd : Id.t Context.binder_annot -> types -> types -> types val mkNamedLambda_or_LetIn : named_declaration -> types -> types val mkNamedProd_or_LetIn : named_declaration -> types -> types (** {6 Simple case analysis} *) val isRel : Evd.evar_map -> t -> bool val isVar : Evd.evar_map -> t -> bool val isInd : Evd.evar_map -> t -> bool val isRef : Evd.evar_map -> t -> bool val isEvar : Evd.evar_map -> t -> bool val isMeta : Evd.evar_map -> t -> bool val isSort : Evd.evar_map -> t -> bool val isCast : Evd.evar_map -> t -> bool val isApp : Evd.evar_map -> t -> bool val isLambda : Evd.evar_map -> t -> bool val isLetIn : Evd.evar_map -> t -> bool val isProd : Evd.evar_map -> t -> bool val isConst : Evd.evar_map -> t -> bool val isConstruct : Evd.evar_map -> t -> bool val isFix : Evd.evar_map -> t -> bool val isCoFix : Evd.evar_map -> t -> bool val isCase : Evd.evar_map -> t -> bool val isProj : Evd.evar_map -> t -> bool val isType : Evd.evar_map -> constr -> bool type arity = rel_context * ESorts.t val destArity : Evd.evar_map -> types -> arity val isArity : Evd.evar_map -> 
t -> bool val isVarId : Evd.evar_map -> Id.t -> t -> bool val isRelN : Evd.evar_map -> int -> t -> bool val isRefX : Evd.evar_map -> GlobRef.t -> t -> bool val destRel : Evd.evar_map -> t -> int val destMeta : Evd.evar_map -> t -> metavariable val destVar : Evd.evar_map -> t -> Id.t val destSort : Evd.evar_map -> t -> ESorts.t val destCast : Evd.evar_map -> t -> t * cast_kind * t val destProd : Evd.evar_map -> t -> Name.t Context.binder_annot * types * types val destLambda : Evd.evar_map -> t -> Name.t Context.binder_annot * types * t val destLetIn : Evd.evar_map -> t -> Name.t Context.binder_annot * t * types * t val destApp : Evd.evar_map -> t -> t * t array val destConst : Evd.evar_map -> t -> Constant.t * EInstance.t val destEvar : Evd.evar_map -> t -> t pexistential val destInd : Evd.evar_map -> t -> inductive * EInstance.t val destConstruct : Evd.evar_map -> t -> constructor * EInstance.t val destCase : Evd.evar_map -> t -> case val destProj : Evd.evar_map -> t -> Projection.t * t val destFix : Evd.evar_map -> t -> (t, t) pfixpoint val destCoFix : Evd.evar_map -> t -> (t, t) pcofixpoint val destRef : Evd.evar_map -> t -> GlobRef.t * EInstance.t val decompose_app : Evd.evar_map -> t -> t * t list (** Pops lambda abstractions until there are no more, skipping casts. *) val decompose_lam : Evd.evar_map -> t -> (Name.t Context.binder_annot * t) list * t (** Pops lambda abstractions and letins until there are no more, skipping casts. *) val decompose_lam_assum : Evd.evar_map -> t -> rel_context * t (** Pops [n] lambda abstractions, and pop letins only if needed to expose enough lambdas, skipping casts. @raise UserError if the term doesn't have enough lambdas. *) val decompose_lam_n_assum : Evd.evar_map -> int -> t -> rel_context * t (** Pops [n] lambda abstractions and letins, skipping casts. @raise UserError if the term doesn't have enough lambdas/letins. *) val decompose_lam_n_decls : Evd.evar_map -> int -> t -> rel_context * t val compose_lam : (Name.t Context.binder_annot * t) list -> t -> t val to_lambda : Evd.evar_map -> int -> t -> t val decompose_prod : Evd.evar_map -> t -> (Name.t Context.binder_annot * t) list * t val decompose_prod_assum : Evd.evar_map -> t -> rel_context * t val decompose_prod_n_assum : Evd.evar_map -> int -> t -> rel_context * t val existential_type : Evd.evar_map -> existential -> types val whd_evar : Evd.evar_map -> constr -> constr (** {6 Equality} *) val eq_constr : Evd.evar_map -> t -> t -> bool val eq_constr_nounivs : Evd.evar_map -> t -> t -> bool val eq_constr_universes : Environ.env -> Evd.evar_map -> ?nargs:int -> t -> t -> UnivProblem.Set.t option val leq_constr_universes : Environ.env -> Evd.evar_map -> ?nargs:int -> t -> t -> UnivProblem.Set.t option (** [eq_constr_universes_proj] can equate projections and their eta-expanded constant form. 
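Added illustration (not part of the original docstring): when only a boolean
answer is needed, the returned problem set can simply be tested for
existence; [eq_modulo_univs_and_projs] is a hypothetical helper.
{[
  let eq_modulo_univs_and_projs env sigma (a : EConstr.t) (b : EConstr.t) =
    match EConstr.eq_constr_universes_proj env sigma a b with
    | Some _problems -> true
    | None -> false
]}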
*) val eq_constr_universes_proj : Environ.env -> Evd.evar_map -> t -> t -> UnivProblem.Set.t option val compare_constr : Evd.evar_map -> (t -> t -> bool) -> t -> t -> bool (** {6 Iterators} *) val map : Evd.evar_map -> (t -> t) -> t -> t val map_with_binders : Evd.evar_map -> ('a -> 'a) -> ('a -> t -> t) -> 'a -> t -> t val map_branches : (t -> t) -> case_branch array -> case_branch array val map_return_predicate : (t -> t) -> case_return -> case_return val iter : Evd.evar_map -> (t -> unit) -> t -> unit val iter_with_binders : Evd.evar_map -> ('a -> 'a) -> ('a -> t -> unit) -> 'a -> t -> unit val iter_with_full_binders : Environ.env -> Evd.evar_map -> (rel_declaration -> 'a -> 'a) -> ('a -> t -> unit) -> 'a -> t -> unit val fold : Evd.evar_map -> ('a -> t -> 'a) -> 'a -> t -> 'a val fold_with_binders : Evd.evar_map -> ('a -> 'a) -> ('a -> 'b -> t -> 'b) -> 'a -> 'b -> t -> 'b (** Gather the universes transitively used in the term, including in the type of evars appearing in it. *) val universes_of_constr : Evd.evar_map -> t -> Univ.Level.Set.t (** {6 Substitutions} *) module Vars : sig (** See vars.mli for the documentation of the functions below *) type instance = t array type instance_list = t list type substl = t list val lift : int -> t -> t val liftn : int -> int -> t -> t val substnl : substl -> int -> t -> t val substl : substl -> t -> t val subst1 : t -> t -> t val substnl_decl : substl -> int -> rel_declaration -> rel_declaration val substl_decl : substl -> rel_declaration -> rel_declaration val subst1_decl : t -> rel_declaration -> rel_declaration val replace_vars : (Id.t * t) list -> t -> t val substn_vars : int -> Id.t list -> t -> t val subst_vars : Id.t list -> t -> t val subst_var : Id.t -> t -> t val noccurn : Evd.evar_map -> int -> t -> bool val noccur_between : Evd.evar_map -> int -> int -> t -> bool val closedn : Evd.evar_map -> int -> t -> bool val closed0 : Evd.evar_map -> t -> bool val subst_univs_level_constr : Univ.universe_level_subst -> t -> t val subst_instance_context : Univ.Instance.t -> rel_context -> rel_context val subst_instance_constr : Univ.Instance.t -> t -> t val subst_of_rel_context_instance : rel_context -> instance -> substl val subst_of_rel_context_instance_list : rel_context -> instance_list -> substl val liftn_rel_context : int -> int -> rel_context -> rel_context val lift_rel_context : int -> rel_context -> rel_context val substnl_rel_context : substl -> int -> rel_context -> rel_context val substl_rel_context : substl -> rel_context -> rel_context val smash_rel_context : rel_context -> rel_context val esubst : (int -> 'a -> t) -> 'a Esubst.subs -> t -> t type substituend val make_substituend : t -> substituend val lift_substituend : int -> substituend -> t end (** {5 Environment handling} *) val push_rel : rel_declaration -> env -> env val push_rel_context : rel_context -> env -> env val push_rec_types : (t, t) Constr.prec_declaration -> env -> env val push_named : named_declaration -> env -> env val push_named_context : named_context -> env -> env val push_named_context_val : named_declaration -> named_context_val -> named_context_val val rel_context : env -> rel_context val named_context : env -> named_context val val_of_named_context : named_context -> named_context_val val named_context_of_val : named_context_val -> named_context val lookup_rel : int -> env -> rel_declaration val lookup_named : variable -> env -> named_declaration val lookup_named_val : variable -> named_context_val -> named_declaration val map_rel_context_in_env : (env -> 
constr -> constr) -> env -> rel_context -> rel_context val match_named_context_val : named_context_val -> (named_declaration * lazy_val * named_context_val) option val identity_subst_val : named_context_val -> t list (* XXX Missing Sigma proxy *) val fresh_global : ?loc:Loc.t -> ?rigid:Evd.rigid -> ?names:Univ.Instance.t -> Environ.env -> Evd.evar_map -> GlobRef.t -> Evd.evar_map * t val is_global : Evd.evar_map -> GlobRef.t -> t -> bool [@@ocaml.deprecated "Use [EConstr.isRefX] instead."] val expand_case : Environ.env -> Evd.evar_map -> case -> (case_info * t * case_invert * t * t array) val annotate_case : Environ.env -> Evd.evar_map -> case -> case_info * EInstance.t * t array * (rel_context * t) * case_invert * t * (rel_context * t) array (** Same as above, but doesn't turn contexts into binders *) val expand_branch : Environ.env -> Evd.evar_map -> EInstance.t -> t array -> constructor -> case_branch -> rel_context (** Given a universe instance and parameters for the inductive type, constructs the typed context in which the branch lives. *) val contract_case : Environ.env -> Evd.evar_map -> (case_info * t * case_invert * t * t array) -> case (** {5 Extra} *) val of_existential : Constr.existential -> existential val of_named_decl : (Constr.t, Constr.types) Context.Named.Declaration.pt -> (t, types) Context.Named.Declaration.pt val of_rel_decl : (Constr.t, Constr.types) Context.Rel.Declaration.pt -> (t, types) Context.Rel.Declaration.pt val to_rel_decl : Evd.evar_map -> (t, types) Context.Rel.Declaration.pt -> (Constr.t, Constr.types) Context.Rel.Declaration.pt val of_named_context : Constr.named_context -> named_context val of_rel_context : Constr.rel_context -> rel_context val of_case_invert : Constr.case_invert -> case_invert val of_constr_array : Constr.t array -> t array (** {5 Unsafe operations} *) module Unsafe : sig val to_constr : t -> Constr.t (** Physical identity. Does not care for defined evars. *) val to_constr_array : t array -> Constr.t array (** Physical identity. Does not care for defined evars. *) val to_rel_decl : (t, types) Context.Rel.Declaration.pt -> (Constr.t, Constr.types) Context.Rel.Declaration.pt (** Physical identity. Does not care for defined evars. *) val to_named_decl : (t, types) Context.Named.Declaration.pt -> (Constr.t, Constr.types) Context.Named.Declaration.pt (** Physical identity. Does not care for defined evars. *) val to_named_context : (t, types) Context.Named.pt -> Constr.named_context val to_sorts : ESorts.t -> Sorts.t (** Physical identity. Does not care for normalization. *) val to_instance : EInstance.t -> Univ.Instance.t (** Physical identity. Does not care for normalization. *) val to_case_invert : case_invert -> Constr.case_invert val eq : (t, Constr.t) eq (** Use for transparent cast between types. 
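Added illustration (not part of the original docstring), relating to the
[destCase]/[expand_case] pair documented above: a hypothetical helper that
returns the branches of a match with their binders reinstalled as lambdas.
It fails if the argument is not a match node.
{[
  let branches_as_lambdas env sigma (c : EConstr.t) : EConstr.t array =
    let case = EConstr.destCase sigma c in
    let (_ci, _pred, _iv, _scrut, brs) = EConstr.expand_case env sigma case in
    brs
]}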
*) end coq-8.15.0/engine/evar_kinds.ml000066400000000000000000000036431417001151100163100ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* let varsc = EConstr.universes_of_constr sigma c in let c = EConstr.to_constr ?abort_on_undefined_evars sigma c in uvars := Univ.Level.Set.union !uvars varsc; c) in let sigma = restrict_universe_context sigma !uvars in sigma, v (* flush_and_check_evars fails if an existential is undefined *) exception Uninstantiated_evar of Evar.t let rec flush_and_check_evars sigma c = match kind c with | Evar (evk,_ as ev) -> (match existential_opt_value0 sigma ev with | None -> raise (Uninstantiated_evar evk) | Some c -> flush_and_check_evars sigma c) | _ -> Constr.map (flush_and_check_evars sigma) c let flush_and_check_evars sigma c = flush_and_check_evars sigma (EConstr.Unsafe.to_constr c) (** Term exploration up to instantiation. *) let kind_of_term_upto = EConstr.kind_upto let nf_evars_universes sigma t = EConstr.to_constr ~abort_on_undefined_evars:false sigma (EConstr.of_constr t) let whd_evar = EConstr.whd_evar let nf_evar sigma c = EConstr.of_constr (EConstr.to_constr ~abort_on_undefined_evars:false sigma c) let j_nf_evar sigma j = { uj_val = nf_evar sigma j.uj_val; uj_type = nf_evar sigma j.uj_type } let jl_nf_evar sigma jl = List.map (j_nf_evar sigma) jl let jv_nf_evar sigma = Array.map (j_nf_evar sigma) let tj_nf_evar sigma {utj_val=v;utj_type=t} = {utj_val=nf_evar sigma v;utj_type=t} let nf_named_context_evar sigma ctx = Context.Named.map (nf_evars_universes sigma) ctx let nf_rel_context_evar sigma ctx = Context.Rel.map (nf_evar sigma) ctx let nf_env_evar sigma env = let nc' = nf_named_context_evar sigma (Environ.named_context env) in let rel' = nf_rel_context_evar sigma (EConstr.rel_context env) in EConstr.push_rel_context rel' (reset_with_named_context (val_of_named_context nc') env) let nf_evar_info evc info = map_evar_info (nf_evar evc) info let nf_evar_map evm = Evd.raw_map (fun _ evi -> nf_evar_info evm evi) evm let nf_evar_map_undefined evm = Evd.raw_map_undefined (fun _ evi -> nf_evar_info evm evi) evm (*-------------------*) (* Auxiliary functions for the conversion algorithms modulo evars *) let has_undefined_evars evd t = let rec has_ev t = match EConstr.kind evd t with | Evar _ -> raise NotInstantiatedEvar | _ -> EConstr.iter evd has_ev t in try let _ = has_ev t in false with (Not_found | NotInstantiatedEvar) -> true let is_ground_term evd t = not (has_undefined_evars evd t) let is_ground_env evd env = let is_ground_rel_decl = function | RelDecl.LocalDef (_,b,_) -> is_ground_term evd (EConstr.of_constr b) | _ -> true in let is_ground_named_decl = function | NamedDecl.LocalDef (_,b,_) -> is_ground_term evd (EConstr.of_constr b) | _ -> true in List.for_all is_ground_rel_decl (rel_context env) && List.for_all is_ground_named_decl (named_context env) (* Memoization is safe since evar_map and environ are applicative structures *) let memo f = let module E = Ephemeron.K2 in let m = E.create () in fun x y -> match E.get_key1 m, E.get_key2 m with | Some x', Some y' when x == x' && y == y' -> Option.get (E.get_data m) | _ -> let r = f x y in E.set_key1 m x; E.set_key2 m y; E.set_data m r; r let is_ground_env = memo is_ground_env (* Return the head evar if any *) exception NoHeadEvar let head_evar sigma c = (* FIXME: this breaks if using evar-insensitive code *) let c = 
EConstr.Unsafe.to_constr c in let rec hrec c = match kind c with | Evar (evk,_) -> evk | Case (_, _, _, _, _, c, _) -> hrec c | App (c,_) -> hrec c | Cast (c,_,_) -> hrec c | Proj (p, c) -> hrec c | _ -> raise NoHeadEvar in hrec c (* Expand head evar if any (currently consider only applications but I guess it should consider Case too) *) let whd_head_evar_stack sigma c = let rec whrec (c, l) = match EConstr.kind sigma c with | Cast (c,_,_) -> whrec (c, l) | App (f,args) -> whrec (f, args :: l) | c -> (EConstr.of_kind c, l) in whrec (c, []) let whd_head_evar sigma c = let open EConstr in let (f, args) = whd_head_evar_stack sigma c in match args with | [arg] -> mkApp (f, arg) | _ -> mkApp (f, Array.concat args) (**********************) (* Creating new metas *) (**********************) let meta_counter_summary_name = "meta counter" (* Generator of metavariables *) let meta_ctr, meta_counter_summary_tag = Summary.ref_tag 0 ~name:meta_counter_summary_name let new_meta () = incr meta_ctr; !meta_ctr (* The list of non-instantiated existential declarations (order is important) *) let non_instantiated sigma = let listev = Evd.undefined_map sigma in Evar.Map.Smart.map (fun evi -> nf_evar_info sigma evi) listev (************************) (* Manipulating filters *) (************************) let make_pure_subst evi args = snd (List.fold_right (fun decl (args,l) -> match args with | a::rest -> (rest, (NamedDecl.get_id decl, a)::l) | _ -> anomaly (Pp.str "Instance does not match its signature.")) (evar_filtered_context evi) (List.rev args,[])) (*------------------------------------* * functional operations on evar sets * *------------------------------------*) (* [push_rel_context_to_named_context] builds the defining context and the * initial instance of an evar. If the evar is to be used in context * * Gamma = a1 ... an xp ... x1 * \- named part -/ \- de Bruijn part -/ * * then the x1...xp are turned into variables so that the evar is declared in * context * * a1 ... an xp ... x1 * \----------- named part ------------/ * * but used applied to the initial instance "a1 ... an Rel(p) ... Rel(1)" * so that ev[a1:=a1 ... an:=an xp:=Rel(p) ... x1:=Rel(1)] is correctly typed * in context Gamma. * * Remark 1: The instance is reverted in practice (i.e. Rel(1) comes first) * Remark 2: If some of the ai or xj are definitions, we keep them in the * instance. This is necessary so that no unfolding of local definitions * happens when inferring implicit arguments (consider e.g. the problem * "x:nat; x':=x; f:forall y, y=y -> Prop |- f _ (refl_equal x')" which * produces the equation "?y[x,x']=?y[x,x']" =? "x'=x'": we want * the hole to be instantiated by x', not by x (which would have been * the case in [invert_definition] if x' had disappeared from the instance). * Note that at any time, if, in some context env, the instance of * declaration x:A is t and the instance of definition x':=phi(x) is u, then * we have the property that u and phi(t) are convertible in env. *) let next_ident_away id avoid = let avoid id = Id.Set.mem id avoid in next_ident_away_from id avoid let next_name_away na avoid = let avoid id = Id.Set.mem id avoid in let id = match na with Name id -> id | Anonymous -> default_non_dependent_ident in next_ident_away_from id avoid type subst_val = | SRel of int | SVar of Id.t type csubst = { csubst_len : int; (** Cardinal of [csubst_rel] *) csubst_var : Constr.t Id.Map.t; (** A mapping of variables to variables. 
We use the more general [Constr.t] to share allocations, but all values are of shape [Var _]. *) csubst_rel : Constr.t Int.Map.t; (** A contiguous mapping of integers to variables. Same remark for values. *) csubst_rev : subst_val Id.Map.t; (** Reverse mapping of the substitution *) } (** This type represents a name substitution for the named and De Bruijn parts of an environment. For efficiency we also store the reverse substitution. Invariant: all identifiers in the codomain of [csubst_var] and [csubst_rel] must be pairwise distinct. *) let empty_csubst = { csubst_len = 0; csubst_rel = Int.Map.empty; csubst_var = Id.Map.empty; csubst_rev = Id.Map.empty; } let csubst_subst { csubst_len = k; csubst_var = v; csubst_rel = s } c = (* Safe because this is a substitution *) let c = EConstr.Unsafe.to_constr c in let rec subst n c = match Constr.kind c with | Rel m -> if m <= n then c else if m - n <= k then Int.Map.find (k - m + n) s else mkRel (m - k) | Var id -> begin try Id.Map.find id v with Not_found -> c end | _ -> Constr.map_with_binders succ subst n c in let c = if k = 0 && Id.Map.is_empty v then c else subst 0 c in EConstr.of_constr c type ext_named_context = csubst * Id.Set.t * named_context_val let push_var id { csubst_len = n; csubst_var = v; csubst_rel = s; csubst_rev = r } = let s = Int.Map.add n (Constr.mkVar id) s in let r = Id.Map.add id (SRel n) r in { csubst_len = succ n; csubst_var = v; csubst_rel = s; csubst_rev = r } (** Post-compose the substitution with the generator [src ↦ tgt] *) let update_var src tgt subst = let cur = try Some (Id.Map.find src subst.csubst_rev) with Not_found -> None in match cur with | None -> (* Missing keys stand for identity substitution [src ↦ src] *) let csubst_var = Id.Map.add src (Constr.mkVar tgt) subst.csubst_var in let csubst_rev = Id.Map.add tgt (SVar src) subst.csubst_rev in { subst with csubst_var; csubst_rev } | Some bnd -> let csubst_rev = Id.Map.add tgt bnd (Id.Map.remove src subst.csubst_rev) in match bnd with | SRel m -> let csubst_rel = Int.Map.add m (Constr.mkVar tgt) subst.csubst_rel in { subst with csubst_rel; csubst_rev } | SVar id -> let csubst_var = Id.Map.add id (Constr.mkVar tgt) subst.csubst_var in { subst with csubst_var; csubst_rev } type naming_mode = | KeepUserNameAndRenameExistingButSectionNames | KeepUserNameAndRenameExistingEvenSectionNames | KeepExistingNames | FailIfConflict | ProgramNaming let push_rel_decl_to_named_context ?(hypnaming=KeepUserNameAndRenameExistingButSectionNames) sigma decl ((subst, avoid, nc) : ext_named_context) = let open EConstr in let open Vars in let map_decl f d = NamedDecl.map_constr f d in let rec replace_var_named_declaration id0 id nc = match match_named_context_val nc with | None -> empty_named_context_val | Some (decl, _, nc) -> if Id.equal id0 (NamedDecl.get_id decl) then (* Stop here, the variable cannot occur before its definition *) push_named_context_val (NamedDecl.set_id id decl) nc else let nc = replace_var_named_declaration id0 id nc in let vsubst = [id0 , mkVar id] in push_named_context_val (map_decl (fun c -> replace_vars vsubst c) decl) nc in let extract_if_neq id = function | Anonymous -> None | Name id' when Id.compare id id' = 0 -> None | Name id' -> Some id' in let na = RelDecl.get_name decl in let id = (* ppedrot: we want to infer nicer names for the refine tactic, but keeping at the same time backward compatibility in other code using this function. 
For now, we only attempt to preserve the old behaviour of Program, but ultimately, one should do something about this whole name generation problem. *) if hypnaming = ProgramNaming then next_name_away na avoid else (* id_of_name_using_hdchar only depends on the rel context which is empty here *) next_ident_away (id_of_name_using_hdchar empty_env sigma (RelDecl.get_type decl) na) avoid in match extract_if_neq id na with | Some id0 when hypnaming = KeepUserNameAndRenameExistingEvenSectionNames || (hypnaming = KeepUserNameAndRenameExistingButSectionNames || hypnaming = ProgramNaming) && not (is_section_variable (Global.env ()) id0) -> (* spiwack: if [id<>id0], rather than introducing a new binding named [id], we will keep [id0] (the name given by the user) and rename [id0] into [id] in the named context. Unless [id] is a section variable. *) let subst = update_var id0 id subst in let d = decl |> NamedDecl.of_rel_decl (fun _ -> id0) |> map_decl (csubst_subst subst) in let nc = replace_var_named_declaration id0 id nc in let avoid = Id.Set.add id (Id.Set.add id0 avoid) in (push_var id0 subst, avoid, push_named_context_val d nc) | Some id0 when hypnaming = FailIfConflict -> user_err Pp.(Id.print id0 ++ str " is already used.") | _ -> (* spiwack: if [id0] is a section variable renaming it is incorrect. We revert to a less robust behaviour where the new binder has name [id]. Which amounts to the same behaviour than when [id=id0]. *) let d = decl |> NamedDecl.of_rel_decl (fun _ -> id) |> map_decl (csubst_subst subst) in (push_var id subst, Id.Set.add id avoid, push_named_context_val d nc) let push_rel_context_to_named_context ?hypnaming env sigma typ = (* compute the instances relative to the named context and rel_context *) let open EConstr in let inst_vars = EConstr.identity_subst_val (named_context_val env) in if List.is_empty (Environ.rel_context env) then (named_context_val env, typ, inst_vars, empty_csubst) else let avoid = Environ.ids_of_named_context_val (named_context_val env) in let inst_rels = List.rev (rel_list 0 (nb_rel env)) in (* move the rel context to a named context and extend the named instance *) (* with vars of the rel context *) (* We do keep the instances corresponding to local definition (see above) *) let (subst, _, env) = Context.Rel.fold_outside (fun d acc -> push_rel_decl_to_named_context ?hypnaming sigma d acc) (rel_context env) ~init:(empty_csubst, avoid, named_context_val env) in (env, csubst_subst subst typ, inst_rels@inst_vars, subst) (*------------------------------------* * Entry points to define new evars * *------------------------------------*) let default_source = Loc.tag @@ Evar_kinds.InternalHole let new_pure_evar ?(src=default_source) ?(filter = Filter.identity) ?identity ?(abstract_arguments = Abstraction.identity) ?candidates ?(naming = IntroAnonymous) ?typeclass_candidate ?(principal=false) sign evd typ = let name = match naming with | IntroAnonymous -> None | IntroIdentifier id -> Some id | IntroFresh id -> let has_name id = try let _ = Evd.evar_key id evd in true with Not_found -> false in let id = Namegen.next_ident_away_from id has_name in Some id in let identity = match identity with | None -> Identity.none () | Some inst -> inst in let evi = { evar_hyps = sign; evar_concl = typ; evar_body = Evar_empty; evar_filter = filter; evar_abstract_arguments = abstract_arguments; evar_source = src; evar_candidates = candidates; evar_identity = identity; } in let typeclass_candidate = if principal then Some false else typeclass_candidate in let (evd, newevk) = 
Evd.new_evar evd ?name ?typeclass_candidate evi in let evd = if principal then Evd.declare_principal_goal newevk evd else Evd.declare_future_goal newevk evd in (evd, newevk) (* [new_evar] declares a new existential in an env env with type typ *) (* Converting the env into the sign of the evar to define *) let new_evar ?src ?filter ?abstract_arguments ?candidates ?naming ?typeclass_candidate ?principal ?hypnaming env evd typ = let sign,typ',instance,subst = push_rel_context_to_named_context ?hypnaming env evd typ in let map c = csubst_subst subst c in let candidates = Option.map (fun l -> List.map map l) candidates in let instance = match filter with | None -> instance | Some filter -> Filter.filter_list filter instance in let identity = if Int.equal (Environ.nb_rel env) 0 then Some (Identity.make instance) else None in let (evd, evk) = new_pure_evar sign evd typ' ?src ?filter ?identity ?abstract_arguments ?candidates ?naming ?typeclass_candidate ?principal in (evd, EConstr.mkEvar (evk, instance)) let new_type_evar ?src ?filter ?naming ?principal ?hypnaming env evd rigid = let (evd', s) = new_sort_variable rigid evd in let (evd', e) = new_evar env evd' ?src ?filter ?naming ~typeclass_candidate:false ?principal ?hypnaming (EConstr.mkSort s) in evd', (e, s) let new_Type ?(rigid=Evd.univ_flexible) evd = let open EConstr in let (evd, s) = new_sort_variable rigid evd in (evd, mkSort s) (* Safe interface to unification problems *) type unification_pb = conv_pb * env * EConstr.constr * EConstr.constr let eq_unification_pb evd (pbty,env,t1,t2) (pbty',env',t1',t2') = pbty == pbty' && env == env' && EConstr.eq_constr evd t1 t1' && EConstr.eq_constr evd t2 t2' let add_unification_pb ?(tail=false) pb evd = let conv_pbs = Evd.conv_pbs evd in if not (List.exists (eq_unification_pb evd pb) conv_pbs) then let (pbty,env,t1,t2) = pb in Evd.add_conv_pb ~tail (pbty,env,t1,t2) evd else evd (* This assumes an evar with identity instance and generalizes it over only the de Bruijn part of the context *) let generalize_evar_over_rels sigma (ev,args) = let open EConstr in let evi = Evd.find sigma ev in let sign = named_context_of_val evi.evar_hyps in List.fold_left2 (fun (c,inst as x) a d -> if isRel sigma a then (mkNamedProd_or_LetIn d c,a::inst) else x) (evi.evar_concl,[]) args sign (************************************) (* Removing a dependency in an evar *) (************************************) type clear_dependency_error = | OccurHypInSimpleClause of Id.t option | EvarTypingBreak of existential | NoCandidatesLeft of Evar.t exception ClearDependencyError of Id.t * clear_dependency_error * GlobRef.t option exception Depends of Id.t let set_of_evctx l = List.fold_left (fun s decl -> Id.Set.add (NamedDecl.get_id decl) s) Id.Set.empty l let filter_effective_candidates evd evi filter candidates = let ids = set_of_evctx (Filter.filter_list filter (evar_context evi)) in List.filter (fun a -> Id.Set.subset (collect_vars evd a) ids) candidates let restrict_evar evd evk filter ?src candidates = let evar_info = Evd.find_undefined evd evk in let candidates = Option.map (filter_effective_candidates evd evar_info filter) candidates in match candidates with | Some [] -> raise (ClearDependencyError (*FIXME*)(Id.of_string "blah", (NoCandidatesLeft evk), None)) | _ -> Evd.restrict evk filter ?candidates ?src evd let rec check_and_clear_in_constr env evdref err ids global c = (* returns a new constr where all the evars have been 'cleaned' (ie the hypotheses ids have been removed from the contexts of evars). 
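For example (an illustration added for clarity): when clearing [x] from a goal that mentions an undefined evar [?e] whose instance uses [x], the corresponding hypothesis is filtered out of [?e]'s context; if the type of [?e] still depends on that hypothesis, the function raises [ClearDependencyError] carrying [EvarTypingBreak] instead.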
[global] should be true iff there is some variable of [ids] which is a section variable *) match kind c with | Var id' -> if Id.Set.mem id' ids then raise (ClearDependencyError (id', err, None)) else c | ( Const _ | Ind _ | Construct _ ) -> let () = if global then let check id' = if Id.Set.mem id' ids then raise (ClearDependencyError (id',err,Some (fst @@ destRef c))) in Id.Set.iter check (Environ.vars_of_global env (fst @@ destRef c)) in c | Evar (evk,l as ev) -> if Evd.is_defined !evdref evk then (* If evk is already defined we replace it by its definition *) let nc = Evd.existential_value !evdref (EConstr.of_existential ev) in let nc = EConstr.Unsafe.to_constr nc in (check_and_clear_in_constr env evdref err ids global nc) else (* We check for dependencies to elements of ids in the evar_info corresponding to e and in the instance of arguments. Concurrently, we build a new evar corresponding to e where hypotheses of ids have been removed *) let evi = Evd.find_undefined !evdref evk in let ctxt = Evd.evar_filtered_context evi in let (rids,filter) = List.fold_right2 (fun h a (ri,filter) -> try (* Check if some id to clear occurs in the instance a of rid in ev and remember the dependency *) let check id = if Id.Set.mem id ids then raise (Depends id) in let () = Id.Set.iter check (collect_vars !evdref (EConstr.of_constr a)) in (* Check if some rid to clear in the context of ev has dependencies in another hyp of the context of ev and transitively remember the dependency *) let check id _ = if occur_var_in_decl env !evdref id h then raise (Depends id) in let () = Id.Map.iter check ri in (* No dependency at all, we can keep this ev's context hyp *) (ri, true::filter) with Depends id -> (Id.Map.add (NamedDecl.get_id h) id ri, false::filter)) ctxt l (Id.Map.empty,[]) in (* Check if some rid to clear in the context of ev has dependencies in the type of ev and adjust the source of the dependency *) let _nconcl = try let nids = Id.Map.domain rids in let global = Id.Set.exists (fun id -> is_section_variable (Global.env ()) id) nids in let concl = EConstr.Unsafe.to_constr (evar_concl evi) in check_and_clear_in_constr env evdref (EvarTypingBreak ev) nids global concl with ClearDependencyError (rid,err,where) -> raise (ClearDependencyError (Id.Map.find rid rids,err,where)) in if Id.Map.is_empty rids then c else let origfilter = Evd.evar_filter evi in let filter = Evd.Filter.apply_subfilter origfilter filter in let evd = !evdref in let candidates = Evd.evar_candidates evi in let candidates = Option.map (List.map EConstr.of_constr) candidates in let (evd,_) = restrict_evar evd evk filter candidates in evdref := evd; Evd.existential_value0 !evdref ev | _ -> Constr.map (check_and_clear_in_constr env evdref err ids global) c let clear_hyps_in_evi_main env sigma hyps terms ids = (* clear_hyps_in_evi erases hypotheses ids in hyps, checking if some hypothesis does not depend on a element of ids, and erases ids in the contexts of the evars occurring in evi *) let evdref = ref sigma in let terms = List.map EConstr.Unsafe.to_constr terms in let global = Id.Set.exists (fun id -> is_section_variable (Global.env ()) id) ids in let terms = List.map (check_and_clear_in_constr env evdref (OccurHypInSimpleClause None) ids global) terms in let nhyps = let check_context decl = let err = OccurHypInSimpleClause (Some (NamedDecl.get_id decl)) in NamedDecl.map_constr (check_and_clear_in_constr env evdref err ids global) decl in let check_value vk = match force_lazy_val vk with | None -> vk | Some (_, d) -> if (Id.Set.for_all 
(fun e -> not (Id.Set.mem e d)) ids) then (* v does depend on any of ids, it's ok *) vk else (* v depends on one of the cleared hyps: we forget the computed value *) dummy_lazy_val () in remove_hyps ids check_context check_value hyps in (!evdref, nhyps,List.map EConstr.of_constr terms) let clear_hyps_in_evi env sigma hyps concl ids = match clear_hyps_in_evi_main env sigma hyps [concl] ids with | (sigma,nhyps,[nconcl]) -> (sigma,nhyps,nconcl) | _ -> assert false let clear_hyps2_in_evi env sigma hyps t concl ids = match clear_hyps_in_evi_main env sigma hyps [t;concl] ids with | (sigma,nhyps,[t;nconcl]) -> (sigma,nhyps,t,nconcl) | _ -> assert false (* spiwack: a few functions to gather evars on which goals depend. *) let queue_set q is_dependent set = Evar.Set.iter (fun a -> Queue.push (is_dependent,a) q) set let queue_term q is_dependent c = queue_set q is_dependent (evar_nodes_of_term c) let process_dependent_evar q acc evm is_dependent e = let evi = Evd.find evm e in (* Queues evars appearing in the types of the goal (conclusion, then hypotheses), they are all dependent. *) queue_term q true evi.evar_concl; List.iter begin fun decl -> let open NamedDecl in queue_term q true (NamedDecl.get_type decl); match decl with | LocalAssum _ -> () | LocalDef (_,b,_) -> queue_term q true b end (EConstr.named_context_of_val evi.evar_hyps); match evi.evar_body with | Evar_empty -> if is_dependent then Evar.Map.add e None acc else acc | Evar_defined b -> let subevars = evar_nodes_of_term b in (* evars appearing in the definition of an evar [e] are marked as dependent when [e] is dependent itself: if [e] is a non-dependent goal, then, unless they are reach from another path, these evars are just other non-dependent goals. *) queue_set q is_dependent subevars; if is_dependent then Evar.Map.add e (Some subevars) acc else acc let gather_dependent_evars q evm = let acc = ref Evar.Map.empty in while not (Queue.is_empty q) do let (is_dependent,e) = Queue.pop q in (* checks if [e] has already been added to [!acc] *) begin if not (Evar.Map.mem e !acc) then acc := process_dependent_evar q !acc evm is_dependent e end done; !acc let gather_dependent_evars evm l = let q = Queue.create () in List.iter (fun a -> Queue.add (false,a) q) l; gather_dependent_evars q evm (* /spiwack *) (** [advance sigma g] returns [Some g'] if [g'] is undefined and is the current avatar of [g] (for instance [g] was changed by [clear] into [g']). It returns [None] if [g] has been (partially) solved. *) (* spiwack: [advance] is probably performance critical, and the good behaviour of its definition may depend sensitively to the actual definition of [Evd.find]. Currently, [Evd.find] starts looking for a value in the heap of undefined variable, which is small. Hence in the most common case, where [advance] is applied to an unsolved goal ([advance] is used to figure if a side effect has modified the goal) it terminates quickly. 
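A sketch of the expected behaviour, matching the code below: if [g] was turned into [g'] (e.g. by [clear]), the evar map records the alias [g ↦ g'] and [advance sigma g] behaves like [advance sigma g']; if [g] is still undefined it is returned as is; if [g] was genuinely solved by a term, the result is [None].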
*) let rec advance sigma evk = let evi = Evd.find sigma evk in match evi.evar_body with | Evar_empty -> Some evk | Evar_defined v -> match is_aliased_evar sigma evk with | Some evk -> advance sigma evk | None -> None let reachable_from_evars sigma evars = let aliased = Evd.get_aliased_evars sigma in let rec search evk visited = if Evar.Set.mem evk visited then visited else let visited = Evar.Set.add evk visited in match Evar.Map.find evk aliased with | evk' -> search evk' visited | exception Not_found -> visited in Evar.Set.fold (fun evk visited -> search evk visited) evars Evar.Set.empty (** The following functions return the set of undefined evars contained in the object, the defined evars being traversed. This is roughly a combination of the previous functions and [nf_evar]. *) let undefined_evars_of_term evd t = let rec evrec acc c = match EConstr.kind evd c with | Evar (n, l) -> let acc = Evar.Set.add n acc in List.fold_left evrec acc l | _ -> EConstr.fold evd evrec acc c in evrec Evar.Set.empty t let undefined_evars_of_named_context evd nc = Context.Named.fold_outside (NamedDecl.fold_constr (fun c s -> Evar.Set.union s (undefined_evars_of_term evd (EConstr.of_constr c)))) nc ~init:Evar.Set.empty let undefined_evars_of_evar_info evd evi = Evar.Set.union (undefined_evars_of_term evd evi.evar_concl) (Evar.Set.union (match evi.evar_body with | Evar_empty -> Evar.Set.empty | Evar_defined b -> undefined_evars_of_term evd b) (undefined_evars_of_named_context evd (named_context_of_val evi.evar_hyps))) type undefined_evars_cache = { mutable cache : (EConstr.named_declaration * Evar.Set.t) ref Id.Map.t; } let create_undefined_evars_cache () = { cache = Id.Map.empty; } let cached_evar_of_hyp cache sigma decl accu = match cache with | None -> let fold c acc = let evs = undefined_evars_of_term sigma c in Evar.Set.union evs acc in NamedDecl.fold_constr fold decl accu | Some cache -> let id = NamedDecl.get_annot decl in let r = try Id.Map.find id.binder_name cache.cache with Not_found -> (* Dummy value *) let r = ref (NamedDecl.LocalAssum (id, EConstr.mkProp), Evar.Set.empty) in let () = cache.cache <- Id.Map.add id.binder_name r cache.cache in r in let (decl', evs) = !r in let evs = if NamedDecl.equal (==) decl decl' then snd !r else let fold c acc = let evs = undefined_evars_of_term sigma c in Evar.Set.union evs acc in let evs = NamedDecl.fold_constr fold decl Evar.Set.empty in let () = r := (decl, evs) in evs in Evar.Set.fold Evar.Set.add evs accu let filtered_undefined_evars_of_evar_info ?cache sigma evi = let evars_of_named_context cache accu nc = let fold decl accu = cached_evar_of_hyp cache sigma (EConstr.of_named_decl decl) accu in Context.Named.fold_outside fold nc ~init:accu in let accu = match evi.evar_body with | Evar_empty -> Evar.Set.empty | Evar_defined b -> evars_of_term sigma b in let accu = Evar.Set.union (undefined_evars_of_term sigma evi.evar_concl) accu in let ctxt = EConstr.Unsafe.to_named_context (evar_filtered_context evi) in evars_of_named_context cache accu ctxt (* spiwack: this is a more complete version of {!Termops.occur_evar}. The latter does not look recursively into an [evar_map]. If unification only need to check superficially, tactics do not have this luxury, and need the more complete version. 
*) let occur_evar_upto sigma n c = let c = EConstr.Unsafe.to_constr c in let rec occur_rec c = match kind c with | Evar (sp,_) when Evar.equal sp n -> raise Occur | Evar e -> Option.iter occur_rec (existential_opt_value0 sigma e) | _ -> Constr.iter occur_rec c in try occur_rec c; false with Occur -> true (* We don't try to guess in which sort the type should be defined, since any type has type Type. May cause some trouble, but not so far... *) let judge_of_new_Type evd = let open EConstr in let (evd', s) = new_univ_variable univ_rigid evd in (evd', { uj_val = mkType s; uj_type = mkType (Univ.super s) }) let subterm_source evk ?where (loc,k) = let evk = match k with | Evar_kinds.SubEvar (None,evk) when where = None -> evk | _ -> evk in (loc,Evar_kinds.SubEvar (where,evk)) (* Add equality constraints for covariant/invariant positions. For irrelevant positions, unify universes when flexible. *) let compare_cumulative_instances cv_pb variances u u' sigma = let open UnivProblem in let cstrs = Univ.Constraints.empty in let soft = Set.empty in let cstrs, soft = Array.fold_left3 (fun (cstrs, soft) v u u' -> let open Univ.Variance in match v with | Irrelevant -> cstrs, Set.add (UWeak (u,u')) soft | Covariant when cv_pb == Reduction.CUMUL -> Univ.Constraints.add (u,Univ.Le,u') cstrs, soft | Covariant | Invariant -> Univ.Constraints.add (u,Univ.Eq,u') cstrs, soft) (cstrs,soft) variances (Univ.Instance.to_array u) (Univ.Instance.to_array u') in match Evd.add_constraints sigma cstrs with | sigma -> Inl (Evd.add_universe_constraints sigma soft) | exception Univ.UniverseInconsistency p -> Inr p let compare_constructor_instances evd u u' = let open UnivProblem in let soft = Array.fold_left2 (fun cs u u' -> Set.add (UWeak (u,u')) cs) Set.empty (Univ.Instance.to_array u) (Univ.Instance.to_array u') in Evd.add_universe_constraints evd soft (** [eq_constr_univs_test ~evd ~extended_evd t u] tests equality of [t] and [u] up to existential variable instantiation and equalisable universes. The term [t] is interpreted in [evd] while [u] is interpreted in [extended_evd]. The universe constraints in [extended_evd] are assumed to be an extension of those in [evd]. *) let eq_constr_univs_test ~evd ~extended_evd t u = (* spiwack: mild code duplication with {!Evd.eq_constr_univs}. 
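The difference with {!Evd.eq_constr_univs} is that the two terms are read through two distinct evar maps: [t] through [evd] and [u] through [extended_evd] (see [kind1] and [kind2] below), hence the duplicated comparison loop.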
*) let open Evd in let t = EConstr.Unsafe.to_constr t and u = EConstr.Unsafe.to_constr u in let sigma = ref extended_evd in let eq_universes _ u1 u2 = let u1 = normalize_universe_instance !sigma u1 in let u2 = normalize_universe_instance !sigma u2 in UGraph.check_eq_instances (universes !sigma) u1 u2 in let eq_sorts s1 s2 = if Sorts.equal s1 s2 then true else let u1 = Sorts.univ_of_sort s1 and u2 = Sorts.univ_of_sort s2 in try sigma := add_universe_constraints !sigma UnivProblem.(Set.singleton (UEq (u1, u2))); true with Univ.UniverseInconsistency _ | UniversesDiffer -> false in let kind1 = kind_of_term_upto evd in let kind2 = kind_of_term_upto extended_evd in let rec eq_constr' nargs m n = Constr.compare_head_gen_with kind1 kind2 eq_universes eq_sorts eq_constr' nargs m n in Constr.compare_head_gen_with kind1 kind2 eq_universes eq_sorts eq_constr' 0 t u coq-8.15.0/engine/evarutil.mli000066400000000000000000000251411417001151100161640ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* metavariable (** {6 Creating a fresh evar given their type and context} *) type naming_mode = | KeepUserNameAndRenameExistingButSectionNames | KeepUserNameAndRenameExistingEvenSectionNames | KeepExistingNames | FailIfConflict | ProgramNaming val new_evar : ?src:Evar_kinds.t Loc.located -> ?filter:Filter.t -> ?abstract_arguments:Abstraction.t -> ?candidates:constr list -> ?naming:intro_pattern_naming_expr -> ?typeclass_candidate:bool -> ?principal:bool -> ?hypnaming:naming_mode -> env -> evar_map -> types -> evar_map * EConstr.t (** Low-level interface to create an evar. @param src User-facing source for the evar @param filter See {!Evd.Filter}, must be the same length as [named_context_val] @param identity See {!Evd.Identity}, must be the name projection of [named_context_val] @param naming A naming scheme for the evar @param principal Whether the evar is the principal goal @param named_context_val The context of the evar @param types The type of conclusion of the evar *) val new_pure_evar : ?src:Evar_kinds.t Loc.located -> ?filter:Filter.t -> ?identity:Identity.t -> ?abstract_arguments:Abstraction.t -> ?candidates:constr list -> ?naming:intro_pattern_naming_expr -> ?typeclass_candidate:bool -> ?principal:bool -> named_context_val -> evar_map -> types -> evar_map * Evar.t (** Create a new Type existential variable, as we keep track of them during type-checking and unification. 
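A hypothetical call site (illustration only; [env] and [sigma] stand for the ambient environment and evar map, and [with_fresh_type] is not part of this interface):

{[
  (* Create a fresh evar [ty] whose type is the new flexible sort [s],
     i.e. [ty] can be used wherever a type is expected. *)
  let with_fresh_type env sigma =
    let sigma, (ty, s) = new_type_evar env sigma Evd.univ_flexible in
    sigma, ty, s
]}

The returned sort [s] is the universe created for the evar, so callers can constrain it further if needed.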
*) val new_type_evar : ?src:Evar_kinds.t Loc.located -> ?filter:Filter.t -> ?naming:intro_pattern_naming_expr -> ?principal:bool -> ?hypnaming:naming_mode -> env -> evar_map -> rigid -> evar_map * (constr * Sorts.t) val new_Type : ?rigid:rigid -> evar_map -> evar_map * constr val make_pure_subst : evar_info -> 'a list -> (Id.t * 'a) list (** {6 Evars/Metas switching...} *) val non_instantiated : evar_map -> evar_info Evar.Map.t (** {6 Unification utils} *) (** [head_evar c] returns the head evar of [c] if any *) exception NoHeadEvar val head_evar : evar_map -> constr -> Evar.t (** may raise NoHeadEvar *) (* Expand head evar if any *) val whd_head_evar : evar_map -> constr -> constr (* An over-approximation of [has_undefined (nf_evars evd c)] *) val has_undefined_evars : evar_map -> constr -> bool val is_ground_term : evar_map -> constr -> bool val is_ground_env : evar_map -> env -> bool (** [gather_dependent_evars evm seeds] classifies the evars in [evm] as dependent_evars and goals (these may overlap). A goal is an evar in [seeds] or an evar appearing in the (partial) definition of a goal. A dependent evar is an evar appearing in the type (hypotheses and conclusion) of a goal, or in the type or (partial) definition of a dependent evar. The value return is a map associating to each dependent evar [None] if it has no (partial) definition or [Some s] if [s] is the list of evars appearing in its (partial) definition. *) val gather_dependent_evars : evar_map -> Evar.t list -> (Evar.Set.t option) Evar.Map.t (** [advance sigma g] returns [Some g'] if [g'] is undefined and is the current avatar of [g] (for instance [g] was changed by [clear] into [g']). It returns [None] if [g] has been (partially) solved. *) val advance : evar_map -> Evar.t -> Evar.t option (** [reachable_from_evars sigma seeds] computes the descendents of evars in [seeds] by restriction or evar-evar unifications in [sigma]. *) val reachable_from_evars : evar_map -> Evar.Set.t -> Evar.Set.t (** The following functions return the set of undefined evars contained in the object, the defined evars being traversed. This is roughly a combination of the previous functions and [nf_evar]. *) val undefined_evars_of_term : evar_map -> constr -> Evar.Set.t val undefined_evars_of_named_context : evar_map -> Constr.named_context -> Evar.Set.t val undefined_evars_of_evar_info : evar_map -> evar_info -> Evar.Set.t type undefined_evars_cache val create_undefined_evars_cache : unit -> undefined_evars_cache val filtered_undefined_evars_of_evar_info : ?cache:undefined_evars_cache -> evar_map -> evar_info -> Evar.Set.t (** [occur_evar_upto sigma k c] returns [true] if [k] appears in [c]. It looks up recursively in [sigma] for the value of existential variables. 
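For instance (an illustration, not from the original documentation): if [?e] is defined in [sigma] by a term that mentions [?k], then [occur_evar_upto sigma k c] returns [true] for a term [c] mentioning only [?e], whereas a purely syntactic occur check on [c] would return [false].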
*) val occur_evar_upto : evar_map -> Evar.t -> constr -> bool (** {6 Value/Type constraints} *) val judge_of_new_Type : evar_map -> evar_map * unsafe_judgment (***********************************************************) val create_clos_infos : env -> evar_map -> CClosure.RedFlags.reds -> CClosure.clos_infos (** [flush_and_check_evars] raise [Uninstantiated_evar] if an evar remains uninstantiated; [nf_evar] leaves uninstantiated evars as is *) val whd_evar : evar_map -> constr -> constr val nf_evar : evar_map -> constr -> constr val j_nf_evar : evar_map -> unsafe_judgment -> unsafe_judgment val jl_nf_evar : evar_map -> unsafe_judgment list -> unsafe_judgment list val jv_nf_evar : evar_map -> unsafe_judgment array -> unsafe_judgment array val tj_nf_evar : evar_map -> unsafe_type_judgment -> unsafe_type_judgment val nf_named_context_evar : evar_map -> Constr.named_context -> Constr.named_context val nf_rel_context_evar : evar_map -> rel_context -> rel_context val nf_env_evar : evar_map -> env -> env val nf_evar_info : evar_map -> evar_info -> evar_info val nf_evar_map : evar_map -> evar_map val nf_evar_map_undefined : evar_map -> evar_map (** Presenting terms without solved evars *) val nf_evars_universes : evar_map -> Constr.constr -> Constr.constr (** Replacing all evars, possibly raising [Uninstantiated_evar] *) exception Uninstantiated_evar of Evar.t val flush_and_check_evars : evar_map -> constr -> Constr.constr (** [finalize env sigma f] combines universe minimisation, evar-and-universe normalisation and universe restriction. It minimizes universes in [sigma], calls [f] a normalisation function with respect to the updated [sigma] and restricts the local universes of [sigma] to those encountered while running [f]. Note that the normalizer passed to [f] holds some imperative state in its closure. *) val finalize : ?abort_on_undefined_evars:bool -> evar_map -> ((EConstr.t -> Constr.t) -> 'a) -> evar_map * 'a (** {6 Term manipulation up to instantiation} *) (** Like {!Constr.kind} except that [kind_of_term sigma t] exposes [t] as an evar [e] only if [e] is uninstantiated in [sigma]. Otherwise the value of [e] in [sigma] is (recursively) used. *) val kind_of_term_upto : evar_map -> Constr.constr -> (Constr.constr, Constr.types, Sorts.t, Univ.Instance.t) kind_of_term (** [eq_constr_univs_test ~evd ~extended_evd t u] tests equality of [t] and [u] up to existential variable instantiation and equalisable universes. The term [t] is interpreted in [evd] while [u] is interpreted in [extended_evd]. The universe constraints in [extended_evd] are assumed to be an extension of those in [evd]. *) val eq_constr_univs_test : evd:Evd.evar_map -> extended_evd:Evd.evar_map -> constr -> constr -> bool (** [compare_cumulative_instances cv_pb variance u1 u2 sigma] Returns [Inl sigma'] where [sigma'] is [sigma] augmented with universe constraints such that [u1 cv_pb? u2] according to [variance]. Additionally flexible universes in irrelevant positions are unified if possible. Returns [Inr p] when the former is impossible. *) val compare_cumulative_instances : Reduction.conv_pb -> Univ.Variance.t array -> Univ.Instance.t -> Univ.Instance.t -> evar_map -> (evar_map, Univ.univ_inconsistency) Util.union (** We should only compare constructors at convertible types, so this is only an opportunity to unify universes. 
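Concretely (a description of the current behaviour, added for illustration): for two instances of the same length, only weak constraints [UWeak (ui, ui')] between corresponding levels are recorded; unlike [compare_cumulative_instances] above, no [Le] or [Eq] constraint is generated.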
*) val compare_constructor_instances : evar_map -> Univ.Instance.t -> Univ.Instance.t -> evar_map (** {6 Unification problems} *) type unification_pb = conv_pb * env * constr * constr (** [add_unification_pb ?tail pb sigma] Add a unification problem [pb] to [sigma], if not already present. Put it at the end of the list if [tail] is true, by default it is false. *) val add_unification_pb : ?tail:bool -> unification_pb -> evar_map -> evar_map (** {6 Removing hyps in evars'context} raise OccurHypInSimpleClause if the removal breaks dependencies *) type clear_dependency_error = | OccurHypInSimpleClause of Id.t option | EvarTypingBreak of Constr.existential | NoCandidatesLeft of Evar.t exception ClearDependencyError of Id.t * clear_dependency_error * GlobRef.t option (** Restrict an undefined evar according to a (sub)filter and candidates. The evar will be defined if there is only one candidate left, @raise ClearDependencyError NoCandidatesLeft if the filter turns the candidates into an empty list. *) val restrict_evar : evar_map -> Evar.t -> Filter.t -> ?src:Evar_kinds.t Loc.located -> constr list option -> evar_map * Evar.t val clear_hyps_in_evi : env -> evar_map -> named_context_val -> types -> Id.Set.t -> evar_map * named_context_val * types val clear_hyps2_in_evi : env -> evar_map -> named_context_val -> types -> types -> Id.Set.t -> evar_map * named_context_val * types * types type csubst val empty_csubst : csubst val csubst_subst : csubst -> constr -> constr type ext_named_context = csubst * Id.Set.t * named_context_val val push_rel_decl_to_named_context : ?hypnaming:naming_mode -> evar_map -> rel_declaration -> ext_named_context -> ext_named_context val push_rel_context_to_named_context : ?hypnaming:naming_mode -> Environ.env -> evar_map -> types -> named_context_val * types * constr list * csubst val generalize_evar_over_rels : evar_map -> existential -> types * constr list val subterm_source : Evar.t -> ?where:Evar_kinds.subevar_kind -> Evar_kinds.t Loc.located -> Evar_kinds.t Loc.located val meta_counter_summary_tag : int Summary.Dyn.tag coq-8.15.0/engine/evd.ml000066400000000000000000001360431417001151100147420ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> bool val identity : t val filter_list : t -> 'a list -> 'a list val filter_array : t -> 'a array -> 'a array val extend : int -> t -> t val compose : t -> t -> t val apply_subfilter : t -> bool list -> t val restrict_upon : t -> int -> (int -> bool) -> t option val map_along : (bool -> 'a -> bool) -> t -> 'a list -> t val make : bool list -> t val repr : t -> bool list option end = struct type t = bool list option (** We guarantee through the interface that if a filter is [Some _] then it contains at least one [false] somewhere. 
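Said differently, the identity filter is canonically represented by [None]: [normalize] below maps any all-[true] list back to [None], and the smart constructors ([make], [compose], [map_along], ...) go through it.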
*) let identity = None let rec equal l1 l2 = match l1, l2 with | [], [] -> true | h1 :: l1, h2 :: l2 -> (if h1 then h2 else not h2) && equal l1 l2 | _ -> false let equal l1 l2 = match l1, l2 with | None, None -> true | Some _, None | None, Some _ -> false | Some l1, Some l2 -> equal l1 l2 let rec is_identity = function | [] -> true | true :: l -> is_identity l | false :: _ -> false let normalize f = if is_identity f then None else Some f let filter_list f l = match f with | None -> l | Some f -> CList.filter_with f l let filter_array f v = match f with | None -> v | Some f -> CArray.filter_with f v let rec extend n l = if n = 0 then l else extend (pred n) (true :: l) let extend n = function | None -> None | Some f -> Some (extend n f) let compose f1 f2 = match f1 with | None -> f2 | Some f1 -> match f2 with | None -> None | Some f2 -> normalize (CList.filter_with f1 f2) let apply_subfilter_array filter subfilter = (* In both cases we statically know that the argument will contain at least one [false] *) match filter with | None -> Some (Array.to_list subfilter) | Some f -> let len = Array.length subfilter in let fold b (i, ans) = if b then let () = assert (0 <= i) in (pred i, Array.unsafe_get subfilter i :: ans) else (i, false :: ans) in Some (snd (List.fold_right fold f (pred len, []))) let apply_subfilter filter subfilter = apply_subfilter_array filter (Array.of_list subfilter) let restrict_upon f len p = let newfilter = Array.init len p in if Array.for_all (fun id -> id) newfilter then None else Some (apply_subfilter_array f newfilter) let map_along f flt l = let ans = match flt with | None -> List.map (fun x -> f true x) l | Some flt -> List.map2 f flt l in normalize ans let make l = normalize l let repr f = f end module Abstraction = struct type abstraction = | Abstract | Imitate type t = abstraction list let identity = [] let abstract_last l = Abstract :: l end module Identity : sig type t val make : econstr list -> t val none : unit -> t val repr : named_context_val -> Filter.t -> t -> econstr list val is_identity : econstr list -> t -> bool end = struct type t = econstr list option ref let make s = ref (Some s) let none () = ref None let repr sign filter s = match !s with | None -> let ans = Filter.filter_list filter sign.env_named_var in let () = s := Some ans in ans | Some s -> s let is_identity l s = match !s with | None -> false | Some s -> s == l end (* The kinds of existential variables are now defined in [Evar_kinds] *) (* The type of mappings for existential variables *) module Store = Store.Make () let string_of_existential evk = "?X" ^ string_of_int (Evar.repr evk) type evar_body = | Evar_empty | Evar_defined of constr type evar_info = { evar_concl : constr; evar_hyps : named_context_val; evar_body : evar_body; evar_filter : Filter.t; evar_abstract_arguments : Abstraction.t; evar_source : Evar_kinds.t Loc.located; evar_candidates : constr list option; (* if not None, list of allowed instances *) evar_identity : Identity.t; } let make_evar hyps ccl = { evar_concl = ccl; evar_hyps = hyps; evar_body = Evar_empty; evar_filter = Filter.identity; evar_abstract_arguments = Abstraction.identity; evar_source = Loc.tag @@ Evar_kinds.InternalHole; evar_candidates = None; evar_identity = Identity.none (); } let instance_mismatch () = anomaly (Pp.str "Signature and its instance do not match.") let evar_concl evi = evi.evar_concl let evar_filter evi = evi.evar_filter let evar_body evi = evi.evar_body let evar_context evi = named_context_of_val evi.evar_hyps let evar_filtered_context 
evi = Filter.filter_list (evar_filter evi) (evar_context evi) let evar_candidates evi = evi.evar_candidates let evar_hyps evi = evi.evar_hyps let evar_filtered_hyps evi = match Filter.repr (evar_filter evi) with | None -> evar_hyps evi | Some filter -> let rec make_hyps filter ctxt = match filter, ctxt with | [], [] -> empty_named_context_val | false :: filter, _ :: ctxt -> make_hyps filter ctxt | true :: filter, decl :: ctxt -> let hyps = make_hyps filter ctxt in push_named_context_val decl hyps | _ -> instance_mismatch () in make_hyps filter (evar_context evi) let evar_env env evi = Environ.reset_with_named_context evi.evar_hyps env let evar_filtered_env env evi = match Filter.repr (evar_filter evi) with | None -> evar_env env evi | Some filter -> let rec make_env filter ctxt = match filter, ctxt with | [], [] -> reset_context env | false :: filter, _ :: ctxt -> make_env filter ctxt | true :: filter, decl :: ctxt -> let env = make_env filter ctxt in push_named decl env | _ -> instance_mismatch () in make_env filter (evar_context evi) let evar_identity_subst evi = Identity.repr evi.evar_hyps evi.evar_filter evi.evar_identity let map_evar_body f = function | Evar_empty -> Evar_empty | Evar_defined d -> Evar_defined (f d) let map_evar_info f evi = {evi with evar_body = map_evar_body f evi.evar_body; evar_hyps = map_named_val (fun d -> NamedDecl.map_constr f d) evi.evar_hyps; evar_concl = f evi.evar_concl; evar_candidates = Option.map (List.map f) evi.evar_candidates } (* This exception is raised by *.existential_value *) exception NotInstantiatedEvar (* Note: let-in contributes to the instance *) let evar_instance_array test_id info args = let rec instrec filter ctxt args = match filter, ctxt, args with | [], [], [] -> [] | false :: filter, _ :: ctxt, args -> instrec filter ctxt args | true :: filter, d :: ctxt, c :: args -> if test_id d c then instrec filter ctxt args else (NamedDecl.get_id d, c) :: instrec filter ctxt args | _ -> instance_mismatch () in match Filter.repr (evar_filter info) with | None -> let rec instance ctxt args = match ctxt, args with | [], [] -> [] | d :: ctxt, c :: args -> if test_id d c then instance ctxt args else (NamedDecl.get_id d, c) :: instance ctxt args | _ -> instance_mismatch () in instance (evar_context info) args | Some filter -> instrec filter (evar_context info) args let make_evar_instance_array info args = if Identity.is_identity args info.evar_identity then [] else evar_instance_array (NamedDecl.get_id %> isVarId) info args let instantiate_evar_array info c args = let inst = make_evar_instance_array info args in match inst with | [] -> c | _ -> replace_vars inst c type 'a in_evar_universe_context = 'a * UState.t (*******************************************************************) (* Metamaps *) (*******************************************************************) (* Constraints for existential variables *) (*******************************************************************) type 'a freelisted = { rebus : 'a; freemetas : Int.Set.t } (* Collects all metavars appearing in a constr *) let metavars_of c = let rec collrec acc c = match kind c with | Meta mv -> Int.Set.add mv acc | _ -> Constr.fold collrec acc c in collrec Int.Set.empty c let mk_freelisted c = { rebus = c; freemetas = metavars_of c } let map_fl f cfl = { cfl with rebus=f cfl.rebus } (* Status of an instance found by unification wrt to the meta it solves: - a supertype of the meta (e.g. the solution to ?X <= T is a supertype of ?X) - a subtype of the meta (e.g. 
the solution to T <= ?X is a supertype of ?X) - a term that can be eta-expanded n times while still being a solution (e.g. the solution [P] to [?X u v = P u v] can be eta-expanded twice) *) type instance_constraint = IsSuperType | IsSubType | Conv let eq_instance_constraint c1 c2 = c1 == c2 (* Status of the unification of the type of an instance against the type of the meta it instantiates: - CoerceToType means that the unification of types has not been done and that a coercion can still be inserted: the meta should not be substituted freely (this happens for instance given via the "with" binding clause). - TypeProcessed means that the information obtainable from the unification of types has been extracted. - TypeNotProcessed means that the unification of types has not been done but it is known that no coercion may be inserted: the meta can be substituted freely. *) type instance_typing_status = CoerceToType | TypeNotProcessed | TypeProcessed (* Status of an instance together with the status of its type unification *) type instance_status = instance_constraint * instance_typing_status (* Clausal environments *) type clbinding = | Cltyp of Name.t * constr freelisted | Clval of Name.t * (constr freelisted * instance_status) * constr freelisted let map_clb f = function | Cltyp (na,cfl) -> Cltyp (na,map_fl f cfl) | Clval (na,(cfl1,pb),cfl2) -> Clval (na,(map_fl f cfl1,pb),map_fl f cfl2) (* name of defined is erased (but it is pretty-printed) *) let clb_name = function Cltyp(na,_) -> (na,false) | Clval (na,_,_) -> (na,true) (***********************) module Metaset = Int.Set module Metamap = Int.Map let metamap_to_list m = Metamap.fold (fun n v l -> (n,v)::l) m [] (*************************) (* Unification state *) type conv_pb = Reduction.conv_pb type evar_constraint = conv_pb * Environ.env * constr * constr module EvMap = Evar.Map module EvNames : sig type t val empty : t val add_name_undefined : Id.t option -> Evar.t -> evar_info -> t -> t val remove_name_defined : Evar.t -> t -> t val rename : Evar.t -> Id.t -> t -> t val reassign_name_defined : Evar.t -> Evar.t -> t -> t val ident : Evar.t -> t -> Id.t option val key : Id.t -> t -> Evar.t end = struct type t = Id.t EvMap.t * Evar.t Id.Map.t let empty = (EvMap.empty, Id.Map.empty) let add_name_newly_undefined id evk evi (evtoid, idtoev as names) = match id with | None -> names | Some id -> if Id.Map.mem id idtoev then user_err (str "Already an existential evar of name " ++ Id.print id); (EvMap.add evk id evtoid, Id.Map.add id evk idtoev) let add_name_undefined naming evk evi (evtoid,idtoev as evar_names) = if EvMap.mem evk evtoid then evar_names else add_name_newly_undefined naming evk evi evar_names let remove_name_defined evk (evtoid, idtoev as names) = let id = try Some (EvMap.find evk evtoid) with Not_found -> None in match id with | None -> names | Some id -> (EvMap.remove evk evtoid, Id.Map.remove id idtoev) let rename evk id (evtoid, idtoev) = let id' = try Some (EvMap.find evk evtoid) with Not_found -> None in match id' with | None -> (EvMap.add evk id evtoid, Id.Map.add id evk idtoev) | Some id' -> if Id.Map.mem id idtoev then anomaly (str "Evar name already in use."); (EvMap.set evk id evtoid (* overwrite old name *), Id.Map.add id evk (Id.Map.remove id' idtoev)) let reassign_name_defined evk evk' (evtoid, idtoev as names) = let id = try Some (EvMap.find evk evtoid) with Not_found -> None in match id with | None -> names (* evk' must not be defined *) | Some id -> (EvMap.add evk' id (EvMap.remove evk evtoid), Id.Map.add id evk' 
(Id.Map.remove id idtoev)) let ident evk (evtoid, _) = try Some (EvMap.find evk evtoid) with Not_found -> None let key id (_, idtoev) = Id.Map.find id idtoev end type evar_flags = { obligation_evars : Evar.Set.t; aliased_evars : Evar.t Evar.Map.t; typeclass_evars : Evar.Set.t } type side_effect_role = | Schema of inductive * string type side_effects = { seff_private : Safe_typing.private_constants; seff_roles : side_effect_role Cmap.t; } module FutureGoals : sig type t = private { comb : Evar.t list; principal : Evar.t option; (** if [Some e], [e] must be contained in [comb]. The evar [e] will inherit properties (now: the name) of the evar which will be instantiated with a term containing [e]. *) } val map_filter : (Evar.t -> Evar.t option) -> t -> t (** Applies a function on the future goals *) val filter : (Evar.t -> bool) -> t -> t (** Applies a filter on the future goals *) type stack val empty_stack : stack val push : stack -> stack val pop : stack -> t * stack val add : principal:bool -> Evar.t -> stack -> stack val remove : Evar.t -> stack -> stack val fold : ('a -> Evar.t -> 'a) -> 'a -> stack -> 'a val pr_stack : stack -> Pp.t end = struct type t = { comb : Evar.t list; principal : Evar.t option; (** if [Some e], [e] must be contained in [comb]. The evar [e] will inherit properties (now: the name) of the evar which will be instantiated with a term containing [e]. *) } type stack = t list let set f = function | [] -> anomaly Pp.(str"future_goals stack should not be empty") | hd :: tl -> f hd :: tl let add ~principal evk stack = let add fgl = let comb = evk :: fgl.comb in let principal = if principal then match fgl.principal with | Some _ -> CErrors.user_err Pp.(str "Only one main goal per instantiation.") | None -> Some evk else fgl.principal in { comb; principal } in set add stack let remove e stack = let remove fgl = let filter e' = not (Evar.equal e e') in let principal = Option.filter filter fgl.principal in let comb = List.filter filter fgl.comb in { principal; comb } in List.map remove stack let empty = { principal = None; comb = []; } let empty_stack = [empty] let push stack = empty :: stack let pop stack = match stack with | [] -> anomaly Pp.(str"future_goals stack should not be empty") | hd :: tl -> hd, tl let fold f acc stack = let future_goals = List.hd stack in List.fold_left f acc future_goals.comb let filter f fgl = let comb = List.filter f fgl.comb in let principal = Option.filter f fgl.principal in { comb; principal } let map_filter f fgl = let comb = List.map_filter f fgl.comb in let principal = Option.bind fgl.principal f in { comb; principal } let pr_stack stack = let open Pp in let pr_future_goals fgl = prlist_with_sep spc Evar.print fgl.comb ++ pr_opt (fun ev -> str"(principal: " ++ Evar.print ev ++ str")") fgl.principal in if List.is_empty stack then str"(empty stack)" else prlist_with_sep (fun () -> str"||") pr_future_goals stack end type evar_map = { (* Existential variables *) defn_evars : evar_info EvMap.t; undf_evars : evar_info EvMap.t; evar_names : EvNames.t; (** Universes *) universes : UState.t; (** Conversion problems *) conv_pbs : evar_constraint list; last_mods : Evar.Set.t; (** Metas *) metas : clbinding Metamap.t; evar_flags : evar_flags; (** Interactive proofs *) effects : side_effects; future_goals : FutureGoals.stack; (** list of newly created evars, to be eventually turned into goals if not solved.*) given_up : Evar.Set.t; shelf : Evar.t list list; extras : Store.t; } let get_is_maybe_typeclass, (is_maybe_typeclass_hook : (evar_map -> constr 
-> bool) Hook.t) = Hook.make ~default:(fun evd c -> false) () let is_maybe_typeclass sigma c = Hook.get get_is_maybe_typeclass sigma c (*** Lifting primitive from Evar.Map. ***) let rename evk id evd = { evd with evar_names = EvNames.rename evk id evd.evar_names } let add_with_name ?name ?(typeclass_candidate = true) d e i = match i.evar_body with | Evar_empty -> let evar_names = EvNames.add_name_undefined name e i d.evar_names in let evar_flags = if typeclass_candidate && is_maybe_typeclass d i.evar_concl then let flags = d.evar_flags in { flags with typeclass_evars = Evar.Set.add e flags.typeclass_evars } else d.evar_flags in { d with undf_evars = EvMap.add e i d.undf_evars; evar_names; evar_flags } | Evar_defined _ -> let evar_names = EvNames.remove_name_defined e d.evar_names in { d with defn_evars = EvMap.add e i d.defn_evars; evar_names } (** Evd.add is a low-level function mainly used to update the evar_info associated to an evar, so we prevent registering its typeclass status. *) let add d e i = add_with_name ~typeclass_candidate:false d e i (*** Evar flags: typeclasses, aliased or obligation flag *) let get_typeclass_evars evd = evd.evar_flags.typeclass_evars let set_typeclass_evars evd tcs = let flags = evd.evar_flags in { evd with evar_flags = { flags with typeclass_evars = tcs } } let is_typeclass_evar evd evk = let flags = evd.evar_flags in Evar.Set.mem evk flags.typeclass_evars let get_obligation_evars evd = evd.evar_flags.obligation_evars let set_obligation_evar evd evk = let flags = evd.evar_flags in let evar_flags = { flags with obligation_evars = Evar.Set.add evk flags.obligation_evars } in { evd with evar_flags } let is_obligation_evar evd evk = let flags = evd.evar_flags in Evar.Set.mem evk flags.obligation_evars (** Inheritance of flags: for evar-evar and restriction cases *) let inherit_evar_flags evar_flags evk evk' = let evk_typeclass = Evar.Set.mem evk evar_flags.typeclass_evars in let evk_obligation = Evar.Set.mem evk evar_flags.obligation_evars in let aliased_evars = Evar.Map.add evk evk' evar_flags.aliased_evars in let typeclass_evars = if evk_typeclass then let typeclass_evars = Evar.Set.remove evk evar_flags.typeclass_evars in Evar.Set.add evk' typeclass_evars else evar_flags.typeclass_evars in let obligation_evars = if evk_obligation then let obligation_evars = Evar.Set.remove evk evar_flags.obligation_evars in Evar.Set.add evk' obligation_evars else evar_flags.obligation_evars in { obligation_evars; aliased_evars; typeclass_evars } (** Removal: in all other cases of definition *) let remove_evar_flags evk evar_flags = { typeclass_evars = Evar.Set.remove evk evar_flags.typeclass_evars; obligation_evars = Evar.Set.remove evk evar_flags.obligation_evars; (* Aliasing information is kept. 
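Keeping it is presumably what lets [advance] and [reachable_from_evars], which follow these aliases, still resolve an evar to its current avatar after it has been defined or pruned.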
*) aliased_evars = evar_flags.aliased_evars } (** New evars *) let evar_counter_summary_name = "evar counter" (* Generator of existential names *) let evar_ctr, evar_counter_summary_tag = Summary.ref_tag 0 ~name:evar_counter_summary_name let new_untyped_evar () = incr evar_ctr; Evar.unsafe_of_int !evar_ctr let new_evar evd ?name ?typeclass_candidate evi = let evk = new_untyped_evar () in let evd = add_with_name evd ?name ?typeclass_candidate evk evi in (evd, evk) let remove d e = let undf_evars = EvMap.remove e d.undf_evars in let defn_evars = EvMap.remove e d.defn_evars in let future_goals = FutureGoals.remove e d.future_goals in let evar_flags = remove_evar_flags e d.evar_flags in { d with undf_evars; defn_evars; future_goals; evar_flags } let find d e = try EvMap.find e d.undf_evars with Not_found -> EvMap.find e d.defn_evars let find_undefined d e = EvMap.find e d.undf_evars let mem d e = EvMap.mem e d.undf_evars || EvMap.mem e d.defn_evars let undefined_map d = d.undf_evars let drop_all_defined d = { d with defn_evars = EvMap.empty } (* spiwack: not clear what folding over an evar_map, for now we shall simply fold over the inner evar_map. *) let fold f d a = EvMap.fold f d.defn_evars (EvMap.fold f d.undf_evars a) let fold_undefined f d a = EvMap.fold f d.undf_evars a let raw_map f d = let f evk info = let ans = f evk info in let () = match info.evar_body, ans.evar_body with | Evar_defined _, Evar_empty | Evar_empty, Evar_defined _ -> anomaly (str "Unrespectful mapping function.") | _ -> () in ans in let defn_evars = EvMap.Smart.mapi f d.defn_evars in let undf_evars = EvMap.Smart.mapi f d.undf_evars in { d with defn_evars; undf_evars; } let raw_map_undefined f d = let f evk info = let ans = f evk info in let () = match ans.evar_body with | Evar_defined _ -> anomaly (str "Unrespectful mapping function.") | _ -> () in ans in { d with undf_evars = EvMap.Smart.mapi f d.undf_evars; } let is_evar = mem let is_defined d e = EvMap.mem e d.defn_evars let is_undefined d e = EvMap.mem e d.undf_evars let existential_opt_value d (n, args) = match EvMap.find_opt n d.defn_evars with | None -> None | Some info -> match evar_body info with | Evar_defined c -> Some (instantiate_evar_array info c args) | Evar_empty -> None (* impossible but w/e *) let existential_value d ev = match existential_opt_value d ev with | None -> raise NotInstantiatedEvar | Some v -> v let existential_value0 = existential_value let existential_opt_value0 = existential_opt_value let existential_type d (n, args) = let info = try find d n with Not_found -> anomaly (str "Evar " ++ str (string_of_existential n) ++ str " was not declared.") in instantiate_evar_array info info.evar_concl args let existential_type0 = existential_type let add_constraints d c = { d with universes = UState.add_constraints d.universes c } let add_universe_constraints d c = { d with universes = UState.add_universe_constraints d.universes c } (*** /Lifting... 
***) (* evar_map are considered empty disregarding histories *) let is_empty d = EvMap.is_empty d.defn_evars && EvMap.is_empty d.undf_evars && List.is_empty d.conv_pbs && Metamap.is_empty d.metas let cmap f evd = { evd with metas = Metamap.map (map_clb f) evd.metas; defn_evars = EvMap.map (map_evar_info f) evd.defn_evars; undf_evars = EvMap.map (map_evar_info f) evd.undf_evars } (* spiwack: deprecated *) let create_evar_defs sigma = { sigma with conv_pbs=[]; last_mods=Evar.Set.empty; metas=Metamap.empty } let empty_evar_flags = { obligation_evars = Evar.Set.empty; aliased_evars = Evar.Map.empty; typeclass_evars = Evar.Set.empty } let empty_side_effects = { seff_private = Safe_typing.empty_private_constants; seff_roles = Cmap.empty; } let empty = { defn_evars = EvMap.empty; undf_evars = EvMap.empty; universes = UState.empty; conv_pbs = []; last_mods = Evar.Set.empty; evar_flags = empty_evar_flags; metas = Metamap.empty; effects = empty_side_effects; evar_names = EvNames.empty; (* id<->key for undefined evars *) future_goals = FutureGoals.empty_stack; given_up = Evar.Set.empty; shelf = [[]]; extras = Store.empty; } let from_env ?binders e = { empty with universes = UState.from_env ?binders e } let from_ctx uctx = { empty with universes = uctx } let has_undefined evd = not (EvMap.is_empty evd.undf_evars) let has_given_up evd = not (Evar.Set.is_empty evd.given_up) let has_shelved evd = not (List.for_all List.is_empty evd.shelf) let evars_reset_evd ?(with_conv_pbs=false) ?(with_univs=true) evd d = let conv_pbs = if with_conv_pbs then evd.conv_pbs else d.conv_pbs in let last_mods = if with_conv_pbs then evd.last_mods else d.last_mods in let universes = if not with_univs then evd.universes else UState.union evd.universes d.universes in { evd with metas = d.metas; last_mods; conv_pbs; universes } let merge_universe_context evd uctx' = { evd with universes = UState.union evd.universes uctx' } let set_universe_context evd uctx' = { evd with universes = uctx' } (* TODO: make unique *) let add_conv_pb ?(tail=false) pb d = if tail then {d with conv_pbs = d.conv_pbs @ [pb]} else {d with conv_pbs = pb::d.conv_pbs} let conv_pbs d = d.conv_pbs let evar_source evk d = (find d evk).evar_source let evar_ident evk evd = EvNames.ident evk evd.evar_names let evar_key id evd = EvNames.key id evd.evar_names let get_aliased_evars evd = evd.evar_flags.aliased_evars let is_aliased_evar evd evk = try Some (Evar.Map.find evk evd.evar_flags.aliased_evars) with Not_found -> None let downcast evk ccl evd = let evar_info = EvMap.find evk evd.undf_evars in let evar_info' = { evar_info with evar_concl = ccl } in { evd with undf_evars = EvMap.add evk evar_info' evd.undf_evars } (* extracts conversion problems that satisfy predicate p *) (* Note: conv_pbs not satisying p are stored back in reverse order *) let extract_conv_pbs evd p = let (pbs,pbs1) = List.fold_left (fun (pbs,pbs1) pb -> if p pb then (pb::pbs,pbs1) else (pbs,pb::pbs1)) ([],[]) evd.conv_pbs in {evd with conv_pbs = pbs1; last_mods = Evar.Set.empty}, pbs let extract_changed_conv_pbs evd p = extract_conv_pbs evd (fun pb -> p evd.last_mods pb) let extract_all_conv_pbs evd = extract_conv_pbs evd (fun _ -> true) let loc_of_conv_pb evd (pbty,env,t1,t2) = match kind (fst (decompose_app t1)) with | Evar (evk1,_) -> fst (evar_source evk1 evd) | _ -> match kind (fst (decompose_app t2)) with | Evar (evk2,_) -> fst (evar_source evk2 evd) | _ -> None (**********************************************************) (* Sort variables *) type rigid = UState.rigid = | UnivRigid | 
UnivFlexible of bool (** Is substitution by an algebraic ok? *) let univ_rigid = UnivRigid let univ_flexible = UnivFlexible false let univ_flexible_alg = UnivFlexible true let evar_universe_context d = d.universes let universe_context_set d = UState.context_set d.universes let to_universe_context evd = UState.context evd.universes let univ_entry ~poly evd = UState.univ_entry ~poly evd.universes let check_univ_decl ~poly evd decl = UState.check_univ_decl ~poly evd.universes decl let restrict_universe_context evd vars = { evd with universes = UState.restrict evd.universes vars } let universe_subst evd = UState.subst evd.universes let merge_context_set ?loc ?(sideff=false) rigid evd ctx' = {evd with universes = UState.merge ?loc ~sideff rigid evd.universes ctx'} let with_context_set ?loc rigid d (a, ctx) = (merge_context_set ?loc rigid d ctx, a) let new_univ_level_variable ?loc ?name rigid evd = let uctx', u = UState.new_univ_variable ?loc rigid name evd.universes in ({evd with universes = uctx'}, u) let new_univ_variable ?loc ?name rigid evd = let uctx', u = UState.new_univ_variable ?loc rigid name evd.universes in ({evd with universes = uctx'}, Univ.Universe.make u) let new_sort_variable ?loc ?name rigid d = let (d', u) = new_univ_variable ?loc rigid ?name d in (d', Sorts.sort_of_univ u) let add_global_univ d u = { d with universes = UState.add_global_univ d.universes u } let make_flexible_variable evd ~algebraic u = { evd with universes = UState.make_flexible_variable evd.universes ~algebraic u } let make_nonalgebraic_variable evd u = { evd with universes = UState.make_nonalgebraic_variable evd.universes u } (****************************************) (* Operations on constants *) (****************************************) let fresh_sort_in_family ?loc ?(rigid=univ_flexible) evd s = with_context_set ?loc rigid evd (UnivGen.fresh_sort_in_family s) let fresh_constant_instance ?loc ?(rigid=univ_flexible) env evd c = with_context_set ?loc rigid evd (UnivGen.fresh_constant_instance env c) let fresh_inductive_instance ?loc ?(rigid=univ_flexible) env evd i = with_context_set ?loc rigid evd (UnivGen.fresh_inductive_instance env i) let fresh_constructor_instance ?loc ?(rigid=univ_flexible) env evd c = with_context_set ?loc rigid evd (UnivGen.fresh_constructor_instance env c) let fresh_array_instance ?loc ?(rigid=univ_flexible) env evd = with_context_set ?loc rigid evd (UnivGen.fresh_array_instance env) let fresh_global ?loc ?(rigid=univ_flexible) ?names env evd gr = with_context_set ?loc rigid evd (UnivGen.fresh_global_instance ?loc ?names env gr) let is_sort_variable evd s = UState.is_sort_variable evd.universes s let is_flexible_level evd l = let uctx = evd.universes in Univ.Level.Map.mem l (UState.subst uctx) let is_eq_sort s1 s2 = if Sorts.equal s1 s2 then None else let u1 = univ_of_sort s1 and u2 = univ_of_sort s2 in if Univ.Universe.equal u1 u2 then None else Some (u1, u2) (* Precondition: l is not defined in the substitution *) let universe_rigidity evd l = let uctx = evd.universes in if Univ.Level.Set.mem l (Univ.ContextSet.levels (UState.context_set uctx)) then UnivFlexible (Univ.Level.Set.mem l (UState.algebraics uctx)) else UnivRigid let normalize_universe evd = let vars = UState.subst evd.universes in let normalize = UnivSubst.normalize_universe_opt_subst vars in normalize let normalize_universe_instance evd l = let vars = UState.subst evd.universes in let normalize = UnivSubst.level_subst_of (UnivSubst.normalize_univ_variable_opt_subst vars) in Univ.Instance.subst_fn normalize l let 
normalize_sort evars s = match s with | SProp | Prop | Set -> s | Type u -> let u' = normalize_universe evars u in if u' == u then s else Sorts.sort_of_univ u' (* FIXME inefficient *) let set_eq_sort env d s1 s2 = let s1 = normalize_sort d s1 and s2 = normalize_sort d s2 in match is_eq_sort s1 s2 with | None -> d | Some (u1, u2) -> if not (type_in_type env) then add_universe_constraints d (UnivProblem.Set.singleton (UnivProblem.UEq (u1,u2))) else d let set_eq_level d u1 u2 = add_constraints d (Univ.enforce_eq_level u1 u2 Univ.Constraints.empty) let set_leq_level d u1 u2 = add_constraints d (Univ.enforce_leq_level u1 u2 Univ.Constraints.empty) let set_eq_instances ?(flex=false) d u1 u2 = add_universe_constraints d (UnivProblem.enforce_eq_instances_univs flex u1 u2 UnivProblem.Set.empty) let set_leq_sort env evd s1 s2 = let s1 = normalize_sort evd s1 and s2 = normalize_sort evd s2 in match is_eq_sort s1 s2 with | None -> evd | Some (u1, u2) -> if not (type_in_type env) then add_universe_constraints evd (UnivProblem.Set.singleton (UnivProblem.ULe (u1,u2))) else evd let check_eq evd s s' = UGraph.check_eq (UState.ugraph evd.universes) s s' let check_leq evd s s' = UGraph.check_leq (UState.ugraph evd.universes) s s' let check_constraints evd csts = UGraph.check_constraints csts (UState.ugraph evd.universes) let fix_undefined_variables evd = { evd with universes = UState.fix_undefined_variables evd.universes } let nf_univ_variables evd = let uctx = UState.normalize_variables evd.universes in {evd with universes = uctx} let minimize_universes evd = let uctx' = UState.normalize_variables evd.universes in let uctx' = UState.minimize uctx' in {evd with universes = uctx'} let universe_of_name evd s = UState.universe_of_name evd.universes s let universe_binders evd = UState.universe_binders evd.universes let universes evd = UState.ugraph evd.universes let update_sigma_univs ugraph evd = { evd with universes = UState.update_sigma_univs evd.universes ugraph } exception UniversesDiffer = UState.UniversesDiffer (**********************************************************) (* Side effects *) let concat_side_effects eff eff' = { seff_private = Safe_typing.concat_private eff.seff_private eff'.seff_private; seff_roles = Cmap.fold Cmap.add eff.seff_roles eff'.seff_roles; } let emit_side_effects eff evd = let effects = concat_side_effects eff evd.effects in { evd with effects; universes = UState.emit_side_effects eff.seff_private evd.universes } let drop_side_effects evd = { evd with effects = empty_side_effects; } let eval_side_effects evd = evd.effects (* Future goals *) let declare_future_goal evk evd = let future_goals = FutureGoals.add ~principal:false evk evd.future_goals in { evd with future_goals } let declare_principal_goal evk evd = let future_goals = FutureGoals.add ~principal:true evk evd.future_goals in { evd with future_goals } let push_future_goals evd = { evd with future_goals = FutureGoals.push evd.future_goals } let pop_future_goals evd = let hd, future_goals = FutureGoals.pop evd.future_goals in hd, { evd with future_goals } let fold_future_goals f sigma = FutureGoals.fold f sigma sigma.future_goals let remove_future_goal evd evk = { evd with future_goals = FutureGoals.remove evk evd.future_goals } let pr_future_goals_stack evd = FutureGoals.pr_stack evd.future_goals let give_up ev evd = { evd with given_up = Evar.Set.add ev evd.given_up } let push_shelf evd = { evd with shelf = [] :: evd.shelf } let pop_shelf evd = match evd.shelf with | [] -> anomaly Pp.(str"shelf stack should not be empty") 
| hd :: tl -> hd, { evd with shelf = tl } let filter_shelf f evd = { evd with shelf = List.map (List.filter f) evd.shelf } let shelve evd l = match evd.shelf with | [] -> anomaly Pp.(str"shelf stack should not be empty") | hd :: tl -> { evd with shelf = (hd@l) :: tl } let unshelve evd l = { evd with shelf = List.map (List.filter (fun ev -> not (CList.mem_f Evar.equal ev l))) evd.shelf } let given_up evd = evd.given_up let shelf evd = List.flatten evd.shelf let pr_shelf evd = let open Pp in if List.is_empty evd.shelf then str"(empty stack)" else prlist_with_sep (fun () -> str"||") (prlist_with_sep spc Evar.print) evd.shelf let define_aux def undef evk body = let oldinfo = try EvMap.find evk undef with Not_found -> if EvMap.mem evk def then anomaly ~label:"Evd.define" (Pp.str "cannot define an evar twice.") else anomaly ~label:"Evd.define" (Pp.str "cannot define undeclared evar.") in let () = assert (oldinfo.evar_body == Evar_empty) in let newinfo = { oldinfo with evar_body = Evar_defined body } in EvMap.add evk newinfo def, EvMap.remove evk undef (* define the existential of section path sp as the constr body *) let define_gen evk body evd evar_flags = let (defn_evars, undf_evars) = define_aux evd.defn_evars evd.undf_evars evk body in let last_mods = match evd.conv_pbs with | [] -> evd.last_mods | _ -> Evar.Set.add evk evd.last_mods in let evar_names = EvNames.remove_name_defined evk evd.evar_names in { evd with defn_evars; undf_evars; last_mods; evar_names; evar_flags } (** By default, the obligation and evar tag of the evar is removed *) let define evk body evd = let evar_flags = remove_evar_flags evk evd.evar_flags in define_gen evk body evd evar_flags (** In case of an evar-evar solution, the flags are inherited *) let define_with_evar evk body evd = let evk' = fst (destEvar body) in let evar_flags = inherit_evar_flags evd.evar_flags evk evk' in let evd = unshelve evd [evk] in let future_goals = FutureGoals.remove evk evd.future_goals in let evd = { evd with future_goals } in define_gen evk body evd evar_flags (* In case of restriction, we declare the aliasing and inherit the obligation and typeclass flags. *) let restrict evk filter ?candidates ?src evd = let evk' = new_untyped_evar () in let evar_info = EvMap.find evk evd.undf_evars in let id_inst = Filter.filter_list filter evar_info.evar_hyps.env_named_var in let evar_info' = { evar_info with evar_filter = filter; evar_candidates = candidates; evar_source = (match src with None -> evar_info.evar_source | Some src -> src); evar_identity = Identity.make id_inst; } in let last_mods = match evd.conv_pbs with | [] -> evd.last_mods | _ -> Evar.Set.add evk evd.last_mods in let evar_names = EvNames.reassign_name_defined evk evk' evd.evar_names in let body = mkEvar(evk',id_inst) in let (defn_evars, undf_evars) = define_aux evd.defn_evars evd.undf_evars evk body in let evar_flags = inherit_evar_flags evd.evar_flags evk evk' in let evd = { evd with undf_evars = EvMap.add evk' evar_info' undf_evars; defn_evars; last_mods; evar_names; evar_flags } in (* Mark new evar as future goal, removing previous one, circumventing Proofview.advance but making Proof.run_tactic catch these. 
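Concretely, [evk] is unshelved and removed from the future goals before the fresh [evk'] is declared.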
*) let evd = unshelve evd [evk] in let evd = remove_future_goal evd evk in let evd = declare_future_goal evk' evd in (evd, evk') let update_source evd evk src = let evar_info = EvMap.find evk evd.undf_evars in let evar_info' = { evar_info with evar_source = src } in { evd with undf_evars = EvMap.add evk evar_info' evd.undf_evars } (**********************************************************) (* Accessing metas *) (** We use this function to overcome OCaml compiler limitations and to prevent the use of costly in-place modifications. *) let set_metas evd metas = { defn_evars = evd.defn_evars; undf_evars = evd.undf_evars; universes = evd.universes; conv_pbs = evd.conv_pbs; last_mods = evd.last_mods; evar_flags = evd.evar_flags; metas; effects = evd.effects; evar_names = evd.evar_names; future_goals = evd.future_goals; given_up = evd.given_up; shelf = evd.shelf; extras = evd.extras; } let meta_list evd = metamap_to_list evd.metas let undefined_metas evd = let filter = function | (n,Clval(_,_,typ)) -> None | (n,Cltyp (_,typ)) -> Some n in let m = List.map_filter filter (meta_list evd) in List.sort Int.compare m let map_metas_fvalue f evd = let map = function | Clval(id,(c,s),typ) -> Clval(id,(mk_freelisted (f c.rebus),s),typ) | x -> x in set_metas evd (Metamap.Smart.map map evd.metas) let map_metas f evd = let map cl = map_clb f cl in set_metas evd (Metamap.Smart.map map evd.metas) let meta_opt_fvalue evd mv = match Metamap.find mv evd.metas with | Clval(_,b,_) -> Some b | Cltyp _ -> None let meta_defined evd mv = match Metamap.find mv evd.metas with | Clval _ -> true | Cltyp _ -> false let try_meta_fvalue evd mv = match Metamap.find mv evd.metas with | Clval(_,b,_) -> b | Cltyp _ -> raise Not_found let meta_fvalue evd mv = try try_meta_fvalue evd mv with Not_found -> anomaly ~label:"meta_fvalue" (Pp.str "meta has no value.") let meta_value evd mv = (fst (try_meta_fvalue evd mv)).rebus let meta_ftype evd mv = match Metamap.find mv evd.metas with | Cltyp (_,b) -> b | Clval(_,_,b) -> b let meta_type evd mv = (meta_ftype evd mv).rebus let meta_type0 = meta_type let meta_declare mv v ?(name=Anonymous) evd = let metas = Metamap.add mv (Cltyp(name,mk_freelisted v)) evd.metas in set_metas evd metas (* If the meta is defined then forget its name *) let meta_name evd mv = try fst (clb_name (Metamap.find mv evd.metas)) with Not_found -> Anonymous let evar_source_of_meta mv evd = match meta_name evd mv with | Anonymous -> Loc.tag Evar_kinds.GoalEvar | Name id -> Loc.tag @@ Evar_kinds.VarInstance id let use_meta_source evd mv v = match Constr.kind v with | Evar (evk,_) -> let f = function | None -> None | Some evi as x -> match evi.evar_source with | None, Evar_kinds.GoalEvar -> Some { evi with evar_source = evar_source_of_meta mv evd } | _ -> x in { evd with undf_evars = EvMap.update evk f evd.undf_evars } | _ -> evd let meta_assign mv (v, pb) evd = let modify _ = function | Cltyp (na, ty) -> Clval (na, (mk_freelisted v, pb), ty) | _ -> anomaly ~label:"meta_assign" (Pp.str "already defined.") in let metas = Metamap.modify mv modify evd.metas in let evd = use_meta_source evd mv v in set_metas evd metas let meta_reassign mv (v, pb) evd = let modify _ = function | Clval(na, _, ty) -> Clval (na, (mk_freelisted v, pb), ty) | _ -> anomaly ~label:"meta_reassign" (Pp.str "not yet defined.") in let metas = Metamap.modify mv modify evd.metas in set_metas evd metas let clear_metas evd = {evd with metas = Metamap.empty} let meta_merge ?(with_univs = true) evd1 evd2 = let metas = Metamap.fold Metamap.add evd1.metas 
evd2.metas in let universes = if with_univs then UState.union evd2.universes evd1.universes else evd2.universes in {evd2 with universes; metas; } type metabinding = metavariable * constr * instance_status let retract_coercible_metas evd = let mc = ref [] in let map n v = match v with | Clval (na, (b, (Conv, CoerceToType as s)), typ) -> let () = mc := (n, b.rebus, s) :: !mc in Cltyp (na, typ) | v -> v in let metas = Metamap.Smart.mapi map evd.metas in !mc, set_metas evd metas let dependent_evar_ident ev evd = let evi = find evd ev in match evi.evar_source with | (_,Evar_kinds.VarInstance id) -> id | _ -> anomaly (str "Not an evar resulting of a dependent binding.") (**********************************************************) (* Extra data *) let get_extra_data evd = evd.extras let set_extra_data extras evd = { evd with extras } (*******************************************************************) type open_constr = evar_map * constr (*******************************************************************) (* The type constructor ['a sigma] adds an evar map to an object of type ['a] *) type 'a sigma = { it : 'a ; sigma : evar_map } let sig_it x = x.it let sig_sig x = x.sigma let on_sig s f = let sigma', v = f s.sigma in { s with sigma = sigma' }, v (*******************************************************************) (* The state monad with state an evar map. *) module MonadR = Monad.Make (struct type +'a t = evar_map -> evar_map * 'a let return a = fun s -> (s,a) let (>>=) x f = fun s -> let (s',a) = x s in f a s' let (>>) x y = fun s -> let (s',()) = x s in y s' let map f x = fun s -> on_snd f (x s) end) module Monad = Monad.Make (struct type +'a t = evar_map -> 'a * evar_map let return a = fun s -> (a,s) let (>>=) x f = fun s -> let (a,s') = x s in f a s' let (>>) x y = fun s -> let ((),s') = x s in y s' let map f x = fun s -> on_fst f (x s) end) (**********************************************************) (* Failure explanation *) type unsolvability_explanation = SeveralInstancesFound of int module MiniEConstr = struct module ESorts = struct type t = Sorts.t let make s = s let kind sigma = function | Sorts.Type u -> Sorts.sort_of_univ (normalize_universe sigma u) | s -> s let unsafe_to_sorts s = s end module EInstance = struct type t = Univ.Instance.t let make i = i let kind sigma i = if Univ.Instance.is_empty i then i else normalize_universe_instance sigma i let empty = Univ.Instance.empty let is_empty = Univ.Instance.is_empty let unsafe_to_instance t = t end type t = econstr let rec whd_evar sigma c = match Constr.kind c with | Evar ev -> begin match existential_opt_value sigma ev with | Some c -> whd_evar sigma c | None -> c end | App (f, args) when isEvar f -> (* Enforce smart constructor invariant on applications *) let ev = destEvar f in begin match existential_opt_value sigma ev with | None -> c | Some f -> whd_evar sigma (mkApp (f, args)) end | Cast (c0, k, t) when isEvar c0 -> (* Enforce smart constructor invariant on casts. 
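As with applications above, a cast whose head is a defined evar is rebuilt with the evar body and reduced again, so [whd_evar] never returns a [Cast] hiding an instantiated evar.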
*) let ev = destEvar c0 in begin match existential_opt_value sigma ev with | None -> c | Some c -> whd_evar sigma (mkCast (c, k, t)) end | _ -> c let kind sigma c = Constr.kind (whd_evar sigma c) let kind_upto = kind let of_kind = Constr.of_kind let of_constr c = c let of_constr_array v = v let unsafe_to_constr c = c let unsafe_to_constr_array v = v let unsafe_eq = Refl let to_constr_nocheck sigma c = let evar_value ev = existential_opt_value sigma ev in UnivSubst.nf_evars_and_universes_opt_subst evar_value (universe_subst sigma) c let to_constr_gen sigma c = let saw_evar = ref false in let evar_value ev = let v = existential_opt_value sigma ev in saw_evar := !saw_evar || Option.is_empty v; v in let c = UnivSubst.nf_evars_and_universes_opt_subst evar_value (universe_subst sigma) c in let saw_evar = if not !saw_evar then false else let exception SawEvar in let rec iter c = match Constr.kind c with | Evar _ -> raise SawEvar | _ -> Constr.iter iter c in try iter c; false with SawEvar -> true in saw_evar, c let to_constr ?(abort_on_undefined_evars=true) sigma c = if not abort_on_undefined_evars then to_constr_nocheck sigma c else let saw_evar, c = to_constr_gen sigma c in if saw_evar then anomaly ~label:"econstr" Pp.(str "grounding a non evar-free term"); c let to_constr_opt sigma c = let saw_evar, c = to_constr_gen sigma c in if saw_evar then None else Some c let of_named_decl d = d let unsafe_to_named_decl d = d let of_rel_decl d = d let unsafe_to_rel_decl d = d let to_rel_decl sigma d = Context.Rel.Declaration.map_constr (to_constr sigma) d let of_named_context d = d let of_rel_context d = d let unsafe_to_case_invert x = x let of_case_invert x = x end (** The following functions return the set of evars immediately contained in the object *) (* excluding defined evars *) let evars_of_term evd c = let rec evrec acc c = let c = MiniEConstr.whd_evar evd c in match kind c with | Evar (n, l) -> Evar.Set.add n (List.fold_left evrec acc l) | _ -> Constr.fold evrec acc c in evrec Evar.Set.empty c let evar_nodes_of_term c = let rec evrec acc c = match kind c with | Evar (n, l) -> Evar.Set.add n (List.fold_left evrec acc l) | _ -> Constr.fold evrec acc c in evrec Evar.Set.empty c let evars_of_named_context evd nc = Context.Named.fold_outside (NamedDecl.fold_constr (fun constr s -> Evar.Set.union s (evars_of_term evd constr))) nc ~init:Evar.Set.empty let evars_of_filtered_evar_info evd evi = Evar.Set.union (evars_of_term evd evi.evar_concl) (Evar.Set.union (match evi.evar_body with | Evar_empty -> Evar.Set.empty | Evar_defined b -> evars_of_term evd b) (evars_of_named_context evd (evar_filtered_context evi))) coq-8.15.0/engine/evd.mli000066400000000000000000000701301417001151100151050ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> bool (** Equality over filters *) val identity : t (** The identity filter. *) val filter_list : t -> 'a list -> 'a list (** Filter a list. Sizes must coincide. *) val filter_array : t -> 'a array -> 'a array (** Filter an array. Sizes must coincide. *) val extend : int -> t -> t (** [extend n f] extends [f] on the left with [n]-th times [true]. *) val compose : t -> t -> t (** Horizontal composition : [compose f1 f2] only keeps parts of [f2] where [f1] is set. In particular, [f1] and [f2] must have the same length. 
*) val apply_subfilter : t -> bool list -> t (** [apply_subfilter f1 f2] applies filter [f2] where [f1] is [true]. In particular, the length of [f2] is the number of times [f1] is [true] *) val restrict_upon : t -> int -> (int -> bool) -> t option (** Ad-hoc primitive. *) val map_along : (bool -> 'a -> bool) -> t -> 'a list -> t (** Apply the function on the filter and the list. Sizes must coincide. *) val make : bool list -> t (** Create out of a list *) val repr : t -> bool list option (** Observe as a bool list. *) end module Abstraction : sig type abstraction = | Abstract | Imitate type t = abstraction list val identity : t val abstract_last : t -> t end module Identity : sig type t (** Identity substitutions *) val make : econstr list -> t val none : unit -> t end (** {6 Evar infos} *) type evar_body = | Evar_empty | Evar_defined of econstr type evar_info = { evar_concl : econstr; (** Type of the evar. *) evar_hyps : named_context_val; (* TODO econstr? *) (** Context of the evar. *) evar_body : evar_body; (** Optional content of the evar. *) evar_filter : Filter.t; (** Boolean mask over {!evar_hyps}. Should have the same length. When filtered out, the corresponding variable is not allowed to occur in the solution *) evar_abstract_arguments : Abstraction.t; (** Boolean information over {!evar_hyps}, telling if an hypothesis instance can be imitated or should stay abstract in HO unification problems and inversion (see [second_order_matching_with_args] for its use). *) evar_source : Evar_kinds.t located; (** Information about the evar. *) evar_candidates : econstr list option; (** List of possible solutions when known that it is a finite list *) evar_identity : Identity.t; (** Default evar instance, i.e. a list of Var nodes projected from the filtered environment. *) } val make_evar : named_context_val -> etypes -> evar_info val evar_concl : evar_info -> econstr val evar_context : evar_info -> (econstr, etypes) Context.Named.pt val evar_filtered_context : evar_info -> (econstr, etypes) Context.Named.pt val evar_hyps : evar_info -> named_context_val val evar_filtered_hyps : evar_info -> named_context_val val evar_body : evar_info -> evar_body val evar_candidates : evar_info -> constr list option val evar_filter : evar_info -> Filter.t val evar_env : env -> evar_info -> env val evar_filtered_env : env -> evar_info -> env val evar_identity_subst : evar_info -> econstr list val map_evar_body : (econstr -> econstr) -> evar_body -> evar_body val map_evar_info : (econstr -> econstr) -> evar_info -> evar_info (** {6 Unification state} **) type evar_map (** Type of unification state. Essentially a bunch of state-passing data needed to handle incremental term construction. *) val empty : evar_map (** The empty evar map. *) val from_env : ?binders:lident list -> env -> evar_map (** The empty evar map with given universe context, taking its initial universes from env, possibly with initial universe binders. This is the main entry point at the beginning of the process of interpreting a declaration (e.g. before entering the interpretation of a Theorem statement). *) val from_ctx : UState.t -> evar_map (** The empty evar map with given universe context. This is the main entry point when resuming from a already interpreted declaration (e.g. after having interpreted a Theorem statement and preparing to open a goal). *) val is_empty : evar_map -> bool (** Whether an evarmap is empty. 
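Only evars, metas and conversion problems are taken into account; universes and the other bookkeeping fields are disregarded.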
*) val has_undefined : evar_map -> bool (** [has_undefined sigma] is [true] if and only if there are uninstantiated evars in [sigma]. *) val has_given_up : evar_map -> bool (** [has_given_up sigma] is [true] if and only if there are given up evars in [sigma]. *) val has_shelved : evar_map -> bool (** [has_shelved sigma] is [true] if and only if there are shelved evars in [sigma]. *) val new_evar : evar_map -> ?name:Id.t -> ?typeclass_candidate:bool -> evar_info -> evar_map * Evar.t (** Creates a fresh evar mapping to the given information. *) val add : evar_map -> Evar.t -> evar_info -> evar_map (** [add sigma ev info] adds [ev] with evar info [info] in sigma. Precondition: ev must not preexist in [sigma]. *) val find : evar_map -> Evar.t -> evar_info (** Recover the data associated to an evar. *) val find_undefined : evar_map -> Evar.t -> evar_info (** Same as {!find} but restricted to undefined evars. For efficiency reasons. *) val remove : evar_map -> Evar.t -> evar_map (** Remove an evar from an evar map. Use with caution. *) val mem : evar_map -> Evar.t -> bool (** Whether an evar is present in an evarmap. *) val fold : (Evar.t -> evar_info -> 'a -> 'a) -> evar_map -> 'a -> 'a (** Apply a function to all evars and their associated info in an evarmap. *) val fold_undefined : (Evar.t -> evar_info -> 'a -> 'a) -> evar_map -> 'a -> 'a (** Same as {!fold}, but restricted to undefined evars. For efficiency reasons. *) val raw_map : (Evar.t -> evar_info -> evar_info) -> evar_map -> evar_map (** Apply the given function to all evars in the map. Beware: this function expects the argument function to preserve the kind of [evar_body], i.e. it must send [Evar_empty] to [Evar_empty] and [Evar_defined c] to some [Evar_defined c']. *) val raw_map_undefined : (Evar.t -> evar_info -> evar_info) -> evar_map -> evar_map (** Same as {!raw_map}, but restricted to undefined evars. For efficiency reasons. *) val define : Evar.t -> econstr -> evar_map -> evar_map (** Set the body of an evar to the given constr. It is expected that: {ul {- The evar is already present in the evarmap.} {- The evar is not defined in the evarmap yet.} {- All the evars present in the constr should be present in the evar map.} } *) val define_with_evar : Evar.t -> econstr -> evar_map -> evar_map (** Same as [define ev body evd], except the body must be an existential variable [ev']. This additionally makes [ev'] inherit the [obligation] and [typeclass] flags of [ev]. *) val cmap : (econstr -> econstr) -> evar_map -> evar_map (** Map the function on all terms in the evar map. *) val is_evar : evar_map -> Evar.t-> bool (** Alias for {!mem}. *) val is_defined : evar_map -> Evar.t-> bool (** Whether an evar is defined in an evarmap. *) val is_undefined : evar_map -> Evar.t-> bool (** Whether an evar is not defined in an evarmap. *) val add_constraints : evar_map -> Univ.Constraints.t -> evar_map (** Add universe constraints in an evar map. *) val undefined_map : evar_map -> evar_info Evar.Map.t (** Access the undefined evar mapping directly. 
*) val drop_all_defined : evar_map -> evar_map val is_maybe_typeclass_hook : (evar_map -> constr -> bool) Hook.t (** {6 Instantiating partial terms} *) exception NotInstantiatedEvar val existential_value : evar_map -> econstr pexistential -> econstr (** [existential_value sigma ev] raises [NotInstantiatedEvar] if [ev] has no body and [Not_found] if it does not exist in [sigma] *) val existential_value0 : evar_map -> existential -> constr val existential_type : evar_map -> econstr pexistential -> etypes val existential_type0 : evar_map -> existential -> types val existential_opt_value : evar_map -> econstr pexistential -> econstr option (** Same as {!existential_value} but returns an option instead of raising an exception. *) val existential_opt_value0 : evar_map -> existential -> constr option val evar_instance_array : (Constr.named_declaration -> 'a -> bool) -> evar_info -> 'a list -> (Id.t * 'a) list val instantiate_evar_array : evar_info -> econstr -> econstr list -> econstr val evars_reset_evd : ?with_conv_pbs:bool -> ?with_univs:bool -> evar_map -> evar_map -> evar_map (** spiwack: this function seems to somewhat break the abstraction. *) (** {6 Misc} *) val restrict : Evar.t-> Filter.t -> ?candidates:econstr list -> ?src:Evar_kinds.t located -> evar_map -> evar_map * Evar.t (** Restrict an undefined evar into a new evar by filtering context and possibly limiting the instances to a set of candidates (candidates are filtered according to the filter) *) val update_source : evar_map -> Evar.t -> Evar_kinds.t located -> evar_map (** To update the source a posteriori, e.g. when an evar type of another evar has to refer to this other evar, with a mutual dependency *) val get_aliased_evars : evar_map -> Evar.t Evar.Map.t (** The map of aliased evars *) val is_aliased_evar : evar_map -> Evar.t -> Evar.t option (** Tell if an evar has been aliased to another evar, and if yes, which *) val set_typeclass_evars : evar_map -> Evar.Set.t -> evar_map (** Mark the given set of evars as available for resolution. Precondition: they should indeed refer to undefined typeclass evars. *) val get_typeclass_evars : evar_map -> Evar.Set.t (** The set of undefined typeclass evars *) val is_typeclass_evar : evar_map -> Evar.t -> bool (** Is the evar declared resolvable for typeclass resolution *) val get_obligation_evars : evar_map -> Evar.Set.t (** The set of obligation evars *) val set_obligation_evar : evar_map -> Evar.t -> evar_map (** Declare an evar as an obligation *) val is_obligation_evar : evar_map -> Evar.t -> bool (** Is the evar declared as an obligation *) val downcast : Evar.t-> etypes -> evar_map -> evar_map (** Change the type of an undefined evar to a new type assumed to be a subtype of its current type; subtyping must be ensured by caller *) val evar_source : Evar.t -> evar_map -> Evar_kinds.t located (** Convenience function. Wrapper around {!find} to recover the source of an evar in a given evar map. 
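As with {!find}, [Not_found] is raised if the evar is not declared in the map.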
*) val evar_ident : Evar.t -> evar_map -> Id.t option val rename : Evar.t -> Id.t -> evar_map -> evar_map val evar_key : Id.t -> evar_map -> Evar.t val evar_source_of_meta : metavariable -> evar_map -> Evar_kinds.t located val dependent_evar_ident : Evar.t -> evar_map -> Id.t (** {5 Side-effects} *) type side_effect_role = | Schema of inductive * string type side_effects = { seff_private : Safe_typing.private_constants; seff_roles : side_effect_role Cmap.t; } val empty_side_effects : side_effects val concat_side_effects : side_effects -> side_effects -> side_effects val emit_side_effects : side_effects -> evar_map -> evar_map (** Push a side-effect into the evar map. *) val eval_side_effects : evar_map -> side_effects (** Return the effects contained in the evar map. *) val drop_side_effects : evar_map -> evar_map (** This should not be used. For hacking purposes. *) (** {5 Future goals} *) val declare_future_goal : Evar.t -> evar_map -> evar_map (** Adds an existential variable to the list of future goals. For internal uses only. *) val declare_principal_goal : Evar.t -> evar_map -> evar_map (** Adds an existential variable to the list of future goals and make it principal. Only one existential variable can be made principal, an error is raised otherwise. For internal uses only. *) module FutureGoals : sig type t = private { comb : Evar.t list; principal : Evar.t option; (** if [Some e], [e] must be contained in [future_comb]. The evar [e] will inherit properties (now: the name) of the evar which will be instantiated with a term containing [e]. *) } val map_filter : (Evar.t -> Evar.t option) -> t -> t (** Applies a function on the future goals *) val filter : (Evar.t -> bool) -> t -> t (** Applies a filter on the future goals *) end val push_future_goals : evar_map -> evar_map val pop_future_goals : evar_map -> FutureGoals.t * evar_map val fold_future_goals : (evar_map -> Evar.t -> evar_map) -> evar_map -> evar_map val remove_future_goal : evar_map -> Evar.t -> evar_map val pr_future_goals_stack : evar_map -> Pp.t val push_shelf : evar_map -> evar_map val pop_shelf : evar_map -> Evar.t list * evar_map val filter_shelf : (Evar.t -> bool) -> evar_map -> evar_map val give_up : Evar.t -> evar_map -> evar_map val shelve : evar_map -> Evar.t list -> evar_map val unshelve : evar_map -> Evar.t list -> evar_map val given_up : evar_map -> Evar.Set.t val shelf : evar_map -> Evar.t list val pr_shelf : evar_map -> Pp.t (** {5 Sort variables} Evar maps also keep track of the universe constraints defined at a given point. This section defines the relevant manipulation functions. *) exception UniversesDiffer val add_universe_constraints : evar_map -> UnivProblem.Set.t -> evar_map (** Add the given universe unification constraints to the evar map. @raise UniversesDiffer in case a first-order unification fails. @raise UniverseInconsistency . *) (** {5 Extra data} Evar maps can contain arbitrary data, allowing to use an extensible state. As evar maps are theoretically used in a strict state-passing style, such additional data should be passed along transparently. Some old and bug-prone code tends to drop them nonetheless, so you should keep cautious. *) module Store : Store.S (** Datatype used to store additional information in evar maps. *) val get_extra_data : evar_map -> Store.t val set_extra_data : Store.t -> evar_map -> evar_map (** {5 Enriching with evar maps} *) type 'a sigma = { it : 'a ; (** The base object. *) sigma : evar_map (** The added unification state. 
*) } (** The type constructor ['a sigma] adds an evar map to an object of type ['a]. *) val sig_it : 'a sigma -> 'a val sig_sig : 'a sigma -> evar_map val on_sig : 'a sigma -> (evar_map -> evar_map * 'b) -> 'a sigma * 'b (** {5 The state monad with state an evar map} *) module MonadR : Monad.S with type +'a t = evar_map -> evar_map * 'a module Monad : Monad.S with type +'a t = evar_map -> 'a * evar_map (** {5 Meta machinery} These functions are almost deprecated. They were used before the introduction of the full-fledged evar calculus. In an ideal world, they should be removed. Alas, some parts of the code still use them. Do not use in newly-written code. *) module Metaset : Set.S with type elt = metavariable module Metamap : Map.ExtS with type key = metavariable and module Set := Metaset type 'a freelisted = { rebus : 'a; freemetas : Metaset.t } val metavars_of : econstr -> Metaset.t val mk_freelisted : econstr -> econstr freelisted val map_fl : ('a -> 'b) -> 'a freelisted -> 'b freelisted (** Status of an instance found by unification wrt to the meta it solves: - a supertype of the meta (e.g. the solution to ?X <= T is a supertype of ?X) - a subtype of the meta (e.g. the solution to T <= ?X is a supertype of ?X) - a term that can be eta-expanded n times while still being a solution (e.g. the solution [P] to [?X u v = P u v] can be eta-expanded twice) *) type instance_constraint = IsSuperType | IsSubType | Conv val eq_instance_constraint : instance_constraint -> instance_constraint -> bool (** Status of the unification of the type of an instance against the type of the meta it instantiates: - CoerceToType means that the unification of types has not been done and that a coercion can still be inserted: the meta should not be substituted freely (this happens for instance given via the "with" binding clause). - TypeProcessed means that the information obtainable from the unification of types has been extracted. - TypeNotProcessed means that the unification of types has not been done but it is known that no coercion may be inserted: the meta can be substituted freely. *) type instance_typing_status = CoerceToType | TypeNotProcessed | TypeProcessed (** Status of an instance together with the status of its type unification *) type instance_status = instance_constraint * instance_typing_status (** Clausal environments *) type clbinding = | Cltyp of Name.t * econstr freelisted | Clval of Name.t * (econstr freelisted * instance_status) * econstr freelisted (** Unification constraints *) type conv_pb = Reduction.conv_pb type evar_constraint = conv_pb * env * econstr * econstr (** The following two functions are for internal use only, see [Evarutil.add_unification_pb] for a safe interface. *) val add_conv_pb : ?tail:bool -> evar_constraint -> evar_map -> evar_map val conv_pbs : evar_map -> evar_constraint list val extract_changed_conv_pbs : evar_map -> (Evar.Set.t -> evar_constraint -> bool) -> evar_map * evar_constraint list val extract_all_conv_pbs : evar_map -> evar_map * evar_constraint list val loc_of_conv_pb : evar_map -> evar_constraint -> Loc.t option (** The following functions return the set of undefined evars contained in the object. *) val evars_of_term : evar_map -> econstr -> Evar.Set.t (** including evars in instances of evars *) val evar_nodes_of_term : econstr -> Evar.Set.t (** same as evars_of_term but also including defined evars. 
For use in printing dependent evars *) val evars_of_named_context : evar_map -> (econstr, etypes) Context.Named.pt -> Evar.Set.t val evars_of_filtered_evar_info : evar_map -> evar_info -> Evar.Set.t (** Metas *) val meta_list : evar_map -> (metavariable * clbinding) list val meta_defined : evar_map -> metavariable -> bool val meta_value : evar_map -> metavariable -> econstr (** [meta_fvalue] raises [Not_found] if meta not in map or [Anomaly] if meta has no value *) val meta_fvalue : evar_map -> metavariable -> econstr freelisted * instance_status val meta_opt_fvalue : evar_map -> metavariable -> (econstr freelisted * instance_status) option val meta_type : evar_map -> metavariable -> etypes val meta_type0 : evar_map -> metavariable -> types val meta_ftype : evar_map -> metavariable -> etypes freelisted val meta_name : evar_map -> metavariable -> Name.t val meta_declare : metavariable -> etypes -> ?name:Name.t -> evar_map -> evar_map val meta_assign : metavariable -> econstr * instance_status -> evar_map -> evar_map val meta_reassign : metavariable -> econstr * instance_status -> evar_map -> evar_map val clear_metas : evar_map -> evar_map (** [meta_merge evd1 evd2] returns [evd2] extended with the metas of [evd1] *) val meta_merge : ?with_univs:bool -> evar_map -> evar_map -> evar_map val undefined_metas : evar_map -> metavariable list val map_metas_fvalue : (econstr -> econstr) -> evar_map -> evar_map val map_metas : (econstr -> econstr) -> evar_map -> evar_map type metabinding = metavariable * econstr * instance_status val retract_coercible_metas : evar_map -> metabinding list * evar_map (** {5 FIXME: Nothing to do here} *) (********************************************************* Sort/universe variables *) (** Rigid or flexible universe variables. [UnivRigid] variables are user-provided or come from an explicit [Type] in the source, we do not minimize them or unify them eagerly. [UnivFlexible alg] variables are fresh universe variables of polymorphic constants or generated during refinement, sometimes in algebraic position (i.e. not appearing in the term at the moment of creation). They are the candidates for minimization (if alg, to an algebraic universe) and unified eagerly in the first-order unification heurstic. *) type rigid = UState.rigid = | UnivRigid | UnivFlexible of bool (** Is substitution by an algebraic ok? *) val univ_rigid : rigid val univ_flexible : rigid val univ_flexible_alg : rigid type 'a in_evar_universe_context = 'a * UState.t val restrict_universe_context : evar_map -> Univ.Level.Set.t -> evar_map (** Raises Not_found if not a name for a universe in this map. *) val universe_of_name : evar_map -> Id.t -> Univ.Level.t val universe_binders : evar_map -> UnivNames.universe_binders val new_univ_level_variable : ?loc:Loc.t -> ?name:Id.t -> rigid -> evar_map -> evar_map * Univ.Level.t val new_univ_variable : ?loc:Loc.t -> ?name:Id.t -> rigid -> evar_map -> evar_map * Univ.Universe.t val new_sort_variable : ?loc:Loc.t -> ?name:Id.t -> rigid -> evar_map -> evar_map * Sorts.t val add_global_univ : evar_map -> Univ.Level.t -> evar_map val universe_rigidity : evar_map -> Univ.Level.t -> rigid val make_flexible_variable : evar_map -> algebraic:bool -> Univ.Level.t -> evar_map (** See [UState.make_flexible_variable] *) val make_nonalgebraic_variable : evar_map -> Univ.Level.t -> evar_map (** See [UState.make_nonalgebraic_variable]. 
*) val is_sort_variable : evar_map -> Sorts.t -> Univ.Level.t option (** [is_sort_variable evm s] returns [Some u] or [None] if [s] is not a local sort variable declared in [evm] *) val is_flexible_level : evar_map -> Univ.Level.t -> bool (* val normalize_universe_level : evar_map -> Univ.Level.t -> Univ.Level.t *) val normalize_universe : evar_map -> Univ.Universe.t -> Univ.Universe.t val normalize_universe_instance : evar_map -> Univ.Instance.t -> Univ.Instance.t val set_leq_sort : env -> evar_map -> Sorts.t -> Sorts.t -> evar_map val set_eq_sort : env -> evar_map -> Sorts.t -> Sorts.t -> evar_map val set_eq_level : evar_map -> Univ.Level.t -> Univ.Level.t -> evar_map val set_leq_level : evar_map -> Univ.Level.t -> Univ.Level.t -> evar_map val set_eq_instances : ?flex:bool -> evar_map -> Univ.Instance.t -> Univ.Instance.t -> evar_map val check_eq : evar_map -> Univ.Universe.t -> Univ.Universe.t -> bool val check_leq : evar_map -> Univ.Universe.t -> Univ.Universe.t -> bool val check_constraints : evar_map -> Univ.Constraints.t -> bool val evar_universe_context : evar_map -> UState.t val universe_context_set : evar_map -> Univ.ContextSet.t val universe_subst : evar_map -> UnivSubst.universe_opt_subst val universes : evar_map -> UGraph.t (** [to_universe_context evm] extracts the local universes and constraints of [evm] and orders the universes the same as [Univ.ContextSet.to_context]. *) val to_universe_context : evar_map -> Univ.UContext.t val univ_entry : poly:bool -> evar_map -> UState.named_universes_entry val check_univ_decl : poly:bool -> evar_map -> UState.universe_decl -> UState.named_universes_entry val merge_universe_context : evar_map -> UState.t -> evar_map val set_universe_context : evar_map -> UState.t -> evar_map val merge_context_set : ?loc:Loc.t -> ?sideff:bool -> rigid -> evar_map -> Univ.ContextSet.t -> evar_map val with_context_set : ?loc:Loc.t -> rigid -> evar_map -> 'a Univ.in_universe_context_set -> evar_map * 'a val nf_univ_variables : evar_map -> evar_map val fix_undefined_variables : evar_map -> evar_map (** Universe minimization *) val minimize_universes : evar_map -> evar_map (** Lift [UState.update_sigma_univs] *) val update_sigma_univs : UGraph.t -> evar_map -> evar_map (** Polymorphic universes *) val fresh_sort_in_family : ?loc:Loc.t -> ?rigid:rigid -> evar_map -> Sorts.family -> evar_map * Sorts.t val fresh_constant_instance : ?loc:Loc.t -> ?rigid:rigid -> env -> evar_map -> Constant.t -> evar_map * pconstant val fresh_inductive_instance : ?loc:Loc.t -> ?rigid:rigid -> env -> evar_map -> inductive -> evar_map * pinductive val fresh_constructor_instance : ?loc:Loc.t -> ?rigid:rigid -> env -> evar_map -> constructor -> evar_map * pconstructor val fresh_array_instance : ?loc:Loc.t -> ?rigid:rigid -> env -> evar_map -> evar_map * Univ.Instance.t val fresh_global : ?loc:Loc.t -> ?rigid:rigid -> ?names:Univ.Instance.t -> env -> evar_map -> GlobRef.t -> evar_map * econstr (********************************************************************) (* constr with holes and pending resolution of classes, conversion *) (* problems, candidates, etc. *) type open_constr = evar_map * econstr (* Special case when before is empty *) (** Partially constructed constrs. *) type unsolvability_explanation = SeveralInstancesFound of int (** Failure explanation. *) (** {5 Summary names} *) (* This stuff is internal and should not be used. Currently a hack in the STM relies on it. 
*) val evar_counter_summary_tag : int Summary.Dyn.tag (** {5 Deprecated functions} *) val create_evar_defs : evar_map -> evar_map (* XXX: This is supposed to be deprecated by used by ssrmatching, what should the replacement be? *) (** Create an [evar_map] with empty meta map: *) (** Use this module only to bootstrap EConstr *) module MiniEConstr : sig module ESorts : sig type t val make : Sorts.t -> t val kind : evar_map -> t -> Sorts.t val unsafe_to_sorts : t -> Sorts.t end module EInstance : sig type t val make : Univ.Instance.t -> t val kind : evar_map -> t -> Univ.Instance.t val empty : t val is_empty : t -> bool val unsafe_to_instance : t -> Univ.Instance.t end type t = econstr val kind : evar_map -> t -> (t, t, ESorts.t, EInstance.t) Constr.kind_of_term val kind_upto : evar_map -> constr -> (constr, types, Sorts.t, Univ.Instance.t) Constr.kind_of_term val whd_evar : evar_map -> t -> t val of_kind : (t, t, ESorts.t, EInstance.t) Constr.kind_of_term -> t val of_constr : Constr.t -> t val of_constr_array : Constr.t array -> t array val to_constr : ?abort_on_undefined_evars:bool -> evar_map -> t -> Constr.t val to_constr_opt : evar_map -> t -> Constr.t option val unsafe_to_constr : t -> Constr.t val unsafe_to_constr_array : t array -> Constr.t array val unsafe_eq : (t, Constr.t) eq val of_named_decl : (Constr.t, Constr.types) Context.Named.Declaration.pt -> (t, t) Context.Named.Declaration.pt val unsafe_to_named_decl : (t, t) Context.Named.Declaration.pt -> (Constr.t, Constr.types) Context.Named.Declaration.pt val unsafe_to_rel_decl : (t, t) Context.Rel.Declaration.pt -> (Constr.t, Constr.types) Context.Rel.Declaration.pt val of_case_invert : constr pcase_invert -> econstr pcase_invert val unsafe_to_case_invert : econstr pcase_invert -> constr pcase_invert val of_rel_decl : (Constr.t, Constr.types) Context.Rel.Declaration.pt -> (t, t) Context.Rel.Declaration.pt val to_rel_decl : evar_map -> (t, t) Context.Rel.Declaration.pt -> (Constr.t, Constr.types) Context.Rel.Declaration.pt val of_named_context : (Constr.t, Constr.types) Context.Named.pt -> (t, t) Context.Named.pt val of_rel_context : (Constr.t, Constr.types) Context.Rel.pt -> (t, t) Context.Rel.pt end coq-8.15.0/engine/ftactic.ml000066400000000000000000000070201417001151100155710ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* b t) : b t = m >>= function | Uniform x -> f x | Depends l -> let f arg = f arg >>= function | Uniform x -> (* We dispatch the uniform result on each goal under focus, as we know that the [m] argument was actually dependent. *) Proofview.Goal.goals >>= fun goals -> let ans = List.map (fun g -> (g,x)) goals in Proofview.tclUNIT ans | Depends l -> Proofview.Goal.goals >>= fun goals -> Proofview.tclUNIT (List.combine goals l) in (* After the tactic has run, some goals which were previously produced may have been solved by side effects. The values attached to such goals must be discarded, otherwise the list of result would not have the same length as the list of focused goals, which is an invariant of the [Ftactic] module. It is the reason why a goal is attached to each result above. 
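The [filter] function below keeps a result only if its attached goal is still unsolved, thereby restoring that invariant.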
*) let filter (g,x) = g >>= fun g -> Proofview.Goal.unsolved g >>= function | true -> Proofview.tclUNIT (Some x) | false -> Proofview.tclUNIT None in Proofview.tclDISPATCHL (List.map f l) >>= fun l -> Proofview.Monad.List.map_filter filter (List.concat l) >>= fun filtered -> Proofview.tclUNIT (Depends filtered) let goals = Proofview.Goal.goals >>= fun l -> Proofview.tclUNIT (Depends l) let enter f = bind goals (fun gl -> gl >>= fun gl -> Proofview.V82.wrap_exceptions (fun () -> f gl)) let with_env t = t >>= function | Uniform a -> Proofview.tclENV >>= fun env -> Proofview.tclUNIT (Uniform (env,a)) | Depends l -> Proofview.Goal.goals >>= fun gs -> Proofview.Monad.(List.map (map Proofview.Goal.env) gs) >>= fun envs -> Proofview.tclUNIT (Depends (List.combine envs l)) let lift (type a) (t:a Proofview.tactic) : a t = Proofview.tclBIND t (fun x -> Proofview.tclUNIT (Uniform x)) (** If the tactic returns unit, we can focus on the goals if necessary. *) let run m k = m >>= function | Uniform v -> k v | Depends l -> let tacs = List.map k l in Proofview.tclDISPATCH tacs let (>>=) = bind let (<*>) = fun m n -> bind m (fun () -> n) module Self = struct type 'a t = 'a focus Proofview.tactic let return = return let (>>=) = bind let (>>) = (<*>) let map f x = x >>= fun a -> return (f a) end module Ftac = Monad.Make(Self) module List = Ftac.List module Notations = struct let (>>=) = bind let (<*>) = fun m n -> bind m (fun () -> n) end coq-8.15.0/engine/ftactic.mli000066400000000000000000000051271417001151100157500ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a t (** The unit of the monad. *) val bind : 'a t -> ('a -> 'b t) -> 'b t (** The bind of the monad. *) (** {5 Operations} *) val lift : 'a Proofview.tactic -> 'a t (** Transform a tactic into a focussing tactic. The resulting tactic is not focused. *) val run : 'a t -> ('a -> unit Proofview.tactic) -> unit Proofview.tactic (** Given a continuation producing a tactic, evaluates the focussing tactic. If the tactic has not focused, then the continuation is evaluated once. Otherwise it is called in each of the currently focused goals. *) (** {5 Focussing} *) (** Enter a goal. The resulting tactic is focused. *) val enter : (Proofview.Goal.t -> 'a t) -> 'a t (** Enter a goal, without evar normalization. The resulting tactic is focused. *) val with_env : 'a t -> (Environ.env*'a) t (** [with_env t] returns, in addition to the return type of [t], an environment, which is the global environment if [t] does not focus on goals, or the local goal environment if [t] focuses on goals. *) (** {5 Notations} *) val (>>=) : 'a t -> ('a -> 'b t) -> 'b t (** Notation for {!bind}. *) val (<*>) : unit t -> 'a t -> 'a t (** Sequence. *) (** {5 List operations} *) module List : Monad.ListS with type 'a t := 'a t (** {5 Notations} *) module Notations : sig val (>>=) : 'a t -> ('a -> 'b t) -> 'b t val (<*>) : unit t -> 'a t -> 'a t end coq-8.15.0/engine/logic_monad.ml000066400000000000000000000272531417001151100164410ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Some (CErrors.print e) | TacticFailure e -> Some (CErrors.print e) | _ -> None end (** {6 Non-logical layer} *) (** The non-logical monad is a simple [unit -> 'a] (i/o) monad. 
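A computation is thus a mere thunk: nothing happens until it is forced with [run]. For instance, one might (hypothetically) combine the wrappers below as
{[
  (* hypothetical usage sketch; [ask] and [answer] are made-up names *)
  let ask : string NonLogical.t =
    NonLogical.(print_notice (Pp.str "ready") >> read_line)
  let answer : string = NonLogical.run ask
]}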
The operations are simple wrappers around corresponding usual operations and require little documentation. *) module NonLogical = struct (* The functions in this module follow the pattern that they are defined with the form [(); fun ()->...]. This is an optimisation which signals to the compiler that the function is usually partially applied up to the [();]. Without this annotation, partial applications can be significantly slower. Documentation of this behaviour can be found at: https://ocaml.janestreet.com/?q=node/30 *) include Monad.Make(struct type 'a t = unit -> 'a let return a = (); fun () -> a let (>>=) a k = (); fun () -> k (a ()) () let (>>) a k = (); fun () -> a (); k () let map f a = (); fun () -> f (a ()) end) type 'a ref = 'a Util.pervasives_ref let ignore a = (); fun () -> ignore (a ()) let ref a = (); fun () -> ref a (** [Pervasives.(:=)] *) let (:=) r a = (); fun () -> r := a (** [Pervasives.(!)] *) let (!) = fun r -> (); fun () -> ! r (** [Pervasives.raise]. Except that exceptions are wrapped with {!Exception}. *) let raise (e, info) () = Exninfo.iraise (Exception e, info) (** [try ... with ...] but restricted to {!Exception}. *) let catch = fun s h -> (); fun () -> try s () with Exception e as src -> let (src, info) = Exninfo.capture src in h (e, info) () let read_line = fun () -> try read_line () with e -> let (e, info) = Exninfo.capture e in raise (e,info) () let print_char = fun c -> (); fun () -> print_char c let timeout = fun n t -> (); fun () -> Control.timeout n t () let make f = (); fun () -> try f () with e when CErrors.noncritical e -> let (e, info) = Exninfo.capture e in Exninfo.iraise (Exception e, info) (** Use the current logger. The buffer is also flushed. *) let print_debug s = make (fun _ -> Feedback.msg_debug s) let print_info s = make (fun _ -> Feedback.msg_info s) let print_warning s = make (fun _ -> Feedback.msg_warning s) let print_notice s = make (fun _ -> Feedback.msg_notice s) let run = fun x -> try x () with Exception e as src -> let (src, info) = Exninfo.capture src in Exninfo.iraise (e, info) end (** {6 Logical layer} *) (** The logical monad is a backtracking monad on top of which is layered a state monad (which is used to implement all of read/write, read only, and write only effects). The state monad being layered on top of the backtracking monad makes it so that the state is backtracked on failure. Backtracking differs from regular exception in that, writing (+) for exception catching and (>>=) for bind, we require the following extra distributivity laws: x+(y+z) = (x+y)+z zero+x = x x+zero = x (x+y)>>=k = (x>>=k)+(y>>=k) *) (** A view type for the logical monad, which is a form of list, hence we can decompose it with as a list. *) type ('a, 'b, 'e) list_view = | Nil of 'e | Cons of 'a * ('e -> 'b) module BackState = struct (** Double-continuation backtracking monads are reasonable folklore for "search" implementations (including the Tac interactive prover's tactics). Yet it's quite hard to wrap your head around these. I recommend reading a few times the "Backtracking, Interleaving, and Terminating Monad Transformers" paper by O. Kiselyov, C. Shan, D. Friedman, and A. Sabry. The peculiar shape of the monadic type is reminiscent of that of the continuation monad transformer. The paper also contains the rationale for the [split] abstraction. An explanation of how to derive such a monad from mathematical principles can be found in "Kan Extensions for Program Optimisation" by Ralf Hinze. 
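In the record below, [iolist] is given the incoming state, a failure continuation [nil] and a success continuation [cons]: [zero] calls [nil], [return] calls [cons] once, and [plus m1 m2] runs [m1] with [m2] installed as its failure continuation.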
A somewhat concrete view is that the type ['a iolist] is, in fact the impredicative encoding of the following stream type: [type 'a _iolist' = Nil of exn | Cons of 'a*'a iolist' and 'a iolist = 'a _iolist NonLogical.t] Using impredicative encoding avoids intermediate allocation and is, empirically, very efficient in Ocaml. It also has the practical benefit that the monadic operation are independent of the underlying monad, which simplifies the code and side-steps the limited inlining of Ocaml. In that vision, [bind] is simply [concat_map] (though the cps version is significantly simpler), [plus] is concatenation, and [split] is pattern-matching. *) type ('a, 'i, 'o, 'e) t = { iolist : 'r. 'i -> ('e -> 'r NonLogical.t) -> ('a -> 'o -> ('e -> 'r NonLogical.t) -> 'r NonLogical.t) -> 'r NonLogical.t } let return x = { iolist = fun s nil cons -> cons x s nil } let (>>=) m f = { iolist = fun s nil cons -> m.iolist s nil (fun x s next -> (f x).iolist s next cons) } let (>>) m f = { iolist = fun s nil cons -> m.iolist s nil (fun () s next -> f.iolist s next cons) } let map f m = { iolist = fun s nil cons -> m.iolist s nil (fun x s next -> cons (f x) s next) } let zero e = { iolist = fun _ nil cons -> nil e } let plus m1 m2 = { iolist = fun s nil cons -> m1.iolist s (fun e -> (m2 e).iolist s nil cons) cons } let ignore m = { iolist = fun s nil cons -> m.iolist s nil (fun _ s next -> cons () s next) } let lift m = { iolist = fun s nil cons -> NonLogical.(m >>= fun x -> cons x s nil) } (** State related *) let get = { iolist = fun s nil cons -> cons s s nil } let set s = { iolist = fun _ nil cons -> cons () s nil } let modify f = { iolist = fun s nil cons -> cons () (f s) nil } (** Exception manipulation *) let interleave src dst m = { iolist = fun s nil cons -> m.iolist s (fun e1 -> nil (src e1)) (fun x s next -> cons x s (fun e2 -> next (dst e2))) } (** List observation *) let once m = { iolist = fun s nil cons -> m.iolist s nil (fun x s _ -> cons x s nil) } let break f m = { iolist = fun s nil cons -> m.iolist s nil (fun x s next -> cons x s (fun e -> match f e with None -> next e | Some e -> nil e)) } (** For [reflect] and [split] see the "Backtracking, Interleaving, and Terminating Monad Transformers" paper. *) type ('a, 'e) reified = ('a, ('a, 'e) reified, 'e) list_view NonLogical.t let rec reflect (m : ('a * 'o, 'e) reified) = { iolist = fun s0 nil cons -> let next = function | Nil e -> nil e | Cons ((x, s), l) -> cons x s (fun e -> (reflect (l e)).iolist s0 nil cons) in NonLogical.(m >>= next) } let split m : ((_, _, _) list_view, _, _, _) t = let rnil e = NonLogical.return (Nil e) in let rcons p s l = NonLogical.return (Cons ((p, s), l)) in { iolist = fun s nil cons -> let open NonLogical in m.iolist s rnil rcons >>= begin function | Nil e -> cons (Nil e) s nil | Cons ((x, s), l) -> let l e = reflect (l e) in cons (Cons (x, l)) s nil end } let run m s = let rnil e = NonLogical.return (Nil e) in let rcons x s l = let p = (x, s) in NonLogical.return (Cons (p, l)) in m.iolist s rnil rcons let repr x = x end module type Param = sig (** Read only *) type e (** Write only *) type w (** [w] must be a monoid *) val wunit : w val wprod : w -> w -> w (** Read-write *) type s (** Update-only. Essentially a writer on [u->u]. *) type u (** [u] must be pointed. 
*) val uunit : u end module Logical (P:Param) = struct module Unsafe = struct (** All three of environment, writer and state are coded as a single state-passing-style monad.*) type state = { rstate : P.e; ustate : P.u; wstate : P.w; sstate : P.s; } let make m = m let repr m = m end open Unsafe type state = Unsafe.state type iexn = Exninfo.iexn type 'a reified = ('a, iexn) BackState.reified (** Inherited from Backstate *) open BackState include Monad.Make(struct type 'a t = ('a, state, state, iexn) BackState.t let return = BackState.return let (>>=) = BackState.(>>=) let (>>) = BackState.(>>) let map = BackState.map end) let zero = BackState.zero let plus = BackState.plus let ignore = BackState.ignore let lift = BackState.lift let once = BackState.once let break = BackState.break let split = BackState.split let repr = BackState.repr (** State related. We specialize them here to ensure soundness (for reader and writer) and efficiency. *) let get = { iolist = fun s nil cons -> cons s.sstate s nil } let set (sstate : P.s) = { iolist = fun s nil cons -> cons () { s with sstate } nil } let modify (f : P.s -> P.s) = { iolist = fun s nil cons -> cons () { s with sstate = f s.sstate } nil } let current = { iolist = fun s nil cons -> cons s.rstate s nil } let local e m = { iolist = fun s nil cons -> m.iolist { s with rstate = e } nil (fun x s' next -> cons x {s' with rstate = s.rstate} next) } let put w = { iolist = fun s nil cons -> cons () { s with wstate = P.wprod s.wstate w } nil } let update (f : P.u -> P.u) = { iolist = fun s nil cons -> cons () { s with ustate = f s.ustate } nil } (** Monadic run is specialized to handle reader / writer *) let run m r s = let s = { wstate = P.wunit; ustate = P.uunit; rstate = r; sstate = s } in let rnil e = NonLogical.return (Nil e) in let rcons x s l = let p = (x, s.sstate, s.wstate, s.ustate) in NonLogical.return (Cons (p, l)) in m.iolist s rnil rcons end coq-8.15.0/engine/logic_monad.mli000066400000000000000000000152671417001151100166140ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a] (i/o) monad. The operations are simple wrappers around corresponding usual operations and require little documentation. *) module NonLogical : sig include Monad.S val ignore : 'a t -> unit t type 'a ref val ref : 'a -> 'a ref t (** [Pervasives.(:=)] *) val (:=) : 'a ref -> 'a -> unit t (** [Pervasives.(!)] *) val (!) : 'a ref -> 'a t val read_line : string t val print_char : char -> unit t (** Loggers. The buffer is also flushed. *) val print_debug : Pp.t -> unit t val print_warning : Pp.t -> unit t val print_notice : Pp.t -> unit t val print_info : Pp.t -> unit t (** [Pervasives.raise]. Except that exceptions are wrapped with {!Exception}. *) val raise : Exninfo.iexn -> 'a t (** [try ... with ...] but restricted to {!Exception}. *) val catch : 'a t -> (Exninfo.iexn -> 'a t) -> 'a t val timeout : float -> 'a t -> 'a option t (** Construct a monadified side-effect. Exceptions raised by the argument are wrapped with {!Exception}. *) val make : (unit -> 'a) -> 'a t (** [run] performs effects. *) val run : 'a t -> 'a end (** {6 Logical layer} *) (** The logical monad is a backtracking monad on top of which is layered a state monad (which is used to implement all of read/write, read only, and write only effects). 
The state monad being layered on top of the backtracking monad makes it so that the state is backtracked on failure. Backtracking differs from regular exception in that, writing (+) for exception catching and (>>=) for bind, we require the following extra distributivity laws: x+(y+z) = (x+y)+z zero+x = x x+zero = x (x+y)>>=k = (x>>=k)+(y>>=k) *) (** A view type for the logical monad, which is a form of list, hence we can decompose it with as a list. *) type ('a, 'b, 'e) list_view = | Nil of 'e | Cons of 'a * ('e -> 'b) module BackState : sig type (+'a, -'i, +'o, 'e) t val return : 'a -> ('a, 's, 's, 'e) t val (>>=) : ('a, 'i, 'm, 'e) t -> ('a -> ('b, 'm, 'o, 'e) t) -> ('b, 'i, 'o, 'e) t val (>>) : (unit, 'i, 'm, 'e) t -> ('b, 'm, 'o, 'e) t -> ('b, 'i, 'o, 'e) t val map : ('a -> 'b) -> ('a, 'i, 'o, 'e) t -> ('b, 'i, 'o, 'e) t val ignore : ('a, 'i, 'o, 'e) t -> (unit, 'i, 'o, 'e) t val set : 'o -> (unit, 'i, 'o, 'e) t val get : ('s, 's, 's, 'e) t val modify : ('i -> 'o) -> (unit, 'i, 'o, 'e) t val interleave : ('e1 -> 'e2) -> ('e2 -> 'e1) -> ('a, 'i, 'o, 'e1) t -> ('a, 'i, 'o, 'e2) t (** [interleave src dst m] adapts the exceptional content of the monad according to the functions [src] and [dst]. To ensure a meaningful result, those functions must form a retraction, i.e. [dst (src e1) = e1] for all [e1]. This is typically the case when the type ['e1] is [unit]. *) val zero : 'e -> ('a, 'i, 'o, 'e) t val plus : ('a, 'i, 'o, 'e) t -> ('e -> ('a, 'i, 'o, 'e) t) -> ('a, 'i, 'o, 'e) t val split : ('a, 's, 's, 'e) t -> (('a, ('a, 'i, 's, 'e) t, 'e) list_view, 's, 's, 'e) t val once : ('a, 'i, 'o, 'e) t -> ('a, 'i, 'o, 'e) t val break : ('e -> 'e option) -> ('a, 'i, 'o, 'e) t -> ('a, 'i, 'o, 'e) t val lift : 'a NonLogical.t -> ('a, 's, 's, 'e) t type ('a, 'e) reified val repr : ('a, 'e) reified -> ('a, ('a, 'e) reified, 'e) list_view NonLogical.t val run : ('a, 'i, 'o, 'e) t -> 'i -> ('a * 'o, 'e) reified end (** The monad is parametrised in the types of state, environment and writer. *) module type Param = sig (** Read only *) type e (** Write only *) type w (** [w] must be a monoid *) val wunit : w val wprod : w -> w -> w (** Read-write *) type s (** Update-only. Essentially a writer on [u->u]. *) type u (** [u] must be pointed. 
*) val uunit : u end module Logical (P:Param) : sig include Monad.S val ignore : 'a t -> unit t val set : P.s -> unit t val get : P.s t val modify : (P.s -> P.s) -> unit t val put : P.w -> unit t val current : P.e t val local : P.e -> 'a t -> 'a t val update : (P.u -> P.u) -> unit t val zero : Exninfo.iexn -> 'a t val plus : 'a t -> (Exninfo.iexn -> 'a t) -> 'a t val split : 'a t -> ('a, 'a t, Exninfo.iexn) list_view t val once : 'a t -> 'a t val break : (Exninfo.iexn -> Exninfo.iexn option) -> 'a t -> 'a t val lift : 'a NonLogical.t -> 'a t type 'a reified = ('a, Exninfo.iexn) BackState.reified val repr : 'a reified -> ('a, 'a reified, Exninfo.iexn) list_view NonLogical.t val run : 'a t -> P.e -> P.s -> ('a * P.s * P.w * P.u) reified module Unsafe : sig type state = { rstate : P.e; ustate : P.u; wstate : P.w; sstate : P.s; } val make : ('a, state, state, Exninfo.iexn) BackState.t -> 'a t val repr : 'a t -> ('a, state, state, Exninfo.iexn) BackState.t end end coq-8.15.0/engine/namegen.ml000066400000000000000000000436621417001151100156020ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* true | IntroIdentifier id1, IntroIdentifier id2 -> Names.Id.equal id1 id2 | IntroFresh id1, IntroFresh id2 -> Names.Id.equal id1 id2 | _ -> false (**********************************************************************) (* Conventional names *) let default_prop_string = "H" let default_prop_ident = Id.of_string default_prop_string let default_small_string = "H" let default_small_ident = Id.of_string default_small_string let default_type_string = "X" let default_type_ident = Id.of_string default_type_string let default_non_dependent_string = "H" let default_non_dependent_ident = Id.of_string default_non_dependent_string let default_dependent_ident = Id.of_string "x" let default_generated_non_letter_string = "x" (**********************************************************************) (* Globality of identifiers *) let is_imported_modpath = function | MPfile dp -> let rec find_prefix = function |MPfile dp1 -> not (DirPath.equal dp1 dp) |MPdot(mp,_) -> find_prefix mp |MPbound(_) -> false in find_prefix (Lib.current_mp ()) | _ -> false let is_imported_ref = let open GlobRef in function | VarRef _ -> false | IndRef (kn,_) | ConstructRef ((kn,_),_) -> let mp = MutInd.modpath kn in is_imported_modpath mp | ConstRef kn -> let mp = Constant.modpath kn in is_imported_modpath mp let is_global id = try let ref = Nametab.locate (qualid_of_ident id) in not (is_imported_ref ref) with Not_found -> false let is_constructor id = try match Nametab.locate (qualid_of_ident id) with | GlobRef.ConstructRef _ -> true | _ -> false with Not_found -> false let is_section_variable env id = try let _ = Environ.lookup_named id env in true with Not_found -> false (**********************************************************************) (* Generating "intuitive" names from its type *) let global_of_constr = let open GlobRef in function | Const (c, _) -> ConstRef c | Ind (i, _) -> IndRef i | Construct (c, _) -> ConstructRef c | Var id -> VarRef id | _ -> assert false let head_name sigma c = (* Find the head constant of a constr if any *) let rec hdrec c = match EConstr.kind sigma c with | Prod (_,_,c) | Lambda (_,_,c) | LetIn (_,_,_,c) | Cast (c,_,_) | App (c,_) -> hdrec c | Proj (kn,_) -> Some (Label.to_id (Constant.label (Projection.constant kn))) | Const _ | Ind _ | Construct _ 
| Var _ as c -> Some (Nametab.basename_of_global (global_of_constr c)) | Fix ((_,i),(lna,_,_)) | CoFix (i,(lna,_,_)) -> Some (match lna.(i).binder_name with Name id -> id | _ -> assert false) | Sort _ | Rel _ | Meta _|Evar _|Case _ | Int _ | Float _ | Array _ -> None in hdrec c let lowercase_first_char id = (* First character of a constr *) let s = Id.to_string id in match Unicode.split_at_first_letter s with | None -> (* General case: nat -> n *) Unicode.lowercase_first_char s | Some (s,s') -> if String.length s' = 0 then (* No letter, e.g. __, or __'_, etc. *) default_generated_non_letter_string else s ^ Unicode.lowercase_first_char s' let sort_hdchar = function | SProp -> "P" | Prop -> "P" | Set -> "S" | Type _ -> "T" let hdchar env sigma c = let rec hdrec k c = match EConstr.kind sigma c with | Prod (_,_,c) | Lambda (_,_,c) | LetIn (_,_,_,c) -> hdrec (k+1) c | Cast (c,_,_) | App (c,_) -> hdrec k c | Proj (kn,_) -> lowercase_first_char (Label.to_id (Constant.label (Projection.constant kn))) | Const (kn,_) -> lowercase_first_char (Label.to_id (Constant.label kn)) | Ind (x,_) -> (try lowercase_first_char (Nametab.basename_of_global (GlobRef.IndRef x)) with Not_found when !Flags.in_debugger -> "zz") | Construct (x,_) -> (try lowercase_first_char (Nametab.basename_of_global (GlobRef.ConstructRef x)) with Not_found when !Flags.in_debugger -> "zz") | Var id -> lowercase_first_char id | Sort s -> sort_hdchar (ESorts.kind sigma s) | Rel n -> (if n<=k then "p" (* the initial term is flexible product/function *) else try match let d = lookup_rel (n-k) env in get_name d, get_type d with | Name id, _ -> lowercase_first_char id | Anonymous, t -> hdrec 0 (lift (n-k) t) with Not_found -> "y") | Fix ((_,i),(lna,_,_)) | CoFix (i,(lna,_,_)) -> let id = match lna.(i).binder_name with Name id -> id | _ -> assert false in lowercase_first_char id | Evar _ (* We could do better... 
*) | Meta _ | Case _ -> "y" | Int _ -> "i" | Float _ -> "f" | Array _ -> "a" in hdrec 0 c let id_of_name_using_hdchar env sigma a = function | Anonymous -> Id.of_string (hdchar env sigma a) | Name id -> id let named_hd env sigma a = function | Anonymous -> Name (Id.of_string (hdchar env sigma a)) | x -> x let mkProd_name env sigma (n,a,b) = mkProd (map_annot (named_hd env sigma a) n, a, b) let mkLambda_name env sigma (n,a,b) = mkLambda (map_annot (named_hd env sigma a) n, a, b) let lambda_name = mkLambda_name let prod_name = mkProd_name let prod_create env sigma (r,a,b) = mkProd (make_annot (named_hd env sigma a Anonymous) r, a, b) let lambda_create env sigma (r,a,b) = mkLambda (make_annot (named_hd env sigma a Anonymous) r, a, b) let name_assumption env sigma = function | LocalAssum (na,t) -> LocalAssum (map_annot (named_hd env sigma t) na, t) | LocalDef (na,c,t) -> LocalDef (map_annot (named_hd env sigma c) na, c, t) let name_context env sigma hyps = snd (List.fold_left (fun (env,hyps) d -> let d' = name_assumption env sigma d in (push_rel d' env, d' :: hyps)) (env,[]) (List.rev hyps)) let mkProd_or_LetIn_name env sigma b d = mkProd_or_LetIn (name_assumption env sigma d) b let mkLambda_or_LetIn_name env sigma b d = mkLambda_or_LetIn (name_assumption env sigma d) b let it_mkProd_or_LetIn_name env sigma b hyps = it_mkProd_or_LetIn b (name_context env sigma hyps) let it_mkLambda_or_LetIn_name env sigma b hyps = it_mkLambda_or_LetIn b (name_context env sigma hyps) (**********************************************************************) (* Fresh names *) (* Introduce a mode where auto-generated names are mangled to test dependence of scripts on auto-generated names. We also supply a version which only adds a prefix. *) let get_mangle_names = Goptions.declare_bool_option_and_ref ~depr:false ~key:["Mangle";"Names"] ~value:false let get_mangle_names_light = Goptions.declare_bool_option_and_ref ~depr:false ~key:["Mangle";"Names";"Light"] ~value:false let mangle_names_prefix = Goptions.declare_interpreted_string_option_and_ref ~depr:false ~key:["Mangle";"Names";"Prefix"] ~value:("_") (fun x -> Id.to_string (try Id.of_string x with | CErrors.UserError _ -> CErrors.user_err Pp.(str ("Not a valid identifier: \"" ^ x ^ "\".")) ) ) (fun x -> x) (** The name "foo" becomes "_0" if we get_mangle_names and "_foo" if get_mangle_names_light is also set. Otherwise it is left alone. *) let mangle_id id = let prfx = mangle_names_prefix () in if get_mangle_names () then if get_mangle_names_light () then Id.of_string (prfx ^ Id.to_string id) else Id.of_string (prfx ^ "0") else id (* Looks for next "good" name by lifting subscript *) let next_ident_away_from_post_mangling id bad = let rec name_rec id = if bad id then name_rec (increment_subscript id) else id in name_rec id let next_ident_away_from id bad = let id = mangle_id id in next_ident_away_from_post_mangling id bad (* Restart subscript from x0 if name starts with xN, or x00 if name starts with x0N, etc *) let restart_subscript id = if not (has_subscript id) then id else (* It would probably be better with something in the spirit of *** make_ident id (Some 0) *** but compatibility would be lost... 
*) forget_subscript id let visible_ids sigma (nenv, c) = let accu = ref (GlobRef.Set_env.empty, Int.Set.empty, Id.Set.empty) in let rec visible_ids n c = match EConstr.kind sigma c with | Const _ | Ind _ | Construct _ | Var _ as c -> let (gseen, vseen, ids) = !accu in let g = global_of_constr c in if not (GlobRef.Set_env.mem g gseen) then let gseen = GlobRef.Set_env.add g gseen in let ids = match Nametab.shortest_qualid_of_global Id.Set.empty g with | short -> let dir, id = repr_qualid short in if DirPath.is_empty dir then Id.Set.add id ids else ids | exception Not_found -> (* This may happen if given pathological terms or when manipulating open modules *) ids in accu := (gseen, vseen, ids) | Rel p -> let (gseen, vseen, ids) = !accu in if p > n && not (Int.Set.mem (p - n) vseen) then let vseen = Int.Set.add (p - n) vseen in let name = try Some (List.nth nenv (p - n - 1)) with Invalid_argument _ | Failure _ -> (* Unbound index: may happen in debug and actually also while computing temporary implicit arguments of an inductive type *) None in let ids = match name with | Some (Name id) -> Id.Set.add id ids | _ -> ids in accu := (gseen, vseen, ids) | _ -> EConstr.iter_with_binders sigma succ visible_ids n c in let () = visible_ids 1 c in (* n = 1 to count the binder to rename *) let (_, _, ids) = !accu in ids (* Now, there are different renaming strategies... *) (* 1- Looks for a fresh name for printing in cases pattern *) let next_name_away_in_cases_pattern sigma env_t na avoid = let id = match na with Name id -> id | Anonymous -> default_dependent_ident in let visible = visible_ids sigma env_t in let bad id = Id.Set.mem id avoid || is_constructor id || Id.Set.mem id visible in next_ident_away_from id bad (* 2- Looks for a fresh name for introduction in goal *) (* The legacy strategy for renaming introduction variables is not very uniform: - if the name to use is fresh in the context but used as a global name, then a fresh name is taken by finding a free subscript starting from the current subscript; - but if the name to use is not fresh in the current context, the fresh name is taken by finding a free subscript starting from 0 *) let next_ident_away_in_goal env id avoid = let id = if Id.Set.mem id avoid then restart_subscript id else id in let bad id = Id.Set.mem id avoid || (is_global id && not (is_section_variable env id)) in next_ident_away_from id bad let next_name_away_in_goal env na avoid = let id = match na with | Name id -> id | Anonymous -> default_non_dependent_ident in next_ident_away_in_goal env id avoid (* 3- Looks for next fresh name outside a list that is moreover valid as a global identifier; the legacy algorithm is that if the name is already used in the list, one looks for a name of same base with lower available subscript; if the name is not in the list but is used globally, one looks for a name of same base with lower subscript beyond the current subscript *) let next_global_ident_away id avoid = let id = if Id.Set.mem id avoid then restart_subscript id else id in let bad id = Id.Set.mem id avoid || Global.exists_objlabel (Label.of_id id) in next_ident_away_from id bad (* 4- Looks for next fresh name outside a list; if name already used, looks for same name with lower available subscript *) let next_ident_away id avoid = let id = mangle_id id in if Id.Set.mem id avoid then next_ident_away_from_post_mangling (restart_subscript id) (fun id -> Id.Set.mem id avoid) else id let next_name_away_with_default default na avoid = let id = match na with Name id -> id | Anonymous -> 
Id.of_string default in next_ident_away id avoid let reserved_type_name = ref (fun t -> Anonymous) let set_reserved_typed_name f = reserved_type_name := f let next_name_away_with_default_using_types default na avoid t = let id = match na with | Name id -> id | Anonymous -> match !reserved_type_name t with | Name id -> id | Anonymous -> Id.of_string default in next_ident_away id avoid let next_name_away = next_name_away_with_default default_non_dependent_string let make_all_name_different env sigma = (* FIXME: this is inefficient, but only used in printing *) let avoid = ref (ids_of_named_context_val (named_context_val env)) in let sign = named_context_val env in let rels = rel_context env in let env0 = reset_with_named_context sign env in Context.Rel.fold_outside (fun decl newenv -> let na = named_hd newenv sigma (RelDecl.get_type decl) (RelDecl.get_name decl) in let id = next_name_away na !avoid in avoid := Id.Set.add id !avoid; push_rel (RelDecl.set_name (Name id) decl) newenv) rels ~init:env0 (* 5- Looks for next fresh name outside a list; avoids also to use names that would clash with short name of global references; if name is already used, looks for name of same base with lower available subscript beyond current subscript *) let next_ident_away_for_default_printing sigma env_t id avoid = let visible = visible_ids sigma env_t in let bad id = Id.Set.mem id avoid || Id.Set.mem id visible in next_ident_away_from id bad let next_name_away_for_default_printing sigma env_t na avoid = let id = match na with | Name id -> id | Anonymous -> (* In principle, an anonymous name is not dependent and will not be *) (* taken into account by the function compute_displayed_name_in; *) (* just in case, invent a valid name *) default_non_dependent_ident in next_ident_away_for_default_printing sigma env_t id avoid (**********************************************************************) (* Displaying terms avoiding bound variables clashes *) (* Renaming strategy introduced in December 1998: - Rule number 1: all names, even if unbound and not displayed, contribute to the list of names to avoid - Rule number 2: only the dependency status is used for deciding if a name is displayed or not Example: bool_ind: "forall (P:bool->Prop)(f:(P true))(f:(P false))(b:bool), P b" is displayed "forall P:bool->Prop, P true -> P false -> forall b:bool, P b" but f and f0 contribute to the list of variables to avoid (knowing that f and f0 are how the f's would be named if introduced, assuming no other f and f0 are already used). 
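The functions below reflect these rules: for instance, [compute_displayed_name_in] returns [Anonymous] whenever the bound variable does not occur in the body, so that the binder is displayed as a mere arrow (Rule number 2); but when the binder carries a name, a fresh identifier is nevertheless computed and added to the avoid set even if it is not displayed (Rule number 1).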
*) type renaming_flags = (* The term is the body of a binder and the environment excludes this binder *) (* so, there is a missing binder in the environment *) | RenamingForCasesPattern of (Name.t list * constr) | RenamingForGoal | RenamingElsewhereFor of (Name.t list * constr) let next_name_for_display env sigma flags = match flags with | RenamingForCasesPattern env_t -> next_name_away_in_cases_pattern sigma env_t | RenamingForGoal -> next_name_away_in_goal env | RenamingElsewhereFor env_t -> next_name_away_for_default_printing sigma env_t (* Remark: Anonymous var may be dependent in Evar's contexts *) let compute_displayed_name_in_gen_poly noccurn_fun env sigma flags avoid na c = match na with | Anonymous when noccurn_fun sigma 1 c -> (Anonymous,avoid) | _ -> let fresh_id = next_name_for_display env sigma flags na avoid in let idopt = if noccurn_fun sigma 1 c then Anonymous else Name fresh_id in (idopt, Id.Set.add fresh_id avoid) let compute_displayed_name_in = compute_displayed_name_in_gen_poly noccurn let compute_displayed_name_in_gen f env sigma = (* only flag which does not need a constr, maybe to be refined *) let flag = RenamingForGoal in compute_displayed_name_in_gen_poly f env sigma flag let compute_and_force_displayed_name_in env sigma flags avoid na c = match na with | Anonymous when noccurn sigma 1 c -> (Anonymous,avoid) | _ -> let fresh_id = next_name_for_display env sigma flags na avoid in (Name fresh_id, Id.Set.add fresh_id avoid) let compute_displayed_let_name_in env sigma flags avoid na c = let fresh_id = next_name_for_display env sigma flags na avoid in (Name fresh_id, Id.Set.add fresh_id avoid) let rename_bound_vars_as_displayed env sigma avoid tenv c = let rec rename avoid tenv c = match EConstr.kind sigma c with | Prod (na,c1,c2) -> let na',avoid' = compute_displayed_name_in env sigma (RenamingElsewhereFor (tenv,c2)) avoid na.binder_name c2 in mkProd ({na with binder_name=na'}, c1, rename avoid' (na' :: tenv) c2) | LetIn (na,c1,t,c2) -> let na',avoid' = compute_displayed_let_name_in env sigma (RenamingElsewhereFor (tenv,c2)) avoid na.binder_name c2 in mkLetIn ({na with binder_name=na'},c1,t, rename avoid' (na' :: tenv) c2) | Cast (c,k,t) -> mkCast (rename avoid tenv c, k,t) | _ -> c in rename avoid tenv c coq-8.15.0/engine/namegen.mli000066400000000000000000000132731417001151100157460ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* intro_pattern_naming_expr -> bool (********************************************************************* Conventional default names *) val default_prop_ident : Id.t (* "H" *) val default_small_ident : Id.t (* "H" *) val default_type_ident : Id.t (* "X" *) val default_non_dependent_ident : Id.t (* "H" *) val default_dependent_ident : Id.t (* "x" *) (********************************************************************* Generating "intuitive" names from their type *) val lowercase_first_char : Id.t -> string val sort_hdchar : Sorts.t -> string val hdchar : env -> evar_map -> types -> string val id_of_name_using_hdchar : env -> evar_map -> types -> Name.t -> Id.t val named_hd : env -> evar_map -> types -> Name.t -> Name.t val head_name : evar_map -> types -> Id.t option val mkProd_name : env -> evar_map -> Name.t Context.binder_annot * types * types -> types val mkLambda_name : env -> evar_map -> Name.t Context.binder_annot * types * constr -> constr (** Deprecated synonyms of 
[mkProd_name] and [mkLambda_name] *) val prod_name : env -> evar_map -> Name.t Context.binder_annot * types * types -> types val lambda_name : env -> evar_map -> Name.t Context.binder_annot * types * constr -> constr val prod_create : env -> evar_map -> Sorts.relevance * types * types -> constr val lambda_create : env -> evar_map -> Sorts.relevance * types * constr -> constr val name_assumption : env -> evar_map -> rel_declaration -> rel_declaration val name_context : env -> evar_map -> rel_context -> rel_context val mkProd_or_LetIn_name : env -> evar_map -> types -> rel_declaration -> types val mkLambda_or_LetIn_name : env -> evar_map -> constr -> rel_declaration -> constr val it_mkProd_or_LetIn_name : env -> evar_map -> types -> rel_context -> types val it_mkLambda_or_LetIn_name : env -> evar_map -> constr -> rel_context -> constr (********************************************************************* Fresh names *) (** Avoid clashing with a name satisfying some predicate *) val next_ident_away_from : Id.t -> (Id.t -> bool) -> Id.t (** [next_ident_away original_id unwanted_ids] returns a new identifier as close as possible to the [original_id] while avoiding all [unwanted_ids]. In particular: {ul {- if [original_id] does not appear in the list of [unwanted_ids], then [original_id] is returned.} {- if [original_id] appears in the list of [unwanted_ids], then this function returns a new id that: {ul {- has the same {i root} as the [original_id],} {- does not occur in the list of [unwanted_ids],} {- has the smallest possible {i subscript}.}}}} where by {i subscript} of some identifier we mean last part of it that is composed only from (decimal) digits and by {i root} of some identifier we mean the whole identifier except for the {i subscript}. E.g. if we take [foo42], then [42] is the {i subscript}, and [foo] is the root. 
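For instance, if [original_id] is [H] and [unwanted_ids] contains [H] and [H0] but no other identifier with root [H], then the result is [H1].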
*) val next_ident_away : Id.t -> Id.Set.t -> Id.t (** Avoid clashing with a name already used in current module *) val next_ident_away_in_goal : Environ.env -> Id.t -> Id.Set.t -> Id.t (** Avoid clashing with a name already used in current module but tolerate overwriting section variables, as in goals *) val next_global_ident_away : Id.t -> Id.Set.t -> Id.t (** Default is [default_non_dependent_ident] *) val next_name_away : Name.t -> Id.Set.t -> Id.t val next_name_away_with_default : string -> Name.t -> Id.Set.t -> Id.t val next_name_away_with_default_using_types : string -> Name.t -> Id.Set.t -> types -> Id.t val set_reserved_typed_name : (types -> Name.t) -> unit (********************************************************************* Making name distinct for displaying *) type renaming_flags = | RenamingForCasesPattern of (Name.t list * constr) (** avoid only global constructors *) | RenamingForGoal (** avoid all globals (as in intro) *) | RenamingElsewhereFor of (Name.t list * constr) val make_all_name_different : env -> evar_map -> env val compute_displayed_name_in : Environ.env -> evar_map -> renaming_flags -> Id.Set.t -> Name.t -> constr -> Name.t * Id.Set.t val compute_and_force_displayed_name_in : Environ.env -> evar_map -> renaming_flags -> Id.Set.t -> Name.t -> constr -> Name.t * Id.Set.t val compute_displayed_let_name_in : Environ.env -> evar_map -> renaming_flags -> Id.Set.t -> Name.t -> 'a -> Name.t * Id.Set.t val rename_bound_vars_as_displayed : Environ.env -> evar_map -> Id.Set.t -> Name.t list -> types -> types (* Generic function expecting a "not occurn" function *) val compute_displayed_name_in_gen : (evar_map -> int -> 'a -> bool) -> Environ.env -> evar_map -> Id.Set.t -> Name.t -> 'a -> Name.t * Id.Set.t coq-8.15.0/engine/nameops.ml000066400000000000000000000176301417001151100156260ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* [0] *) { ss_zero = 1; ss_subs = 0 } else (* [0...00] -> [0..01] *) { ss_zero = s.ss_zero - 1; ss_subs = 1 } else if overflow s.ss_subs then if Int.equal s.ss_zero 0 then (* [9...9] -> [10...0] *) { ss_zero = 0; ss_subs = 1 + s.ss_subs } else (* [0...009...9] -> [0...010...0] *) { ss_zero = s.ss_zero - 1; ss_subs = 1 + s.ss_subs } else (* [0...0n] -> [0...0{n+1}] *) { ss_zero = s.ss_zero; ss_subs = s.ss_subs + 1 } let equal s1 s2 = Int.equal s1.ss_zero s2.ss_zero && Int.equal s1.ss_subs s2.ss_subs let compare s1 s2 = (* Lexicographic order is reversed in order to ensure that [succ] is strictly increasing. 
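For instance, assuming [ss_zero] counts the leading zeros and [ss_subs] holds the value of the remaining digits, the subscript [09] is [{ ss_zero = 1; ss_subs = 9 }] and [succ] maps it to [10], i.e. [{ ss_zero = 0; ss_subs = 10 }]. Comparing [ss_subs] first yields [09] < [10] as required, whereas comparing [ss_zero] first would wrongly put [10] before [09].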
*) let c = Int.compare s1.ss_subs s2.ss_subs in if Int.equal c 0 then Int.compare s1.ss_zero s2.ss_zero else c end let code_of_0 = Char.code '0' let code_of_9 = Char.code '9' let cut_ident skip_quote s = let s = Id.to_string s in let slen = String.length s in (* [n'] is the position of the first non nullary digit *) let rec numpart n n' = if Int.equal n 0 then (* ident made of _ and digits only [and ' if skip_quote]: don't cut it *) slen else let c = Char.code (String.get s (n-1)) in if Int.equal c code_of_0 && not (Int.equal n slen) then numpart (n-1) n' else if code_of_0 <= c && c <= code_of_9 then numpart (n-1) (n-1) else if skip_quote && (Int.equal c (Char.code '\'') || Int.equal c (Char.code '_')) then numpart (n-1) (n-1) else n' in numpart slen slen let repr_ident s = let numstart = cut_ident false s in let s = Id.to_string s in let slen = String.length s in if Int.equal numstart slen then (s, None) else (String.sub s 0 numstart, Some (int_of_string (String.sub s numstart (slen - numstart)))) let make_ident sa = function | Some n -> let c = Char.code (String.get sa (String.length sa -1)) in let s = if c < code_of_0 || c > code_of_9 then sa ^ (string_of_int n) else sa ^ "_" ^ (string_of_int n) in Id.of_string s | None -> Id.of_string sa let root_of_id id = let suffixstart = cut_ident true id in Id.of_string (String.sub (Id.to_string id) 0 suffixstart) (* Return the same identifier as the original one but whose {i subscript} is incremented. If the original identifier does not have a suffix, [0] is appended to it. Example mappings: [bar] ↦ [bar0] [bar0] ↦ [bar1] [bar00] ↦ [bar01] [bar1] ↦ [bar2] [bar01] ↦ [bar02] [bar9] ↦ [bar10] [bar09] ↦ [bar10] [bar99] ↦ [bar100] *) let increment_subscript id = let id = Id.to_string id in let len = String.length id in let rec add carrypos = let c = id.[carrypos] in if is_digit c then if Int.equal (Char.code c) (Char.code '9') then begin assert (carrypos>0); add (carrypos-1) end else begin let newid = Bytes.of_string id in Bytes.fill newid (carrypos+1) (len-1-carrypos) '0'; Bytes.set newid carrypos (Char.chr (Char.code c + 1)); newid end else begin let newid = Bytes.of_string (id^"0") in if carrypos < len-1 then begin Bytes.fill newid (carrypos+1) (len-1-carrypos) '0'; Bytes.set newid (carrypos+1) '1' end; newid end in Id.of_bytes (add (len-1)) let has_subscript id = let id = Id.to_string id in is_digit (id.[String.length id - 1]) let get_subscript id = let id0 = id in let id = Id.to_string id in let len = String.length id in let rec get_suf accu pos = if pos < 0 then (pos, accu) else let c = id.[pos] in if is_digit c then get_suf (Char.code c - Char.code '0' :: accu) (pos - 1) else (pos, accu) in let (pos, suf) = get_suf [] (len - 1) in if Int.equal pos (len - 1) then (id0, Subscript.zero) else let id = String.sub id 0 (pos + 1) in let rec compute_zeros accu = function | [] -> (accu, []) | 0 :: l -> compute_zeros (succ accu) l | _ :: _ as l -> (accu, l) in let (ss_zero, suf) = compute_zeros 0 suf in let rec compute_suf accu = function | [] -> accu | n :: l -> compute_suf (10 * accu + n) l in let ss_subs = compute_suf 0 suf in (Id.of_string id, { Subscript.ss_subs; ss_zero; }) let add_subscript id ss = if Subscript.equal Subscript.zero ss then id else if Int.equal ss.Subscript.ss_subs 0 then let id = Id.to_string id in let pad = String.make ss.Subscript.ss_zero '0' in Id.of_string (Printf.sprintf "%s%s" id pad) else let id = Id.to_string id in let pad = String.make ss.Subscript.ss_zero '0' in let suf = ss.Subscript.ss_subs in Id.of_string 
(Printf.sprintf "%s%s%i" id pad suf) let forget_subscript id = let numstart = cut_ident false id in let newid = Bytes.make (numstart+1) '0' in String.blit (Id.to_string id) 0 newid 0 numstart; (Id.of_bytes newid) let add_suffix id s = Id.of_string (Id.to_string id ^ s) let add_prefix s id = Id.of_string (s ^ Id.to_string id) let atompart_of_id id = fst (repr_ident id) (* Names *) module type ExtName = sig include module type of struct include Names.Name end exception IsAnonymous val fold_left : ('a -> Id.t -> 'a) -> 'a -> t -> 'a val fold_right : (Id.t -> 'a -> 'a) -> t -> 'a -> 'a val iter : (Id.t -> unit) -> t -> unit val map : (Id.t -> Id.t) -> t -> t val fold_left_map : ('a -> Id.t -> 'a * Id.t) -> 'a -> t -> 'a * t val fold_right_map : (Id.t -> 'a -> Id.t * 'a) -> Name.t -> 'a -> Name.t * 'a val get_id : t -> Id.t val pick : t -> t -> t val pick_annot : t Context.binder_annot -> t Context.binder_annot -> t Context.binder_annot val cons : t -> Id.t list -> Id.t list val to_option : Name.t -> Id.t option end module Name : ExtName = struct include Names.Name exception IsAnonymous let fold_left f a = function | Name id -> f a id | Anonymous -> a let fold_right f na a = match na with | Name id -> f id a | Anonymous -> a let iter f na = fold_right (fun x () -> f x) na () let map f = function | Name id -> Name (f id) | Anonymous -> Anonymous let fold_left_map f a = function | Name id -> let (a, id) = f a id in (a, Name id) | Anonymous -> a, Anonymous let fold_right_map f na a = match na with | Name id -> let (id, a) = f id a in (Name id, a) | Anonymous -> Anonymous, a let get_id = function | Name id -> id | Anonymous -> raise IsAnonymous let pick na1 na2 = match na1 with | Name _ -> na1 | Anonymous -> na2 let pick_annot na1 na2 = let open Context in match na1.binder_name with | Name _ -> na1 | Anonymous -> na2 let cons na l = match na with | Anonymous -> l | Name id -> id::l let to_option = function | Anonymous -> None | Name id -> Some id end (* Metavariables *) let pr_meta = Pp.int let string_of_meta = string_of_int coq-8.15.0/engine/nameops.mli000066400000000000000000000102331417001151100157670ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* int option -> Id.t val repr_ident : Id.t -> string * int option val atompart_of_id : Id.t -> string (** remove trailing digits *) val root_of_id : Id.t -> Id.t (** remove trailing digits, ' and _ *) val add_suffix : Id.t -> string -> Id.t val add_prefix : string -> Id.t -> Id.t (** Below, by {i subscript} we mean a suffix composed solely from (decimal) digits. *) module Subscript : sig type t (** Abstract datatype of subscripts. Isomorphic to a string of digits. *) val zero : t (** Empty subscript *) val succ : t -> t (** Guarantees that [x < succ x], but [succ x] might not be the smallest element strictly above [x], generally it does not exist. Example mappings: "" ↦ "0" "0" ↦ "1" "00" ↦ "01" "1" ↦ "2" "01" ↦ "02" "9" ↦ "10" "09" ↦ "10" "99" ↦ "100" *) val compare : t -> t -> int (** Well-founded order. *) val equal : t -> t -> bool end val has_subscript : Id.t -> bool val get_subscript : Id.t -> Id.t * Subscript.t (** Split an identifier into a base name and a subscript. *) val add_subscript : Id.t -> Subscript.t -> Id.t (** Append the subscript to the identifier. 
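For instance, [get_subscript] splits [x01] into [x] together with the subscript [01], and [add_subscript] applied to [x] and that subscript yields [x01] again.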
*) val increment_subscript : Id.t -> Id.t (** Return the same identifier as the original one but whose {i subscript} is incremented. If the original identifier does not have a suffix, [0] is appended to it. Example mappings: [bar] ↦ [bar0] [bar0] ↦ [bar1] [bar00] ↦ [bar01] [bar1] ↦ [bar2] [bar01] ↦ [bar01] [bar9] ↦ [bar10] [bar09] ↦ [bar10] [bar99] ↦ [bar100] *) val forget_subscript : Id.t -> Id.t module Name : sig include module type of struct include Names.Name end exception IsAnonymous val fold_left : ('a -> Id.t -> 'a) -> 'a -> Name.t -> 'a (** [fold_left f na a] is [f id a] if [na] is [Name id], and [a] otherwise. *) val fold_right : (Id.t -> 'a -> 'a) -> Name.t -> 'a -> 'a (** [fold_right f a na] is [f a id] if [na] is [Name id], and [a] otherwise. *) val iter : (Id.t -> unit) -> Name.t -> unit (** [iter f na] does [f id] if [na] equals [Name id], nothing otherwise. *) val map : (Id.t -> Id.t) -> Name.t -> t (** [map f na] is [Anonymous] if [na] is [Anonymous] and [Name (f id)] if [na] is [Name id]. *) val fold_left_map : ('a -> Id.t -> 'a * Id.t) -> 'a -> Name.t -> 'a * Name.t (** [fold_left_map f a na] is [a',Name id'] when [na] is [Name id] and [f a id] is [(a',id')]. It is [a,Anonymous] otherwise. *) val fold_right_map : (Id.t -> 'a -> Id.t * 'a) -> Name.t -> 'a -> Name.t * 'a (** [fold_right_map f na a] is [Name id',a'] when [na] is [Name id] and [f id a] is [(id',a')]. It is [Anonymous,a] otherwise. *) val get_id : Name.t -> Id.t (** [get_id] associates [id] to [Name id]. @raise IsAnonymous otherwise. *) val pick : Name.t -> Name.t -> Name.t (** [pick na na'] returns [Anonymous] if both names are [Anonymous]. Pick one of [na] or [na'] otherwise. *) val pick_annot : Name.t Context.binder_annot -> Name.t Context.binder_annot -> Name.t Context.binder_annot val cons : Name.t -> Id.t list -> Id.t list (** [cons na l] returns [id::l] if [na] is [Name id] and [l] otherwise. *) val to_option : Name.t -> Id.t option (** [to_option Anonymous] is [None] and [to_option (Name id)] is [Some id] *) end (** Metavariables *) val pr_meta : Constr.metavariable -> Pp.t val string_of_meta : Constr.metavariable -> string coq-8.15.0/engine/proofview.ml000066400000000000000000001262471417001151100162110ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* i+1) solution 0 in let new_el = List.map (fun (t,ty) -> nf t, nf ty) el in let pruned_solution = Evd.drop_all_defined solution in let apply_subst_einfo _ ei = Evd.({ ei with evar_concl = nf ei.evar_concl; evar_hyps = Environ.map_named_val (fun d -> map_constr nf0 d) ei.evar_hyps; evar_candidates = Option.map (List.map nf) ei.evar_candidates }) in let new_solution = Evd.raw_map_undefined apply_subst_einfo pruned_solution in let new_size = Evd.fold (fun _ _ i -> i+1) new_solution 0 in Feedback.msg_info (Pp.str (Printf.sprintf "Evars: %d -> %d\n" size new_size)); new_el, { pv with solution = new_solution; } (** {6 Starting and querying a proof view} *) type telescope = | TNil of Evd.evar_map | TCons of Environ.env * Evd.evar_map * EConstr.types * (Evd.evar_map -> EConstr.constr -> telescope) let map_telescope_evd f = function | TNil sigma -> TNil (f sigma) | TCons (env,sigma,ty,g) -> TCons(env,(f sigma),ty,g) let dependent_init = (* Goals don't have a source location. 
*) let src = Loc.tag @@ Evar_kinds.GoalEvar in (* Main routine *) let rec aux = function | TNil sigma -> [], { solution = sigma; comb = [] } | TCons (env, sigma, typ, t) -> let (sigma, econstr) = Evarutil.new_evar env sigma ~src ~typeclass_candidate:false typ in let (gl, _) = EConstr.destEvar sigma econstr in let ret, { solution = sol; comb = comb } = aux (t sigma econstr) in let entry = (econstr, typ) :: ret in entry, { solution = sol; comb = with_empty_state gl :: comb } in fun t -> let t = map_telescope_evd Evd.push_future_goals t in let entry, v = aux t in (* The created goal are not to be shelved. *) let _goals, solution = Evd.pop_future_goals v.solution in entry, { v with solution } let init = let rec aux sigma = function | [] -> TNil sigma | (env,g)::l -> TCons (env,sigma,g,(fun sigma _ -> aux sigma l)) in fun sigma l -> dependent_init (aux sigma l) let initial_goals initial = initial let finished = function | {comb = []} -> true | _ -> false let return { solution=defs } = defs let return_constr { solution = defs } c = Evarutil.nf_evar defs c let partial_proof entry pv = CList.map (return_constr pv) (CList.map fst entry) (** {6 Focusing commands} *) (** A [focus_context] represents the part of the proof view which has been removed by a focusing action, it can be used to unfocus later on. *) (* First component is a reverse list of the goals which come before and second component is the list of the goals which go after (in the expected order). *) type focus_context = goal_with_state list * goal_with_state list (** Returns a stylised view of a focus_context for use by, for instance, ide-s. *) (* spiwack: the type of [focus_context] will change as we push more refined functions to ide-s. This would be better than spawning a new nearly identical function every time. Hence the generic name. *) (* In this version: the goals in the context, as a "zipper" (the first list is in reversed order). *) let focus_context (left,right) = (List.map drop_state left, List.map drop_state right) (** This (internal) function extracts a sublist between two indices, and returns this sublist together with its context: if it returns [(a,(b,c))] then [a] is the sublist and [(rev b) @ a @ c] is the original list. The focused list has length [j-i-1] and contains the goals from number [i] to number [j] (both included) the first goal of the list being numbered [1]. [focus_sublist i j l] raises [IndexOutOfRange] if [i > length l], or [j > length l] or [j < i]. *) let focus_sublist i j l = let (left,sub_right) = CList.goto (i-1) l in let (sub, right) = try CList.chop (j-i+1) sub_right with Failure _ -> raise CList.IndexOutOfRange in (sub, (left,right)) (** Inverse operation to the previous one. *) let unfocus_sublist (left,right) s = CList.rev_append left (s@right) (** [focus i j] focuses a proofview on the goals from index [i] to index [j] (inclusive, goals are indexed from [1]). I.e. goals number [i] to [j] become the only focused goals of the returned proofview. It returns the focused proofview, and a context for the focus stack. *) let focus i j sp = let (new_comb, (left, right)) = focus_sublist i j sp.comb in ( { sp with comb = new_comb } , (left, right) ) let cleared_alias evd g = let evk = drop_state g in let state = get_state g in Option.map (fun g -> goal_with_state g state) (Evarutil.advance evd evk) (** [undefined defs l] is the list of goals in [l] which are still unsolved (after advancing cleared goals). Note that order matters. 
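Also, with the definition below, the order of [l] is preserved and two goals that advance to the same goal are listed only once.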
*) let undefined_evars defs l = List.fold_right (fun evk l -> match Evarutil.advance defs evk with | Some evk -> List.add_set Evar.equal evk l | None -> l) l [] let goal_with_state_equal x y = Evar.equal (drop_state x) (drop_state y) let undefined defs l = List.fold_right (fun evk l -> match cleared_alias defs evk with | Some evk -> List.add_set goal_with_state_equal evk l | None -> l) l [] (** Unfocuses a proofview with respect to a context. *) let unfocus (left, right) sp = { sp with comb = undefined sp.solution (unfocus_sublist (left, right) sp.comb) } let with_empty_state = Proofview_monad.with_empty_state let drop_state = Proofview_monad.drop_state let goal_with_state = Proofview_monad.goal_with_state (** {6 The tactic monad} *) (** - Tactics are objects which apply a transformation to all the subgoals of the current view at the same time. By opposition to the old vision of applying it to a single goal. It allows tactics such as [shelve_unifiable], tactics to reorder the focused goals, or global automation tactic for dependent subgoals (instantiating an evar has influences on the other goals of the proof in progress, not being able to take that into account causes the current eauto tactic to fail on some instances where it could succeed). Another benefit is that it is possible to write tactics that can be executed even if there are no focused goals. - Tactics form a monad ['a tactic], in a sense a tactic can be seen as a function (without argument) which returns a value of type 'a and modifies the environment (in our case: the view). Tactics of course have arguments, but these are given at the meta-level as OCaml functions. Most tactics in the sense we are used to return [()], that is no really interesting values. But some might pass information around. The tactics seen in Coq's Ltac are (for now at least) only [unit tactic], the return values are kept for the OCaml toolkit. The operation or the monad are [Proofview.tclUNIT] (which is the "return" of the tactic monad) [Proofview.tclBIND] (which is the "bind") and [Proofview.tclTHEN] (which is a specialized bind on unit-returning tactics). - Tactics have support for full-backtracking. Tactics can be seen having multiple success: if after returning the first success a failure is encountered, the tactic can backtrack and use a second success if available. The state is backtracked to its previous value, except the non-logical state defined in the {!NonLogical} module below. *) (* spiwack: as far as I'm aware this doesn't really relate to F. Kirchner and C. Muñoz. *) module Proof = Logical (** type of tactics: tactics can - access the environment, - report unsafe status, shelved goals and given up goals - access and change the current [proofview] - backtrack on previous changes of the proofview *) type +'a tactic = 'a Proof.t (** Applies a tactic to the current proofview. *) let apply ~name ~poly env t sp = let open Logic_monad in let ans = Proof.repr (Proof.run t P.{trace=false; name; poly} (sp,env)) in let ans = Logic_monad.NonLogical.run ans in match ans with | Nil (e, info) -> Exninfo.iraise (TacticFailure e, info) | Cons ((r, (state, _), status, info), _) -> r, state, status, Trace.to_tree info (** {7 Monadic primitives} *) (** Unit of the tactic monad. *) let tclUNIT = Proof.return (** Bind operation of the tactic monad. *) let tclBIND = Proof.(>>=) (** Interprets the ";" (semicolon) of Ltac. As a monadic operation, it's a specialized "bind". 
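Concretely, [tclTHEN t1 t2] behaves like [tclBIND t1 (fun () -> t2)]: it runs [t1], discards its [unit] result, and then runs [t2].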
*) let tclTHEN = Proof.(>>) (** [tclIGNORE t] has the same operational content as [t], but drops the returned value. *) let tclIGNORE = Proof.ignore module Monad = Proof (** {7 Failure and backtracking} *) (** [tclZERO e] fails with exception [e]. It has no success. *) let tclZERO ?(info=Exninfo.null) e = if not (CErrors.noncritical e) then CErrors.anomaly (Pp.str "tclZERO receiving critical error: " ++ CErrors.print e); Proof.zero (e, info) (** [tclOR t1 t2] behaves like [t1] as long as [t1] succeeds. Whenever the successes of [t1] have been depleted and it failed with [e], then it behaves as [t2 e]. In other words, [tclOR] inserts a backtracking point. *) let tclOR = Proof.plus (** [tclORELSE t1 t2] is equal to [t1] if [t1] has at least one success or [t2 e] if [t1] fails with [e]. It is analogous to [try/with] handler of exception in that it is not a backtracking point. *) let tclORELSE t1 t2 = let open Logic_monad in let open Proof in split t1 >>= function | Nil e -> t2 e | Cons (a,t1') -> plus (return a) t1' (** [tclIFCATCH a s f] is a generalisation of {!tclORELSE}: if [a] succeeds at least once then it behaves as [tclBIND a s] otherwise, if [a] fails with [e], then it behaves as [f e]. *) let tclIFCATCH a s f = let open Logic_monad in let open Proof in split a >>= function | Nil e -> f e | Cons (x,a') -> plus (s x) (fun e -> (a' e) >>= fun x' -> (s x')) (** [tclONCE t] behave like [t] except it has at most one success: [tclONCE t] stops after the first success of [t]. If [t] fails with [e], [tclONCE t] also fails with [e]. *) let tclONCE = Proof.once exception MoreThanOneSuccess let _ = CErrors.register_handler begin function | MoreThanOneSuccess -> Some (Pp.str "This tactic has more than one success.") | _ -> None end (** [tclEXACTLY_ONCE e t] succeeds as [t] if [t] has exactly one success. Otherwise it fails. The tactic [t] is run until its first success, then a failure with exception [e] is simulated. It [t] yields another success, then [tclEXACTLY_ONCE e t] fails with [MoreThanOneSuccess] (it is a user error). Otherwise, [tclEXACTLY_ONCE e t] succeeds with the first success of [t]. Notice that the choice of [e] is relevant, as the presence of further successes may depend on [e] (see {!tclOR}). *) let tclEXACTLY_ONCE e t = let open Logic_monad in let open Proof in split t >>= function | Nil (e, info) -> tclZERO ~info e | Cons (x,k) -> let info = Exninfo.null in Proof.split (k (e, Exninfo.null)) >>= function | Nil _ -> tclUNIT x | _ -> tclZERO ~info MoreThanOneSuccess (** [tclCASE t] wraps the {!Proofview_monad.Logical.split} primitive. *) type 'a case = | Fail of Exninfo.iexn | Next of 'a * (Exninfo.iexn -> 'a tactic) let tclCASE t = let open Logic_monad in let map = function | Nil e -> Fail e | Cons (x, t) -> Next (x, t) in Proof.map map (Proof.split t) let tclBREAK = Proof.break (** {7 Focusing tactics} *) exception NoSuchGoals of int let _ = CErrors.register_handler begin function | NoSuchGoals n -> Some (str "No such " ++ str (String.plural n "goal") ++ str ".") | _ -> None end (** [tclFOCUS ?nosuchgoal i j t] applies [t] in a context where only the goals numbered [i] to [j] are focused (the rest of the goals is restored at the end of the tactic). If the range [i]-[j] is not valid, then it [tclFOCUS_gen nosuchgoal i j t] is [nosuchgoal]. 
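For instance, [tclFOCUS 1 1 t] runs [t] with only the first goal under focus and fails with [NoSuchGoals] when there is no goal at all, whereas [tclTRYFOCUS 1 1 t] (that is, [tclFOCUS ~nosuchgoal:(tclUNIT ()) 1 1 t], defined below) succeeds without doing anything in that case.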
*) let tclFOCUS ?nosuchgoal i j t = let nosuchgoal ~info = Option.default (tclZERO ~info (NoSuchGoals (j+1-i))) nosuchgoal in let open Proof in Pv.get >>= fun initial -> try let (focused,context) = focus i j initial in Pv.set focused >> t >>= fun result -> Pv.modify (fun next -> unfocus context next) >> return result with CList.IndexOutOfRange as exn -> let _, info = Exninfo.capture exn in nosuchgoal ~info let tclTRYFOCUS i j t = tclFOCUS ~nosuchgoal:(tclUNIT ()) i j t let tclFOCUSLIST ?(nosuchgoal=tclZERO (NoSuchGoals 0)) l t = let open Proof in Comb.get >>= fun comb -> let n = CList.length comb in let ok (i, j) = 1 <= i && i <= j && j <= n in if not (CList.for_all ok l) then nosuchgoal else match l with | [] -> nosuchgoal | (mi, _) :: _ -> (* Get the left-most goal to focus. This goal won't move, and we will then place all the other goals to focus to the right. *) let mi = CList.fold_left (fun m (i, _) -> min m i) mi l in (* [CList.goto] returns a zipper, so that [(rev left) @ sub_right = comb]. *) let left, sub_right = CList.goto (mi-1) comb in let p x _ = CList.exists (fun (i, j) -> i <= x + mi && x + mi <= j) l in let sub, right = CList.partitioni p sub_right in let mj = mi - 1 + CList.length sub in Comb.set (CList.rev_append left (sub @ right)) >> tclFOCUS mi mj t (** Like {!tclFOCUS} but selects a single goal by name. *) let tclFOCUSID ?(nosuchgoal=tclZERO (NoSuchGoals 1)) id t = let open Proof in Pv.get >>= fun initial -> try let ev = Evd.evar_key id initial.solution in try let comb = CList.map drop_state initial.comb in let n = CList.index Evar.equal ev comb in (* goal is already under focus *) let (focused,context) = focus n n initial in Pv.set focused >> t >>= fun result -> Pv.modify (fun next -> unfocus context next) >> return result with Not_found -> (* otherwise, save current focus and work purely on the shelve *) Comb.set [with_empty_state ev] >> t >>= fun result -> Comb.set initial.comb >> return result with Not_found -> nosuchgoal (** {7 Dispatching on goals} *) exception SizeMismatch of int*int let _ = CErrors.register_handler begin function | SizeMismatch (i,j) -> let open Pp in Some ( str"Incorrect number of goals" ++ spc() ++ str"(expected "++int i++str(String.plural i " tactic") ++ str", was given "++ int j++str").") | _ -> None end (** A variant of [Monad.List.iter] where we iter over the focused list of goals. The argument tactic is executed in a focus comprising only of the current goal, a goal which has been solved by side effect is skipped. The generated subgoals are concatenated in order. *) let iter_goal i = let open Proof in Comb.get >>= fun initial -> Proof.List.fold_left begin fun (subgoals as cur) goal -> Solution.get >>= fun step -> match cleared_alias step goal with | None -> return cur | Some goal -> Comb.set [goal] >> i goal >> Proof.map (fun comb -> comb :: subgoals) Comb.get end [] initial >>= fun subgoals -> Solution.get >>= fun evd -> Comb.set CList.(undefined evd (flatten (rev subgoals))) (** List iter but allocates a list of results *) let map_goal i = let rev = List.rev in (* hem... Proof masks List... 
*) let open Proof in Comb.get >>= fun initial -> Proof.List.fold_left begin fun (acc, subgoals as cur) goal -> Solution.get >>= fun step -> match cleared_alias step goal with | None -> return cur | Some goal -> Comb.set [goal] >> i goal >>= fun res -> Proof.map (fun comb -> comb :: subgoals) Comb.get >>= fun x -> return (res :: acc, x) end ([],[]) initial >>= fun (results_rev, subgoals) -> Solution.get >>= fun evd -> Comb.set CList.(undefined evd (flatten (rev subgoals))) >> return (rev results_rev) (** A variant of [Monad.List.fold_left2] where the first list is the list of focused goals. The argument tactic is executed in a focus comprising only of the current goal, a goal which has been solved by side effect is skipped. The generated subgoals are concatenated in order. *) let fold_left2_goal i s l = let open Proof in Pv.get >>= fun initial -> let err = return () >>= fun () -> (* Delay the computation of list lengths. *) tclZERO (SizeMismatch (CList.length initial.comb,CList.length l)) in Proof.List.fold_left2 err begin fun ((r,subgoals) as cur) goal a -> Solution.get >>= fun step -> match cleared_alias step goal with | None -> return cur | Some goal -> Comb.set [goal] >> i goal a r >>= fun r -> Proof.map (fun comb -> (r, comb :: subgoals)) Comb.get end (s,[]) initial.comb l >>= fun (r,subgoals) -> Solution.get >>= fun evd -> Comb.set CList.(undefined evd (flatten (rev subgoals))) >> return r (** Dispatch tacticals are used to apply a different tactic to each goal under focus. They come in two flavours: [tclDISPATCH] takes a list of [unit tactic]-s and build a [unit tactic]. [tclDISPATCHL] takes a list of ['a tactic] and returns an ['a list tactic]. They both work by applying each of the tactic in a focus restricted to the corresponding goal (starting with the first goal). In the case of [tclDISPATCHL], the tactic returns a list of the same size as the argument list (of tactics), each element being the result of the tactic executed in the corresponding goal. When the length of the tactic list is not the number of goal, raises [SizeMismatch (g,t)] where [g] is the number of available goals, and [t] the number of tactics passed. [tclDISPATCHGEN join tacs] generalises both functions as the successive results of [tacs] are stored in reverse order in a list, and [join] is used to convert the result into the expected form. *) let tclDISPATCHGEN0 join tacs = match tacs with | [] -> begin let open Proof in Comb.get >>= function | [] -> tclUNIT (join []) | comb -> tclZERO (SizeMismatch (CList.length comb,0)) end | [tac] -> begin let open Proof in Pv.get >>= function | { comb=[goal] ; solution } -> begin match cleared_alias solution goal with | None -> tclUNIT (join []) | Some _ -> Proof.map (fun res -> join [res]) tac end | {comb} -> tclZERO (SizeMismatch(CList.length comb,1)) end | _ -> let iter _ t cur = Proof.map (fun y -> y :: cur) t in let ans = fold_left2_goal iter [] tacs in Proof.map join ans let tclDISPATCHGEN join tacs = let branch t = InfoL.tag (Info.DBranch) t in let tacs = CList.map branch tacs in InfoL.tag (Info.Dispatch) (tclDISPATCHGEN0 join tacs) let tclDISPATCH tacs = tclDISPATCHGEN ignore tacs let tclDISPATCHL tacs = tclDISPATCHGEN CList.rev tacs (** [extend_to_list startxs rx endxs l] builds a list [startxs @ [rx,...,rx] @ endxs] of the same length as [l]. Raises [SizeMismatch] if [startxs @ endxs] is already longer than [l]. 
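For instance, if [l] has five elements, then [extend_to_list [a] r [b; c] l] builds the list [a; r; r; b; c] (where [a], [r], [b] and [c] stand for arbitrary elements).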
*) let extend_to_list startxs rx endxs l = (* spiwack: I use [l] essentially as a natural number *) let rec duplicate acc = function | [] -> acc | _::rest -> duplicate (rx::acc) rest in let rec tail to_match rest = match rest, to_match with | [] , _::_ -> raise (SizeMismatch(0,0)) (* placeholder *) | _::rest , _::to_match -> tail to_match rest | _ , [] -> duplicate endxs rest in let rec copy pref rest = match rest,pref with | [] , _::_ -> raise (SizeMismatch(0,0)) (* placeholder *) | _::rest, a::pref -> a::(copy pref rest) | _ , [] -> tail endxs rest in copy startxs l (** [tclEXTEND b r e] is a variant of {!tclDISPATCH}, where the [r] tactic is "repeated" enough time such that every goal has a tactic assigned to it ([b] is the list of tactics applied to the first goals, [e] to the last goals, and [r] is applied to every goal in between). *) let tclEXTEND tacs1 rtac tacs2 = let open Proof in Comb.get >>= fun comb -> try let tacs = extend_to_list tacs1 rtac tacs2 comb in tclDISPATCH tacs with SizeMismatch _ -> tclZERO (SizeMismatch( CList.length comb, (CList.length tacs1)+(CList.length tacs2))) (* spiwack: failure occurs only when the number of goals is too small. Hence we can assume that [rtac] is replicated 0 times for any error message. *) (** [tclEXTEND [] tac []]. *) let tclINDEPENDENT tac = let open Proof in Pv.get >>= fun initial -> match initial.comb with | [] -> tclUNIT () | [_] -> tac | _ -> let tac = InfoL.tag (Info.DBranch) tac in InfoL.tag (Info.Dispatch) (iter_goal (fun _ -> tac)) let tclINDEPENDENTL tac = let open Proof in Pv.get >>= fun initial -> match initial.comb with | [] -> tclUNIT [] | [_] -> tac >>= fun x -> return [x] | _ -> let tac = InfoL.tag (Info.DBranch) tac in InfoL.tag (Info.Dispatch) (map_goal (fun _ -> tac)) (** {7 Goal manipulation} *) (** Shelves all the goals under focus. *) let shelve = let open Proof in Comb.get >>= fun initial -> Comb.set [] >> InfoL.leaf (Info.Tactic (fun _ _ -> Pp.str"shelve")) >> let initial = CList.map drop_state initial in Pv.modify (fun pv -> { pv with solution = Evd.shelve pv.solution initial }) let shelve_goals l = let open Proof in Comb.get >>= fun initial -> let comb = CList.filter (fun g -> not (CList.mem (drop_state g) l)) initial in Comb.set comb >> InfoL.leaf (Info.Tactic (fun _ _ -> Pp.str"shelve_goals")) >> Pv.modify (fun pv -> { pv with solution = Evd.shelve pv.solution l }) (** [depends_on sigma src tgt] checks whether the goal [src] appears as an existential variable in the definition of the goal [tgt] in [sigma]. *) let depends_on sigma src tgt = let evi = Evd.find sigma tgt in Evar.Set.mem src (Evd.evars_of_filtered_evar_info sigma (Evarutil.nf_evar_info sigma evi)) let unifiable_delayed g l = CList.exists (fun (tgt, lazy evs) -> not (Evar.equal g tgt) && Evar.Set.mem g evs) l let free_evars sigma l = let cache = Evarutil.create_undefined_evars_cache () in let map ev = (* Computes the set of evars appearing in the hypotheses, the conclusion or the body of the evar_info [evi]. Note: since we want to use it on goals, the body is actually supposed to be empty. *) let evi = Evd.find sigma ev in let fevs = lazy (Evarutil.filtered_undefined_evars_of_evar_info ~cache sigma evi) in (ev, fevs) in List.map map l let free_evars_with_state sigma l = let cache = Evarutil.create_undefined_evars_cache () in let map ev = (* Computes the set of evars appearing in the hypotheses, the conclusion or the body of the evar_info [evi]. Note: since we want to use it on goals, the body is actually supposed to be empty. 
*) let ev = drop_state ev in let evi = Evd.find sigma ev in let fevs = lazy (Evarutil.filtered_undefined_evars_of_evar_info ~cache sigma evi) in (ev, fevs) in List.map map l (** [unifiable sigma g l] checks whether [g] appears in another subgoal of [l]. The list [l] may contain [g], but it does not affect the result. *) let unifiable_delayed_with_state sigma g l = let g = drop_state g in unifiable_delayed g l let unifiable sigma g l = let l = free_evars sigma l in unifiable_delayed g l (** [partition_unifiable sigma l] partitions [l] into a pair [(u,n)] where [u] is composed of the unifiable goals, i.e. the goals on whose definition other goals of [l] depend, and [n] are the non-unifiable goals. *) let partition_unifiable sigma l = let fevs = free_evars_with_state sigma l in CList.partition (fun g -> unifiable_delayed_with_state sigma g fevs) l (** Shelves the unifiable goals under focus, i.e. the goals which appear in other goals under focus (the unfocused goals are not considered). *) let shelve_unifiable_informative = let open Proof in Pv.get >>= fun initial -> let (u,n) = partition_unifiable initial.solution initial.comb in Comb.set n >> InfoL.leaf (Info.Tactic (fun _ _ -> Pp.str"shelve_unifiable")) >> let u = CList.map drop_state u in Pv.modify (fun pv -> { pv with solution = Evd.shelve pv.solution u }) >> tclUNIT u let shelve_unifiable = let open Proof in shelve_unifiable_informative >>= fun _ -> tclUNIT () (** [guard_no_unifiable] returns the list of unifiable goals if some goals are unifiable (see {!shelve_unifiable}) in the current focus. *) let guard_no_unifiable = let open Proof in Pv.get >>= fun initial -> let (u,n) = partition_unifiable initial.solution initial.comb in match u with | [] -> tclUNIT None | gls -> let l = CList.map (fun g -> Evd.dependent_evar_ident (drop_state g) initial.solution) gls in let l = CList.map (fun id -> Names.Name id) l in tclUNIT (Some l) (** [unshelve l p] moves all the goals in [l] from the shelf and put them at the end of the focused goals of p, if they are still undefined after [advance] *) let unshelve l p = let solution = Evd.unshelve p.solution l in let l = List.map with_empty_state l in (* advance the goals in case of clear *) let l = undefined p.solution l in { comb = p.comb@l; solution } let filter_shelf f pv = { pv with solution = Evd.filter_shelf f pv.solution } let mark_in_evm ~goal evd evars = let evd = if goal then let mark evd content = let info = Evd.find evd content in let info = { info with Evd.evar_source = match info.Evd.evar_source with (* Two kinds for goal evars: - GoalEvar (morally not dependent) - VarInstance (morally dependent of some name). This is a heuristic for naming these evars. 
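(* Concretely, the match below (re)classifies evars coming from a named
   [QuestionMark] or a named [ImplicitArg] as [VarInstance], keeps existing
   [VarInstance]/[GoalEvar] sources unchanged, and turns every other source
   into [GoalEvar]. *)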
*) | loc, (Evar_kinds.QuestionMark { Evar_kinds.qm_name=Names.Name id} | Evar_kinds.ImplicitArg (_,(_,Some id),_)) -> loc, Evar_kinds.VarInstance id | _, (Evar_kinds.VarInstance _ | Evar_kinds.GoalEvar) as x -> x | loc,_ -> loc,Evar_kinds.GoalEvar } in Evd.add evd content info in CList.fold_left mark evd evars else evd in let tcs = Evd.get_typeclass_evars evd in let evset = Evar.Set.of_list evars in Evd.set_typeclass_evars evd (Evar.Set.diff tcs evset) let with_shelf tac = let open Proof in Pv.get >>= fun pv -> let { solution } = pv in Pv.set { pv with solution = Evd.push_shelf @@ Evd.push_future_goals solution } >> tac >>= fun ans -> Pv.get >>= fun npv -> let { solution = sigma } = npv in let gls, sigma = Evd.pop_shelf sigma in (* The pending future goals are necessarily coming from V82.tactic *) (* and thus considered as to shelve, as in Proof.run_tactic *) let fgl, sigma = Evd.pop_future_goals sigma in (* Ensure we mark and return only unsolved goals *) let gls' = CList.rev_append fgl.Evd.FutureGoals.comb gls in let gls' = undefined_evars sigma gls' in let sigma = mark_in_evm ~goal:false sigma gls' in let npv = { npv with solution = sigma } in Pv.set npv >> tclUNIT (gls', ans) (** [goodmod p m] computes the representative of [p] modulo [m] in the interval [[0,m-1]].*) let goodmod p m = if m = 0 then 0 else let p' = p mod m in (* if [n] is negative [n mod l] is negative of absolute value less than [l], so [(n mod l)+l] is the representative of [n] in the interval [[0,l-1]].*) if p' < 0 then p'+m else p' let cycle n = let open Proof in InfoL.leaf (Info.Tactic (fun _ _ -> Pp.(str"cycle "++int n))) >> Comb.modify begin fun initial -> let l = CList.length initial in let n' = goodmod n l in let (front,rear) = CList.chop n' initial in rear@front end let swap i j = let open Proof in InfoL.leaf (Info.Tactic (fun _ _ -> Pp.(hov 2 (str"swap"++spc()++int i++spc()++int j)))) >> Comb.modify begin fun initial -> let l = CList.length initial in let i = if i>0 then i-1 else i and j = if j>0 then j-1 else j in let i = goodmod i l and j = goodmod j l in CList.map_i begin fun k x -> match k with | k when Int.equal k i -> CList.nth initial j | k when Int.equal k j -> CList.nth initial i | _ -> x end 0 initial end let revgoals = let open Proof in InfoL.leaf (Info.Tactic (fun _ _ -> Pp.str"revgoals")) >> Comb.modify CList.rev let numgoals = let open Proof in Comb.get >>= fun comb -> return (CList.length comb) (** {7 Access primitives} *) let tclEVARMAP = Solution.get let tclENV = Env.get (** {7 Put-like primitives} *) let emit_side_effects eff x = { x with solution = Evd.emit_side_effects eff x.solution } let tclEFFECTS eff = let open Proof in return () >>= fun () -> (* The Global.env should be taken at exec time *) Env.set (Global.env ()) >> Pv.modify (fun initial -> emit_side_effects eff initial) let mark_as_unsafe = Status.put false (** Gives up on the goal under focus. Reports an unsafe status. Proofs with given up goals cannot be closed. 
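    For instance (an illustrative sketch, not part of the original comment),
    a user of this module can give up on the first goal only, keeping the
    other goals focused, with:

      tclFOCUS 1 1 give_up

    after which the proof is flagged unsafe and can no longer be closed.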
*) let give_up evs pv = let solution = List.fold_left (fun sigma ev -> Evd.give_up (drop_state ev) sigma) pv.solution evs in { pv with solution } let give_up = let open Proof in Comb.get >>= fun initial -> Comb.set [] >> mark_as_unsafe >> InfoL.leaf (Info.Tactic (fun _ _ -> Pp.str"give_up")) >> Pv.modify (give_up initial) (** {7 Control primitives} *) module Progress = struct let eq_constr evd extended_evd = Evarutil.eq_constr_univs_test ~evd ~extended_evd (** equality function on hypothesis contexts *) let eq_named_context_val sigma1 sigma2 ctx1 ctx2 = let c1 = EConstr.named_context_of_val ctx1 and c2 = EConstr.named_context_of_val ctx2 in let eq_named_declaration d1 d2 = match d1, d2 with | LocalAssum (i1,t1), LocalAssum (i2,t2) -> Context.eq_annot Names.Id.equal i1 i2 && eq_constr sigma1 sigma2 t1 t2 | LocalDef (i1,c1,t1), LocalDef (i2,c2,t2) -> Context.eq_annot Names.Id.equal i1 i2 && eq_constr sigma1 sigma2 c1 c2 && eq_constr sigma1 sigma2 t1 t2 | _ -> false in List.equal eq_named_declaration c1 c2 let eq_evar_body sigma1 sigma2 b1 b2 = let open Evd in match b1, b2 with | Evar_empty, Evar_empty -> true | Evar_defined t1, Evar_defined t2 -> eq_constr sigma1 sigma2 t1 t2 | _ -> false let eq_evar_info sigma1 sigma2 ei1 ei2 = let open Evd in eq_constr sigma1 sigma2 ei1.evar_concl ei2.evar_concl && eq_named_context_val sigma1 sigma2 (ei1.evar_hyps) (ei2.evar_hyps) && eq_evar_body sigma1 sigma2 ei1.evar_body ei2.evar_body (** Equality function on goals *) let goal_equal ~evd ~extended_evd evar extended_evar = let evi = Evd.find evd evar in let extended_evi = Evd.find extended_evd extended_evar in eq_evar_info evd extended_evd evi extended_evi end let tclPROGRESS t = let open Proof in Pv.get >>= fun initial -> t >>= fun res -> Pv.get >>= fun final -> (* [*_test] test absence of progress. [quick_test] is approximate whereas [exhaustive_test] is complete. *) let quick_test = initial.solution == final.solution && initial.comb == final.comb in let test = quick_test || (CList.same_length initial.comb final.comb && Util.List.for_all2eq begin fun i f -> Progress.goal_equal ~evd:initial.solution ~extended_evd:final.solution (drop_state i) (drop_state f) end initial.comb final.comb) in if not test then tclUNIT res else let info = Exninfo.reify () in tclZERO ~info (CErrors.UserError Pp.(str "Failed to progress.")) let _ = CErrors.register_handler begin function | Logic_monad.Tac_Timeout -> Some (Pp.str "[Proofview.tclTIMEOUT] Tactic timeout!") | _ -> None end let tclTIMEOUTF n t = let open Proof in (* spiwack: as one of the monad is a continuation passing monad, it doesn't force the computation to be threaded inside the underlying (IO) monad. Hence I force it myself by asking for the evaluation of a dummy value first, lest [timeout] be called when everything has already been computed. 
*) let t = Proof.lift (Logic_monad.NonLogical.return ()) >> t in Proof.get >>= fun initial -> Proof.current >>= fun envvar -> Proof.lift begin let open Logic_monad.NonLogical in timeout n (Proof.repr (Proof.run t envvar initial)) >>= fun r -> match r with | None -> return (Util.Inr (Logic_monad.Tac_Timeout, Exninfo.null)) | Some (Logic_monad.Nil e) -> return (Util.Inr e) | Some (Logic_monad.Cons (r, _)) -> return (Util.Inl r) end >>= function | Util.Inl (res,s,m,i) -> Proof.set s >> Proof.put m >> Proof.update (fun _ -> i) >> return res | Util.Inr (e, info) -> tclZERO ~info e let tclTIMEOUT n t = tclTIMEOUTF (float_of_int n) t let tclTIME s t = let pr_time t1 t2 n msg = let msg = if n = 0 then str msg else str (msg ^ " after ") ++ int n ++ str (String.plural n " backtracking") in Feedback.msg_info(str "Tactic call" ++ pr_opt str s ++ str " ran for " ++ System.fmt_time_difference t1 t2 ++ str " " ++ surround msg) in let rec aux n t = let open Proof in tclUNIT () >>= fun () -> let tstart = System.get_time() in Proof.split t >>= let open Logic_monad in function | Nil (e, info) -> begin let tend = System.get_time() in pr_time tstart tend n "failure"; tclZERO ~info e end | Cons (x,k) -> let tend = System.get_time() in pr_time tstart tend n "success"; tclOR (tclUNIT x) (fun e -> aux (n+1) (k e)) in aux 0 t let tclProofInfo = let open Proof in Logical.current >>= fun P.{name; poly} -> tclUNIT (name, poly) (** {7 Unsafe primitives} *) module Unsafe = struct let (>>=) = tclBIND let tclEVARS evd = Pv.modify (fun ps -> { ps with solution = evd }) let tclNEWGOALS gls = Pv.modify begin fun step -> let gls = undefined step.solution gls in { step with comb = step.comb @ gls } end let tclNEWSHELVED gls = Pv.modify begin fun step -> let gls = undefined_evars step.solution gls in { step with solution = Evd.shelve step.solution gls } end let tclGETSHELF = tclEVARMAP >>= fun sigma -> tclUNIT @@ Evd.shelf sigma let tclSETENV = Env.set let tclGETGOALS = Comb.get let tclSETGOALS = Comb.set let tclEVARSADVANCE evd = Pv.modify (fun ps -> { solution = evd; comb = undefined evd ps.comb }) let tclEVARUNIVCONTEXT ctx = Pv.modify (fun ps -> { ps with solution = Evd.set_universe_context ps.solution ctx }) let push_future_goals p = { p with solution = Evd.push_future_goals p.solution } let mark_as_goals evd content = mark_in_evm ~goal:true evd content let advance = Evarutil.advance let undefined = undefined let mark_unresolvables evm evs = mark_in_evm ~goal:false evm evs let mark_as_unresolvables p evs = { p with solution = mark_in_evm ~goal:false p.solution evs } let update_sigma_univs ugraph pv = { pv with solution = Evd.update_sigma_univs ugraph pv.solution } end module UnsafeRepr = Proof.Unsafe let (>>=) = tclBIND (** {6 Goal-dependent tactics} *) let goal_env env evars gl = let evi = Evd.find evars gl in Evd.evar_filtered_env env evi let goal_nf_evar sigma gl = let evi = Evd.find sigma gl in let evi = Evarutil.nf_evar_info sigma evi in let sigma = Evd.add sigma gl evi in (gl, sigma) let catchable_exception = function | Logic_monad.Exception _ -> false | e -> CErrors.noncritical e module Goal = struct type t = { env : Environ.env; sigma : Evd.evar_map; concl : EConstr.constr ; state : StateStore.t; self : Evar.t ; (* for compatibility with old-style definitions *) } let print { sigma; self } = { Evd.it = self; sigma } let state { state=state } = state let env {env} = env let sigma {sigma} = sigma let hyps {env} = EConstr.named_context env let concl {concl} = concl let gmake_with info env sigma goal state = { env = 
Environ.reset_with_named_context (Evd.evar_filtered_hyps info) env ; sigma = sigma ; concl = Evd.evar_concl info; state = state ; self = goal } let nf_gmake env sigma goal = let state = get_state goal in let goal = drop_state goal in let info = Evarutil.nf_evar_info sigma (Evd.find sigma goal) in let sigma = Evd.add sigma goal info in gmake_with info env sigma goal state , sigma let nf_enter f = InfoL.tag (Info.Dispatch) begin iter_goal begin fun goal -> tclENV >>= fun env -> tclEVARMAP >>= fun sigma -> try let (gl, sigma) = nf_gmake env sigma goal in tclTHEN (Unsafe.tclEVARS sigma) (InfoL.tag (Info.DBranch) (f gl)) with e when catchable_exception e -> let (e, info) = Exninfo.capture e in tclZERO ~info e end end let gmake env sigma goal = let state = get_state goal in let goal = drop_state goal in let info = Evd.find sigma goal in gmake_with info env sigma goal state let enter f = let f gl = InfoL.tag (Info.DBranch) (f gl) in InfoL.tag (Info.Dispatch) begin iter_goal begin fun goal -> Env.get >>= fun env -> tclEVARMAP >>= fun sigma -> try f (gmake env sigma goal) with e when catchable_exception e -> let (e, info) = Exninfo.capture e in tclZERO ~info e end end let enter_one ?(__LOC__=__LOC__) f = let open Proof in Comb.get >>= function | [goal] -> begin Env.get >>= fun env -> tclEVARMAP >>= fun sigma -> try f (gmake env sigma goal) with e when catchable_exception e -> let (e, info) = Exninfo.capture e in tclZERO ~info e end | _ -> CErrors.anomaly Pp.(str __LOC__ ++ str " enter_one") let goals = Pv.get >>= fun step -> let sigma = step.solution in let map goal = match cleared_alias sigma goal with | None -> None (* ppedrot: Is this check really necessary? *) | Some goal -> let gl = Env.get >>= fun env -> tclEVARMAP >>= fun sigma -> tclUNIT (gmake env sigma goal) in Some gl in tclUNIT (CList.map_filter map step.comb) let unsolved { self=self } = tclEVARMAP >>= fun sigma -> tclUNIT (not (Option.is_empty (Evarutil.advance sigma self))) (* compatibility *) let goal { self=self } = self end (** {6 Trace} *) module Trace = struct let record_info_trace = InfoL.record_trace let log m = InfoL.leaf (Info.Msg m) let name_tactic m t = InfoL.tag (Info.Tactic m) t let pr_info env sigma ?(lvl=0) info = assert (lvl >= 0); Info.(print env sigma (collapse lvl info)) end (** {6 Non-logical state} *) module NonLogical = Logic_monad.NonLogical let tclLIFT = Proof.lift let tclCHECKINTERRUPT = tclLIFT (NonLogical.make Control.check_for_interrupt) (*** Compatibility layer with <= 8.2 tactics ***) module V82 = struct type tac = Evar.t Evd.sigma -> Evar.t list Evd.sigma let tactic ?(nf_evars=true) tac = (* spiwack: we ignore the dependencies between goals here, expectingly preserving the semantics of <= 8.2 tactics *) (* spiwack: convenience notations, waiting for ocaml 3.12 *) let open Proof in Pv.get >>= fun ps -> try let tac g_w_s evd = let g, w = drop_state g_w_s, get_state g_w_s in let glsigma = tac { Evd.it = g ; sigma = evd; } in let sigma = glsigma.Evd.sigma in let g = CList.map (fun g -> goal_with_state g w) glsigma.Evd.it in ( g, sigma ) in (* Old style tactics expect the goals normalized with respect to evars. 
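(hence the [nf_evars] flag of [tactic] above, which defaults to [true] and
   makes us run [goal_nf_evar] on every goal before handing it to [tac])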
*) let (initgoals_w_state, initevd) = Evd.Monad.List.map (fun g_w_s s -> let g, w = drop_state g_w_s, get_state g_w_s in let g, s = if nf_evars then goal_nf_evar s g else g, s in goal_with_state g w, s) ps.comb ps.solution in let (goalss,evd) = Evd.Monad.List.map tac initgoals_w_state initevd in let sgs = CList.flatten goalss in let sgs = undefined evd sgs in InfoL.leaf (Info.Tactic (fun _ _ -> Pp.str"")) >> Pv.set { solution = evd; comb = sgs; } with e when catchable_exception e -> let (e, info) = Exninfo.capture e in tclZERO ~info e (* normalises the evars in the goals, and stores the result in solution. *) let nf_evar_goals = Pv.modify begin fun ps -> let map g s = goal_nf_evar s g in let comb = CList.map drop_state ps.comb in let (_goals,evd) = Evd.Monad.List.map map comb ps.solution in { ps with solution = evd; } end let has_unresolved_evar pv = Evd.has_undefined pv.solution let top_goals initial { solution=solution; } = let goals = CList.map (fun (t,_) -> fst (Constr.destEvar (EConstr.Unsafe.to_constr t))) initial in { Evd.it = goals ; sigma=solution; } let top_evars initial { solution=sigma; } = let evars_of_initial (c,_) = Evar.Set.elements (Evd.evar_nodes_of_term c) in CList.flatten (CList.map evars_of_initial initial) let of_tactic t gls = try let env = Global.env () in let init = { solution = gls.Evd.sigma ; comb = [with_empty_state gls.Evd.it] } in let name, poly = Names.Id.of_string "legacy_pe", false in let (_,final,_,_) = apply ~name ~poly (goal_env env gls.Evd.sigma gls.Evd.it) t init in { Evd.sigma = final.solution ; it = CList.map drop_state final.comb } with Logic_monad.TacticFailure e as src -> let (_, info) = Exninfo.capture src in Exninfo.iraise (e, info) let put_status = Status.put let catchable_exception = catchable_exception let wrap_exceptions f = try f () with e when catchable_exception e -> let (e, info) = Exninfo.capture e in tclZERO ~info e end (** {7 Notations} *) module Notations = struct let (>>=) = tclBIND let (<*>) = tclTHEN let (<+>) t1 t2 = tclOR t1 (fun _ -> t2) end coq-8.15.0/engine/proofview.mli000066400000000000000000000630141417001151100163520ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Evar.t list * Evd.evar_map (** {6 Starting and querying a proof view} *) (** Abstract representation of the initial goals of a proof. *) type entry (** Optimize memory consumption *) val compact : entry -> proofview -> entry * proofview (** Initialises a proofview, the main argument is a list of environments (including a [named_context] which are used as hypotheses) pair with conclusion types, creating accordingly many initial goals. Because a proof does not necessarily starts in an empty [evar_map] (indeed a proof can be triggered by an incomplete pretyping), [init] takes an additional argument to represent the initial [evar_map]. *) val init : Evd.evar_map -> (Environ.env * types) list -> entry * proofview (** A [telescope] is a list of environment and conclusion like in {!init}, except that each element may depend on the previous goals. The telescope passes the goals in the form of a [Term.constr] which represents the goal as an [evar]. The [evar_map] is threaded in state passing style. *) type telescope = | TNil of Evd.evar_map | TCons of Environ.env * Evd.evar_map * types * (Evd.evar_map -> constr -> telescope) (** Like {!init}, but goals are allowed to be dependent on one another. 
Dependencies between goals is represented with the type [telescope] instead of [list]. Note that the first [evar_map] of the telescope plays the role of the [evar_map] argument in [init]. *) val dependent_init : telescope -> entry * proofview (** [finished pv] is [true] if and only if [pv] is complete. That is, if it has an empty list of focused goals. There could still be unsolved subgoals, but they would then be out of focus. *) val finished : proofview -> bool (** Returns the current [evar] state. *) val return : proofview -> Evd.evar_map val partial_proof : entry -> proofview -> constr list val initial_goals : entry -> (constr * types) list (** goal <-> goal_with_state *) val with_empty_state : Proofview_monad.goal -> Proofview_monad.goal_with_state val drop_state : Proofview_monad.goal_with_state -> Proofview_monad.goal val goal_with_state : Proofview_monad.goal -> Proofview_monad.StateStore.t -> Proofview_monad.goal_with_state (** {6 Focusing commands} *) (** A [focus_context] represents the part of the proof view which has been removed by a focusing action, it can be used to unfocus later on. *) type focus_context (** Returns a stylised view of a focus_context for use by, for instance, ide-s. *) (* spiwack: the type of [focus_context] will change as we push more refined functions to ide-s. This would be better than spawning a new nearly identical function every time. Hence the generic name. *) (* In this version: the goals in the context, as a "zipper" (the first list is in reversed order). *) val focus_context : focus_context -> Evar.t list * Evar.t list (** [focus i j] focuses a proofview on the goals from index [i] to index [j] (inclusive, goals are indexed from [1]). I.e. goals number [i] to [j] become the only focused goals of the returned proofview. It returns the focused proofview, and a context for the focus stack. *) val focus : int -> int -> proofview -> proofview * focus_context (** Unfocuses a proofview with respect to a context. *) val unfocus : focus_context -> proofview -> proofview (** {6 The tactic monad} *) (** - Tactics are objects which apply a transformation to all the subgoals of the current view at the same time. By opposition to the old vision of applying it to a single goal. It allows tactics such as [shelve_unifiable], tactics to reorder the focused goals, or global automation tactic for dependent subgoals (instantiating an evar has influences on the other goals of the proof in progress, not being able to take that into account causes the current eauto tactic to fail on some instances where it could succeed). Another benefit is that it is possible to write tactics that can be executed even if there are no focused goals. - Tactics form a monad ['a tactic], in a sense a tactic can be seen as a function (without argument) which returns a value of type 'a and modifies the environment (in our case: the view). Tactics of course have arguments, but these are given at the meta-level as OCaml functions. Most tactics in the sense we are used to return [()], that is no really interesting values. But some might pass information around. The tactics seen in Coq's Ltac are (for now at least) only [unit tactic], the return values are kept for the OCaml toolkit. The operation or the monad are [Proofview.tclUNIT] (which is the "return" of the tactic monad) [Proofview.tclBIND] (which is the "bind") and [Proofview.tclTHEN] (which is a specialized bind on unit-returning tactics). - Tactics have support for full-backtracking. 
Tactics can be seen having multiple success: if after returning the first success a failure is encountered, the tactic can backtrack and use a second success if available. The state is backtracked to its previous value, except the non-logical state defined in the {!NonLogical} module below. *) (** The abstract type of tactics *) type +'a tactic (** Applies a tactic to the current proofview. Returns a tuple [a,pv,(b,sh,gu)] where [a] is the return value of the tactic, [pv] is the updated proofview, [b] a boolean which is [true] if the tactic has not done any action considered unsafe (such as admitting a lemma), [sh] is the list of goals which have been shelved by the tactic, and [gu] the list of goals on which the tactic has given up. In case of multiple success the first one is selected. If there is no success, fails with {!Logic_monad.TacticFailure}*) val apply : name:Names.Id.t -> poly:bool -> Environ.env -> 'a tactic -> proofview -> 'a * proofview * bool * Proofview_monad.Info.tree (** {7 Monadic primitives} *) (** Unit of the tactic monad. *) val tclUNIT : 'a -> 'a tactic (** Bind operation of the tactic monad. *) val tclBIND : 'a tactic -> ('a -> 'b tactic) -> 'b tactic (** Interprets the ";" (semicolon) of Ltac. As a monadic operation, it's a specialized "bind". *) val tclTHEN : unit tactic -> 'a tactic -> 'a tactic (** [tclIGNORE t] has the same operational content as [t], but drops the returned value. *) val tclIGNORE : 'a tactic -> unit tactic (** Generic monadic combinators for tactics. *) module Monad : Monad.S with type +'a t = 'a tactic (** {7 Failure and backtracking} *) (** [tclZERO e] fails with exception [e]. It has no success. Exception is supposed to be non critical *) val tclZERO : ?info:Exninfo.info -> exn -> 'a tactic (** [tclOR t1 t2] behaves like [t1] as long as [t1] succeeds. Whenever the successes of [t1] have been depleted and it failed with [e], then it behaves as [t2 e]. In other words, [tclOR] inserts a backtracking point. In [t2], exception can be assumed non critical. *) val tclOR : 'a tactic -> (Exninfo.iexn -> 'a tactic) -> 'a tactic (** [tclORELSE t1 t2] is equal to [t1] if [t1] has at least one success or [t2 e] if [t1] fails with [e]. It is analogous to [try/with] handler of exception in that it is not a backtracking point. In [t2], exception can be assumed non critical. *) val tclORELSE : 'a tactic -> (Exninfo.iexn -> 'a tactic) -> 'a tactic (** [tclIFCATCH a s f] is a generalisation of {!tclORELSE}: if [a] succeeds at least once then it behaves as [tclBIND a s] otherwise, if [a] fails with [e], then it behaves as [f e]. In [f] exception can be assumed non critical. *) val tclIFCATCH : 'a tactic -> ('a -> 'b tactic) -> (Exninfo.iexn -> 'b tactic) -> 'b tactic (** [tclONCE t] behave like [t] except it has at most one success: [tclONCE t] stops after the first success of [t]. If [t] fails with [e], [tclONCE t] also fails with [e]. *) val tclONCE : 'a tactic -> 'a tactic (** [tclEXACTLY_ONCE e t] succeeds as [t] if [t] has exactly one success. Otherwise it fails. The tactic [t] is run until its first success, then a failure with exception [e] is simulated ([e] has to be non critical). If [t] yields another success, then [tclEXACTLY_ONCE e t] fails with [MoreThanOneSuccess] (it is a user error). Otherwise, [tclEXACTLY_ONCE e t] succeeds with the first success of [t]. Notice that the choice of [e] is relevant, as the presence of further successes may depend on [e] (see {!tclOR}). 
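    For example (an illustrative sketch, not part of the original
    documentation), a fresh, non-critical exception is a sensible choice for
    [e]:

      exception Local
      let exactly_once (t : 'a tactic) : 'a tactic = tclEXACTLY_ONCE Local t

    [exactly_once t] then behaves like the first success of [t] if it is the
    only one, fails with [MoreThanOneSuccess] if [t] can backtrack to a second
    success, and fails like [t] if [t] has no success at all.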
*) exception MoreThanOneSuccess val tclEXACTLY_ONCE : exn -> 'a tactic -> 'a tactic (** [tclCASE t] splits [t] into its first success and a continuation. It is the most general primitive to control backtracking. *) type 'a case = | Fail of Exninfo.iexn | Next of 'a * (Exninfo.iexn -> 'a tactic) val tclCASE : 'a tactic -> 'a case tactic (** [tclBREAK p t] is a generalization of [tclONCE t]. Instead of stopping after the first success, it succeeds like [t] until a failure with an exception [e] such that [p e = Some e'] is raised. At which point it drops the remaining successes, failing with [e']. [tclONCE t] is equivalent to [tclBREAK (fun e -> Some e) t]. *) val tclBREAK : (Exninfo.iexn -> Exninfo.iexn option) -> 'a tactic -> 'a tactic (** {7 Focusing tactics} *) (** [tclFOCUS i j t] applies [t] after focusing on the goals number [i] to [j] (see {!focus}). The rest of the goals is restored after the tactic action. If the specified range doesn't correspond to existing goals, fails with the [nosuchgoal] argument, by default raising [NoSuchGoals] (a user error). This exception is caught at toplevel with a default message. *) exception NoSuchGoals of int val tclFOCUS : ?nosuchgoal:'a tactic -> int -> int -> 'a tactic -> 'a tactic (** [tclFOCUSLIST li t] applies [t] on the list of focused goals described by [li]. Each element of [li] is a pair [(i, j)] denoting the goals numbered from [i] to [j] (inclusive, starting from 1). It will try to apply [t] to all the valid goals in any of these intervals. If the set of such goals is not a single range, then it will move goals such that it is a single range. (So, for instance, [[1, 3-5]; idtac.] is not the identity.) If the set of such goals is empty, it will fail with [nosuchgoal], by default raising [NoSuchGoals 0]. *) val tclFOCUSLIST : ?nosuchgoal:'a tactic -> (int * int) list -> 'a tactic -> 'a tactic (** [tclFOCUSID x t] applies [t] on a (single) focused goal like {!tclFOCUS}. The goal is found by its name rather than its number. Fails with [nosuchgoal], by default raising [NoSuchGoals 1]. *) val tclFOCUSID : ?nosuchgoal:'a tactic -> Names.Id.t -> 'a tactic -> 'a tactic (** [tclTRYFOCUS i j t] behaves like {!tclFOCUS}, except that if the specified range doesn't correspond to existing goals, behaves like [tclUNIT ()] instead of failing. *) val tclTRYFOCUS : int -> int -> unit tactic -> unit tactic (** {7 Dispatching on goals} *) (** Dispatch tacticals are used to apply a different tactic to each goal under focus. They come in two flavours: [tclDISPATCH] takes a list of [unit tactic]-s and build a [unit tactic]. [tclDISPATCHL] takes a list of ['a tactic] and returns an ['a list tactic]. They both work by applying each of the tactic in a focus restricted to the corresponding goal (starting with the first goal). In the case of [tclDISPATCHL], the tactic returns a list of the same size as the argument list (of tactics), each element being the result of the tactic executed in the corresponding goal. When the length of the tactic list is not the number of goal, raises [SizeMismatch (g,t)] where [g] is the number of available goals, and [t] the number of tactics passed. 
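    For example (an illustrative sketch; [tac1], [tac2] and [tac3] stand for
    arbitrary [unit tactic] values), with exactly three goals under focus:

      tclDISPATCH [tac1; tac2; tac3]

    applies [tac1] to the first goal, [tac2] to the second and [tac3] to the
    third; with only two goals under focus it raises [SizeMismatch (2,3)].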
*) exception SizeMismatch of int*int val tclDISPATCH : unit tactic list -> unit tactic val tclDISPATCHL : 'a tactic list -> 'a list tactic (** [tclEXTEND b r e] is a variant of {!tclDISPATCH}, where the [r] tactic is "repeated" enough time such that every goal has a tactic assigned to it ([b] is the list of tactics applied to the first goals, [e] to the last goals, and [r] is applied to every goal in between). *) val tclEXTEND : unit tactic list -> unit tactic -> unit tactic list -> unit tactic (** [tclINDEPENDENT tac] runs [tac] on each goal successively, from the first one to the last one. Backtracking in one goal is independent of backtracking in another. It is equivalent to [tclEXTEND [] tac []]. *) val tclINDEPENDENT : unit tactic -> unit tactic val tclINDEPENDENTL: 'a tactic -> 'a list tactic (** {7 Goal manipulation} *) (** Shelves all the goals under focus. The goals are placed on the shelf for later use (or being solved by side-effects). *) val shelve : unit tactic (** Shelves the given list of goals, which might include some that are under focus and some that aren't. All the goals are placed on the shelf for later use (or being solved by side-effects). *) val shelve_goals : Evar.t list -> unit tactic (** [unifiable sigma g l] checks whether [g] appears in another subgoal of [l]. The list [l] may contain [g], but it does not affect the result. Used by [shelve_unifiable]. *) val unifiable : Evd.evar_map -> Evar.t -> Evar.t list -> bool (** Shelves the unifiable goals under focus, i.e. the goals which appear in other goals under focus (the unfocused goals are not considered). *) val shelve_unifiable : unit tactic (** [guard_no_unifiable] returns the list of unifiable goals if some goals are unifiable (see {!shelve_unifiable}) in the current focus. *) val guard_no_unifiable : Names.Name.t list option tactic (** [unshelve l p] moves all the goals in [l] from the shelf and put them at the end of the focused goals of p, if they are still undefined after [advance] *) val unshelve : Evar.t list -> proofview -> proofview val filter_shelf : (Evar.t -> bool) -> proofview -> proofview (** [depends_on g1 g2 sigma] checks if g1 occurs in the type/ctx of g2 *) val depends_on : Evd.evar_map -> Evar.t -> Evar.t -> bool (** [with_shelf tac] executes [tac] and returns its result together with the set of goals shelved by [tac]. The current shelf is unchanged and the returned list contains only unsolved goals. *) val with_shelf : 'a tactic -> (Evar.t list * 'a) tactic (** If [n] is positive, [cycle n] puts the [n] first goal last. If [n] is negative, then it puts the [n] last goals first.*) val cycle : int -> unit tactic (** [swap i j] swaps the position of goals number [i] and [j] (negative numbers can be used to address goals from the end. Goals are indexed from [1]. For simplicity index [0] corresponds to goal [1] as well, rather than raising an error. *) val swap : int -> int -> unit tactic (** [revgoals] reverses the list of focused goals. *) val revgoals : unit tactic (** [numgoals] returns the number of goals under focus. *) val numgoals : int tactic (** {7 Access primitives} *) (** [tclEVARMAP] doesn't affect the proof, it returns the current [evar_map]. *) val tclEVARMAP : Evd.evar_map tactic (** [tclENV] doesn't affect the proof, it returns the current environment. It is not the environment of a particular goal, rather the "global" environment of the proof. The goal-wise environment is obtained via {!Proofview.Goal.env}. 
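    For instance (an illustrative sketch using only functions declared in this
    interface), a tactic that needs the goal-wise environment is typically
    written with {!Goal.enter} rather than with [tclENV]:

      Goal.enter (fun gl ->
        let env = Goal.env gl in
        let sigma = Goal.sigma gl in
        let concl = Goal.concl gl in
        (* ... build a [unit tactic] from [env], [sigma] and [concl] ... *)
        tclUNIT ())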
*) val tclENV : Environ.env tactic (** {7 Put-like primitives} *) (** [tclEFFECTS eff] add the effects [eff] to the current state. *) val tclEFFECTS : Evd.side_effects -> unit tactic (** [mark_as_unsafe] declares the current tactic is unsafe. *) val mark_as_unsafe : unit tactic (** Gives up on the goal under focus. Reports an unsafe status. Proofs with given up goals cannot be closed. *) val give_up : unit tactic (** {7 Control primitives} *) (** [tclPROGRESS t] checks the state of the proof after [t]. It it is identical to the state before, then [tclPROGRESS t] fails, otherwise it succeeds like [t]. *) val tclPROGRESS : 'a tactic -> 'a tactic module Progress : sig (** [goal_equal ~evd ~extended_evd evar extended_evar] tests whether the [evar_info] from [evd] corresponding to [evar] is equal to that from [extended_evd] corresponding to [extended_evar], up to existential variable instantiation and equalisable universes. The universe constraints in [extended_evd] are assumed to be an extension of the universe constraints in [evd]. *) val goal_equal : evd:Evd.evar_map -> extended_evd:Evd.evar_map -> Evar.t -> Evar.t -> bool end (** Checks for interrupts *) val tclCHECKINTERRUPT : unit tactic (** [tclTIMEOUT n t] can have only one success. In case of timeout it fails with [tclZERO Tac_Timeout]. *) val tclTIMEOUTF : float -> 'a tactic -> 'a tactic val tclTIMEOUT : int -> 'a tactic -> 'a tactic (** [tclTIME s t] displays time for each atomic call to t, using s as an identifying annotation if present *) val tclTIME : string option -> 'a tactic -> 'a tactic (** Internal, don't use. *) val tclProofInfo : (Names.Id.t * bool) tactic [@@ocaml.deprecated "internal, don't use"] (** {7 Unsafe primitives} *) (** The primitives in the [Unsafe] module should be avoided as much as possible, since they can make the proof state inconsistent. They are nevertheless helpful, in particular when interfacing the pretyping and the proof engine. *) module Unsafe : sig (** [tclEVARS sigma] replaces the current [evar_map] by [sigma]. If [sigma] has new unresolved [evar]-s they will not appear as goal. If goals have been solved in [sigma] they will still appear as unsolved goals. *) val tclEVARS : Evd.evar_map -> unit tactic (** Like {!tclEVARS} but also checks whether goals have been solved. *) val tclEVARSADVANCE : Evd.evar_map -> unit tactic (** Set the global environment of the tactic *) val tclSETENV : Environ.env -> unit tactic (** [tclNEWGOALS gls] adds the goals [gls] to the ones currently being proved, appending them to the list of focused goals. If a goal is already solved, it is not added. *) val tclNEWGOALS : Proofview_monad.goal_with_state list -> unit tactic (** [tclNEWSHELVED gls] adds the goals [gls] to the shelf. If a goal is already solved, it is not added. *) val tclNEWSHELVED : Evar.t list -> unit tactic (** [tclSETGOALS gls] sets goals [gls] as the goals being under focus. If a goal is already solved, it is not set. *) val tclSETGOALS : Proofview_monad.goal_with_state list -> unit tactic (** [tclGETGOALS] returns the list of goals under focus. *) val tclGETGOALS : Proofview_monad.goal_with_state list tactic (** [tclGETSHELF] returns the list of goals on the shelf. *) val tclGETSHELF : Evar.t list tactic (** Sets the evar universe context. *) val tclEVARUNIVCONTEXT : UState.t -> unit tactic (** Clears the future goals store in the proof view. 
*) val push_future_goals : proofview -> proofview (** Give the evars the status of a goal (changes their source location and makes them unresolvable for type classes. *) val mark_as_goals : Evd.evar_map -> Evar.t list -> Evd.evar_map (** Make some evars unresolvable for type classes. We need two functions as some functions use the proofview and others directly manipulate the undelying evar_map. *) val mark_unresolvables : Evd.evar_map -> Evar.t list -> Evd.evar_map val mark_as_unresolvables : proofview -> Evar.t list -> proofview (** [advance sigma g] returns [Some g'] if [g'] is undefined and is the current avatar of [g] (for instance [g] was changed by [clear] into [g']). It returns [None] if [g] has been (partially) solved. *) val advance : Evd.evar_map -> Evar.t -> Evar.t option (** [undefined sigma l] applies [advance] to the goals of [l], then returns the subset of resulting goals which have not yet been defined *) val undefined : Evd.evar_map -> Proofview_monad.goal_with_state list -> Proofview_monad.goal_with_state list (** [update_sigma_univs] lifts [UState.update_sigma_univs] to the proofview *) val update_sigma_univs : UGraph.t -> proofview -> proofview end (** This module gives access to the innards of the monad. Its use is restricted to very specific cases. *) module UnsafeRepr : sig type state = Proofview_monad.Logical.Unsafe.state val repr : 'a tactic -> ('a, state, state, Exninfo.iexn) Logic_monad.BackState.t val make : ('a, state, state, Exninfo.iexn) Logic_monad.BackState.t -> 'a tactic end (** {6 Goal-dependent tactics} *) module Goal : sig (** Type of goals. *) type t (** [concl], [hyps], [env] and [sigma] given a goal [gl] return respectively the conclusion of [gl], the hypotheses of [gl], the environment of [gl] (i.e. the global environment and the hypotheses) and the current evar map. *) val concl : t -> constr val hyps : t -> named_context val env : t -> Environ.env val sigma : t -> Evd.evar_map val state : t -> Proofview_monad.StateStore.t (** [nf_enter t] applies the goal-dependent tactic [t] in each goal independently, in the manner of {!tclINDEPENDENT} except that the current goal is also given as an argument to [t]. The goal is normalised with respect to evars. *) val nf_enter : (t -> unit tactic) -> unit tactic [@@ocaml.deprecated "Normalization is enforced by EConstr, please use [enter]"] (** Like {!nf_enter}, but does not normalize the goal beforehand. *) val enter : (t -> unit tactic) -> unit tactic (** Like {!enter}, but assumes exactly one goal under focus, raising a fatal error otherwise. *) val enter_one : ?__LOC__:string -> (t -> 'a tactic) -> 'a tactic (** Recover the list of current goals under focus, without evar-normalization. FIXME: encapsulate the level in an existential type. *) val goals : t tactic list tactic (** [unsolved g] is [true] if [g] is still unsolved in the current proof state. *) val unsolved : t -> bool tactic (** Compatibility: avoid if possible *) val goal : t -> Evar.t val print : t -> Evar.t Evd.sigma end (** {6 Trace} *) module Trace : sig (** [record_info_trace t] behaves like [t] except the [info] trace is stored. 
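    For instance (an illustrative sketch), running [record_info_trace t]
    through {!apply} makes the returned {!Proofview_monad.Info.tree}
    meaningful; that tree can then be rendered with [pr_info].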
*) val record_info_trace : 'a tactic -> 'a tactic val log : Proofview_monad.lazy_msg -> unit tactic val name_tactic : Proofview_monad.lazy_msg -> 'a tactic -> 'a tactic val pr_info : Environ.env -> Evd.evar_map -> ?lvl:int -> Proofview_monad.Info.tree -> Pp.t end (** {6 Non-logical state} *) (** The [NonLogical] module allows the execution of effects (including I/O) in tactics (non-logical side-effects are not discarded at failures). *) module NonLogical : module type of Logic_monad.NonLogical (** [tclLIFT c] is a tactic which behaves exactly as [c]. *) val tclLIFT : 'a NonLogical.t -> 'a tactic (**/**) (*** Compatibility layer with <= 8.2 tactics ***) module V82 : sig type tac = Evar.t Evd.sigma -> Evar.t list Evd.sigma (* [nf_evars=true] applies the evar (assignment) map to the goals * (conclusion and context) before calling the tactic *) val tactic : ?nf_evars:bool -> tac -> unit tactic (* normalises the evars in the goals, and stores the result in solution. *) val nf_evar_goals : unit tactic val has_unresolved_evar : proofview -> bool val top_goals : entry -> proofview -> Evar.t list Evd.sigma (* returns the existential variable used to start the proof *) val top_evars : entry -> proofview -> Evar.t list (* Caution: this function loses quite a bit of information. It should be avoided as much as possible. It should work as expected for a tactic obtained from {!V82.tactic} though. *) val of_tactic : 'a tactic -> tac (* marks as unsafe if the argument is [false] *) val put_status : bool -> unit tactic (* exception for which it is deemed to be safe to transmute into tactic failure. *) val catchable_exception : exn -> bool (* transforms every Ocaml (catchable) exception into a failure in the monad. *) val wrap_exceptions : (unit -> 'a tactic) -> 'a tactic end (** {7 Notations} *) module Notations : sig (** {!tclBIND} *) val (>>=) : 'a tactic -> ('a -> 'b tactic) -> 'b tactic (** {!tclTHEN} *) val (<*>) : unit tactic -> 'a tactic -> 'a tactic (** {!tclOR}: [t1+t2] = [tclOR t1 (fun _ -> t2)]. *) val (<+>) : 'a tactic -> 'a tactic -> 'a tactic end coq-8.15.0/engine/proofview_monad.ml000066400000000000000000000210621417001151100173540ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* { head = a::head ; opened=[] } | a::Seq(b,f)::opened -> { head ; opened=Seq(b,a::f)::opened } | [] -> assert false let leaf a s = close (opn a s) (** Returning a forest. It is the responsibility of the library builder to close all the tags. *) (* spiwack: I may want to close the tags instead, to deal with interruptions. *) let rec mirror f = List.rev_map mirror_tree f and mirror_tree (Seq(a,f)) = Seq(a,mirror f) let to_tree = function | { head ; opened=[] } -> mirror head | { head ; opened=_::_} -> assert false end (** {6 State types} *) (** We typically label nodes of [Trace.tree] with messages to print. But we don't want to compute the result. *) type lazy_msg = Environ.env -> Evd.evar_map -> Pp.t (** Info trace. *) module Info = struct (** The type of the tags for [info]. *) type tag = | Msg of lazy_msg (** A simple message *) | Tactic of lazy_msg (** A tactic call *) | Dispatch (** A call to [tclDISPATCH]/[tclEXTEND] *) | DBranch (** A special marker to delimit individual branch of a dispatch. 
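          A dispatch over two goals is thus recorded as a tree of the shape
          [Seq (Dispatch, [Seq (DBranch, br1); Seq (DBranch, br2)])];
          see [unbranch] and [dispatch] below.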
*) type state = tag Trace.incr type tree = tag Trace.forest let pr_in_comments env sigma m = Pp.(str"(* "++ m env sigma ++str" *)") let unbranch = function | Trace.Seq (DBranch,brs) -> brs | _ -> assert false let is_empty_branch = let open Trace in function | Seq(DBranch,[]) -> true | _ -> false (** Dispatch with empty branches are (supposed to be) equivalent to [idtac] which need not appear, so they are removed from the trace. *) let dispatch brs = let open Trace in if CList.for_all is_empty_branch brs then None else Some (Seq(Dispatch,brs)) let constr = let open Trace in function | Dispatch -> dispatch | t -> fun br -> Some (Seq(t,br)) let rec compress_tree = let open Trace in function | Seq(t,f) -> constr t (compress f) and compress f = CList.map_filter compress_tree f (** [with_sep] is [true] when [Tactic m] must be printed with a trailing semi-colon. *) let rec pr_tree env sigma with_sep = let open Trace in function | Seq (Msg m,[]) -> pr_in_comments env sigma m | Seq (Tactic m,_) -> let tail = if with_sep then Pp.str";" else Pp.mt () in Pp.(m env sigma ++ tail) | Seq (Dispatch,brs) -> let tail = if with_sep then Pp.str";" else Pp.mt () in Pp.(pr_dispatch env sigma brs++tail) | Seq (Msg _,_::_) | Seq (DBranch,_) -> assert false and pr_dispatch env sigma brs = let open Pp in let brs = List.map unbranch brs in match brs with | [br] -> pr_forest env sigma br | _ -> let sep () = spc()++str"|"++spc() in let branches = prlist_with_sep sep (pr_forest env sigma) brs in str"[>"++spc()++branches++spc()++str"]" and pr_forest env sigma = function | [] -> Pp.mt () | [tr] -> pr_tree env sigma false tr | tr::l -> Pp.(pr_tree env sigma true tr ++ pr_forest env sigma l) let print env sigma f = pr_forest env sigma (compress f) let rec collapse_tree n t = let open Trace in match n , t with | 0 , t -> [t] | _ , (Seq(Tactic _,[]) as t) -> [t] | n , Seq(Tactic _,f) -> collapse (pred n) f | n , Seq(Dispatch,brs) -> [Seq(Dispatch, (collapse n brs))] | n , Seq(DBranch,br) -> [Seq(DBranch, (collapse n br))] | _ , (Seq(Msg _,_) as t) -> [t] and collapse n f = CList.map_append (collapse_tree n) f end module StateStore = Store.Make(struct end) (* let (set_state, get_state) = StateDyn.Easy.make_dyn "goal_state" *) type goal = Evar.t type goal_with_state = Evar.t * StateStore.t let drop_state = fst let get_state = snd let goal_with_state g s = (g, s) let with_empty_state g = (g, StateStore.empty) let map_goal_with_state f (g, s) = (f g, s) (** Type of proof views: current [evar_map] together with the list of focused goals. *) type proofview = { solution : Evd.evar_map; comb : goal_with_state list; } (** {6 Instantiation of the logic monad} *) (** Parameters of the logic monads *) module P = struct type s = proofview * Environ.env (** Recording info trace (true) or not. 
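      The flag is turned on locally by [InfoL.record_trace] below and
      consulted by [InfoL.recording].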
*) type e = { trace: bool; name : Names.Id.t; poly : bool } (** Status (safe/unsafe) * shelved goals * given up *) type w = bool let wunit = true let wprod b1 b2 = b1 && b2 type u = Info.state let uunit = Trace.empty_incr end module Logical = Logic_monad.Logical(P) (** {6 Lenses to access to components of the states} *) module type State = sig type t val get : t Logical.t val set : t -> unit Logical.t val modify : (t->t) -> unit Logical.t end module type Reader = sig type t val get : t Logical.t end module type Writer = sig type t val put : t -> unit Logical.t end module Pv : State with type t := proofview = struct let get = Logical.(map fst get) let set p = Logical.modify (fun (_,e) -> (p,e)) let modify f= Logical.modify (fun (p,e) -> (f p,e)) end module Solution : State with type t := Evd.evar_map = struct let get = Logical.map (fun {solution} -> solution) Pv.get let set s = Pv.modify (fun pv -> { pv with solution = s }) let modify f = Pv.modify (fun pv -> { pv with solution = f pv.solution }) end module Comb : State with type t = goal_with_state list = struct (* spiwack: I don't know why I cannot substitute ([:=]) [t] with a type expression. *) type t = goal_with_state list let get = Logical.map (fun {comb} -> comb) Pv.get let set c = Pv.modify (fun pv -> { pv with comb = c }) let modify f = Pv.modify (fun pv -> { pv with comb = f pv.comb }) end module Env : State with type t := Environ.env = struct let get = Logical.(map snd get) let set e = Logical.modify (fun (p,_) -> (p,e)) let modify f = Logical.modify (fun (p,e) -> (p,f e)) end module Status : Writer with type t := bool = struct let put s = Logical.put s end (** Lens and utilities pertaining to the info trace *) module InfoL = struct let recording = Logical.(map (fun {P.trace} -> trace) current) let if_recording t = let open Logical in recording >>= fun r -> if r then t else return () let record_trace t = Logical.( current >>= fun s -> local {s with P.trace = true} t) let raw_update = Logical.update let update f = if_recording (raw_update f) let opn a = update (Trace.opn a) let close = update Trace.close let leaf a = update (Trace.leaf a) let tag a t = let open Logical in recording >>= fun r -> if r then begin raw_update (Trace.opn a) >> t >>= fun a -> raw_update Trace.close >> return a end else t end coq-8.15.0/engine/proofview_monad.mli000066400000000000000000000115011417001151100175220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a forest (** [open a] opens a tag with name [a]. *) val opn : 'a -> 'a incr -> 'a incr (** [close] closes the last open tag. It is the responsibility of the user to close all the tags. *) val close : 'a incr -> 'a incr (** [leaf] creates an empty tag with name [a]. *) val leaf : 'a -> 'a incr -> 'a incr end (** {6 State types} *) (** We typically label nodes of [Trace.tree] with messages to print. But we don't want to compute the result. *) type lazy_msg = Environ.env -> Evd.evar_map -> Pp.t (** Info trace. *) module Info : sig (** The type of the tags for [info]. *) type tag = | Msg of lazy_msg (** A simple message *) | Tactic of lazy_msg (** A tactic call *) | Dispatch (** A call to [tclDISPATCH]/[tclEXTEND] *) | DBranch (** A special marker to delimit individual branch of a dispatch. 
*) type state = tag Trace.incr type tree = tag Trace.forest val print : Environ.env -> Evd.evar_map -> tree -> Pp.t (** [collapse n t] flattens the first [n] levels of [Tactic] in an info trace, effectively forgetting about the [n] top level of names (if there are fewer, the last name is kept). *) val collapse : int -> tree -> tree end module StateStore : Store.S type goal = Evar.t type goal_with_state val drop_state : goal_with_state -> goal val get_state : goal_with_state -> StateStore.t val goal_with_state : goal -> StateStore.t -> goal_with_state val with_empty_state : goal -> goal_with_state val map_goal_with_state : (goal -> goal) -> goal_with_state -> goal_with_state (** Type of proof views: current [evar_map] together with the list of focused goals, locally shelved goals and globally shelved goals. *) type proofview = { solution : Evd.evar_map; comb : goal_with_state list; } (** {6 Instantiation of the logic monad} *) module P : sig type s = proofview * Environ.env (** Status (safe/unsafe) * given up *) type w = bool val wunit : w val wprod : w -> w -> w (** Recording info trace (true) or not. *) type e = { trace: bool; name : Names.Id.t; poly : bool } type u = Info.state val uunit : u end module Logical : module type of Logic_monad.Logical(P) (** {6 Lenses to access to components of the states} *) module type State = sig type t val get : t Logical.t val set : t -> unit Logical.t val modify : (t->t) -> unit Logical.t end module type Reader = sig type t val get : t Logical.t end module type Writer = sig type t val put : t -> unit Logical.t end (** Lens to the [proofview]. *) module Pv : State with type t := proofview (** Lens to the [evar_map] of the proofview. *) module Solution : State with type t := Evd.evar_map (** Lens to the list of focused goals. *) module Comb : State with type t = goal_with_state list (** Lens to the global environment. *) module Env : State with type t := Environ.env (** Lens to the tactic status ([true] if safe, [false] if unsafe) *) module Status : Writer with type t := bool (** Lens and utilities pertaining to the info trace *) module InfoL : sig (** [record_trace t] behaves like [t] and compute its [info] trace. *) val record_trace : 'a Logical.t -> 'a Logical.t val update : (Info.state -> Info.state) -> unit Logical.t val opn : Info.tag -> unit Logical.t val close : unit Logical.t val leaf : Info.tag -> unit Logical.t (** [tag a t] opens tag [a] runs [t] then closes the tag. 
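      In other words (an equivalent formulation, not a new primitive),
      [tag a t] behaves like [opn a >> t >>= fun x -> close >> return x],
      and reduces to just [t] when the trace is not being recorded.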
*) val tag : Info.tag -> 'a Logical.t -> 'a Logical.t end coq-8.15.0/engine/termops.ml000066400000000000000000001450631417001151100156570ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* id | None -> match evi.evar_source with | _,Evar_kinds.ImplicitArg (c,(n,Some id),b) -> id | _,Evar_kinds.VarInstance id -> id | _,Evar_kinds.QuestionMark {Evar_kinds.qm_name = Name id} -> id | _,Evar_kinds.GoalEvar -> Id.of_string "Goal" | _ -> let env = reset_with_named_context evi.evar_hyps env in Namegen.id_of_name_using_hdchar env sigma evi.evar_concl Anonymous in let names = EvMap.mapi base_id (undefined_map sigma) in let id = EvMap.find evk names in let fold evk' id' (seen, n) = if seen then (seen, n) else if Evar.equal evk evk' then (true, n) else if Id.equal id id' then (seen, succ n) else (seen, n) in let (_, n) = EvMap.fold fold names (false, 0) in if n = 0 then id else Nameops.add_suffix id (string_of_int (pred n)) let pr_existential_key env sigma evk = let open Evd in match evar_ident evk sigma with | None -> str "?" ++ Id.print (evar_suggested_name env sigma evk) | Some id -> str "?" ++ Id.print id let pr_instance_status (sc,typ) = let open Evd in begin match sc with | IsSubType -> str " [or a subtype of it]" | IsSuperType -> str " [or a supertype of it]" | Conv -> mt () end ++ begin match typ with | CoerceToType -> str " [up to coercion]" | TypeNotProcessed -> mt () | TypeProcessed -> str " [type is checked]" end let protect f x = try f x with e -> str "EXCEPTION: " ++ str (Printexc.to_string e) let print_kconstr env sigma a = protect (fun c -> print_constr_env env sigma c) a let pr_meta_map env sigma = let open Evd in let print_constr = print_kconstr in let pr_name = function Name id -> str"[" ++ Id.print id ++ str"]" | _ -> mt() in let pr_meta_binding = function | (mv,Cltyp (na,b)) -> hov 0 (pr_meta mv ++ pr_name na ++ str " : " ++ print_constr env sigma b.rebus ++ fnl ()) | (mv,Clval(na,(b,s),t)) -> hov 0 (pr_meta mv ++ pr_name na ++ str " := " ++ print_constr env sigma b.rebus ++ str " : " ++ print_constr env sigma t.rebus ++ spc () ++ pr_instance_status s ++ fnl ()) in prlist pr_meta_binding (meta_list sigma) let pr_decl env sigma (decl,ok) = let open NamedDecl in let print_constr = print_kconstr in match decl with | LocalAssum ({binder_name=id},_) -> if ok then Id.print id else (str "{" ++ Id.print id ++ str "}") | LocalDef ({binder_name=id},c,_) -> str (if ok then "(" else "{") ++ Id.print id ++ str ":=" ++ print_constr env sigma c ++ str (if ok then ")" else "}") let pr_evar_source env sigma = function | Evar_kinds.NamedHole id -> Id.print id | Evar_kinds.QuestionMark _ -> str "underscore" | Evar_kinds.CasesType false -> str "pattern-matching return predicate" | Evar_kinds.CasesType true -> str "subterm of pattern-matching return predicate" | Evar_kinds.BinderType (Name id) -> str "type of " ++ Id.print id | Evar_kinds.BinderType Anonymous -> str "type of anonymous binder" | Evar_kinds.EvarType (ido,evk) -> let pp = match ido with | Some id -> str "?" 
++ Id.print id | None -> try pr_existential_key env sigma evk with (* defined *) Not_found -> str "an internal placeholder" in str "type of " ++ pp | Evar_kinds.ImplicitArg (c,(n,ido),b) -> let open Globnames in let print_constr = print_kconstr in let id = Option.get ido in str "parameter " ++ Id.print id ++ spc () ++ str "of" ++ spc () ++ print_constr env sigma (EConstr.of_constr @@ printable_constr_of_global c) | Evar_kinds.InternalHole -> str "internal placeholder" | Evar_kinds.TomatchTypeParameter (ind,n) -> let print_constr = print_kconstr in pr_nth n ++ str " argument of type " ++ print_constr env sigma (EConstr.mkInd ind) | Evar_kinds.GoalEvar -> str "goal evar" | Evar_kinds.ImpossibleCase -> str "type of impossible pattern-matching clause" | Evar_kinds.MatchingVar _ -> str "matching variable" | Evar_kinds.VarInstance id -> str "instance of " ++ Id.print id | Evar_kinds.SubEvar (where,evk) -> (match where with | None -> str "subterm of " | Some Evar_kinds.Body -> str "body of " | Some Evar_kinds.Domain -> str "domain of " | Some Evar_kinds.Codomain -> str "codomain of ") ++ Evar.print evk let pr_evar_info env sigma evi = let open Evd in let print_constr = print_kconstr in let phyps = try let decls = match Filter.repr (evar_filter evi) with | None -> List.map (fun c -> (c, true)) (evar_context evi) | Some filter -> List.combine (evar_context evi) filter in prlist_with_sep spc (pr_decl env sigma) (List.rev decls) with Invalid_argument _ -> str "Ill-formed filtered context" in let pty = print_constr env sigma evi.evar_concl in let pb = match evi.evar_body with | Evar_empty -> mt () | Evar_defined c -> spc() ++ str"=> " ++ print_constr env sigma c in let candidates = match evi.evar_body, evi.evar_candidates with | Evar_empty, Some l -> spc () ++ str "{" ++ prlist_with_sep (fun () -> str "|") (print_constr env sigma) l ++ str "}" | _ -> mt () in let src = str "(" ++ pr_evar_source env sigma (snd evi.evar_source) ++ str ")" in hov 2 (str"[" ++ phyps ++ spc () ++ str"|- " ++ pty ++ pb ++ str"]" ++ candidates ++ spc() ++ src) let compute_evar_dependency_graph sigma = let open Evd in (* Compute the map binding ev to the evars whose body depends on ev *) let fold evk evi acc = let fold_ev evk' acc = let tab = try EvMap.find evk' acc with Not_found -> Evar.Set.empty in EvMap.add evk' (Evar.Set.add evk tab) acc in match evar_body evi with | Evar_empty -> acc | Evar_defined c -> Evar.Set.fold fold_ev (evars_of_term sigma c) acc in Evd.fold fold sigma EvMap.empty let evar_dependency_closure n sigma = let open Evd in (* Create the DAG of depth [n] representing the recursive dependencies of undefined evars. 
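(Editor's illustration, not upstream text: with [n] = 1 the closure contains the undefined evars together with the defined evars whose bodies mention one of them; [n] = 0 yields just the undefined evars.)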
*) let graph = compute_evar_dependency_graph sigma in let rec aux n curr accu = if Int.equal n 0 then Evar.Set.union curr accu else let fold evk accu = try let deps = EvMap.find evk graph in Evar.Set.union deps accu with Not_found -> accu in (* Consider only the newly added evars *) let ncurr = Evar.Set.fold fold curr Evar.Set.empty in (* Merge the others *) let accu = Evar.Set.union curr accu in aux (n - 1) ncurr accu in let undef = EvMap.domain (undefined_map sigma) in aux n undef Evar.Set.empty let evar_dependency_closure n sigma = let open Evd in let deps = evar_dependency_closure n sigma in let map = EvMap.bind (fun ev -> find sigma ev) deps in EvMap.bindings map let has_no_evar sigma = try let () = Evd.fold (fun _ _ () -> raise Exit) sigma () in true with Exit -> false let pr_evd_level sigma = UState.pr_uctx_level (Evd.evar_universe_context sigma) let reference_of_level sigma l = UState.qualid_of_level (Evd.evar_universe_context sigma) l let pr_evar_universe_context ctx = let open UState in let prl = pr_uctx_level ctx in if UState.is_empty ctx then mt () else v 0 (str"UNIVERSES:"++brk(0,1)++ h (Univ.pr_universe_context_set prl (UState.context_set ctx)) ++ fnl () ++ str"ALGEBRAIC UNIVERSES:"++brk(0,1)++ h (Univ.Level.Set.pr prl (UState.algebraics ctx)) ++ fnl() ++ str"UNDEFINED UNIVERSES:"++brk(0,1)++ h (UState.pr_universe_opt_subst (UState.subst ctx)) ++ fnl() ++ str "WEAK CONSTRAINTS:"++brk(0,1)++ h (UState.pr_weak prl ctx) ++ fnl ()) let print_env_short env sigma = let print_constr = print_kconstr in let pr_rel_decl = function | RelDecl.LocalAssum (n,_) -> Name.print n.binder_name | RelDecl.LocalDef (n,b,_) -> str "(" ++ Name.print n.binder_name ++ str " := " ++ print_constr env sigma (EConstr.of_constr b) ++ str ")" in let pr_named_decl = NamedDecl.to_rel_decl %> pr_rel_decl in let nc = List.rev (named_context env) in let rc = List.rev (rel_context env) in str "[" ++ pr_sequence pr_named_decl nc ++ str "]" ++ spc () ++ str "[" ++ pr_sequence pr_rel_decl rc ++ str "]" let pr_evar_constraints sigma pbs = let pr_evconstr (pbty, env, t1, t2) = let env = (* We currently allow evar instances to refer to anonymous de Bruijn indices, so we protect the error printing code in this case by giving names to every de Bruijn variable in the rel_context of the conversion problem. MS: we should rather stop depending on anonymous variables, they can be used to indicate independency. Also, this depends on a strategy for naming/renaming. 
*) Namegen.make_all_name_different env sigma in print_env_short env sigma ++ spc () ++ str "|-" ++ spc () ++ protect (print_constr_env env sigma) t1 ++ spc () ++ str (match pbty with | Reduction.CONV -> "==" | Reduction.CUMUL -> "<=") ++ spc () ++ protect (print_constr_env env @@ Evd.from_env env) t2 in prlist_with_sep fnl pr_evconstr pbs let pr_evar_map_gen with_univs pr_evars env sigma = let uvs = Evd.evar_universe_context sigma in let (_, conv_pbs) = Evd.extract_all_conv_pbs sigma in let evs = if has_no_evar sigma then mt () else pr_evars sigma ++ fnl () and svs = if with_univs then pr_evar_universe_context uvs else mt () and cstrs = if List.is_empty conv_pbs then mt () else str "CONSTRAINTS:" ++ brk (0, 1) ++ pr_evar_constraints sigma conv_pbs ++ fnl () and typeclasses = let evars = Evd.get_typeclass_evars sigma in if Evar.Set.is_empty evars then mt () else str "TYPECLASSES:" ++ brk (0, 1) ++ prlist_with_sep spc Evar.print (Evar.Set.elements evars) ++ fnl () and obligations = let evars = Evd.get_obligation_evars sigma in if Evar.Set.is_empty evars then mt () else str "OBLIGATIONS:" ++ brk (0, 1) ++ prlist_with_sep spc Evar.print (Evar.Set.elements evars) ++ fnl () and metas = if List.is_empty (Evd.meta_list sigma) then mt () else str "METAS:" ++ brk (0, 1) ++ pr_meta_map env sigma and shelf = str "SHELF:" ++ brk (0, 1) ++ Evd.pr_shelf sigma ++ fnl () and future_goals = str "FUTURE GOALS STACK:" ++ brk (0, 1) ++ Evd.pr_future_goals_stack sigma ++ fnl () in evs ++ svs ++ cstrs ++ typeclasses ++ obligations ++ metas ++ shelf ++ future_goals let pr_evar_list env sigma l = let open Evd in let pr_alias ev = match is_aliased_evar sigma ev with | None -> mt () | Some ev' -> str " (aliased to " ++ Evar.print ev' ++ str ")" in let pr (ev, evi) = h (Evar.print ev ++ str "==" ++ pr_evar_info env sigma evi ++ pr_alias ev ++ (if evi.evar_body == Evar_empty then str " {" ++ pr_existential_key env sigma ev ++ str "}" else mt ())) in hv 0 (prlist_with_sep fnl pr l) let to_list d = let open Evd in (* Workaround for change in Map.fold behavior in ocaml 3.08.4 *) let l = ref [] in let fold_def evk evi () = match evi.evar_body with | Evar_defined _ -> l := (evk, evi) :: !l | Evar_empty -> () in let fold_undef evk evi () = match evi.evar_body with | Evar_empty -> l := (evk, evi) :: !l | Evar_defined _ -> () in Evd.fold fold_def d (); Evd.fold fold_undef d (); !l let pr_evar_by_depth depth env sigma = match depth with | None -> (* Print all evars *) str"EVARS:" ++ brk(0,1) ++ pr_evar_list env sigma (to_list sigma) ++ fnl() | Some n -> (* Print closure of undefined evars *) str"UNDEFINED EVARS:"++ (if Int.equal n 0 then mt() else str" (+level "++int n++str" closure):")++ brk(0,1)++ pr_evar_list env sigma (evar_dependency_closure n sigma) ++ fnl() let pr_evar_by_filter filter env sigma = let open Evd in let elts = Evd.fold (fun evk evi accu -> (evk, evi) :: accu) sigma [] in let elts = List.rev elts in let is_def (_, evi) = match evi.evar_body with | Evar_defined _ -> true | Evar_empty -> false in let (defined, undefined) = List.partition is_def elts in let filter (evk, evi) = filter evk evi in let defined = List.filter filter defined in let undefined = List.filter filter undefined in let prdef = if List.is_empty defined then mt () else str "DEFINED EVARS:" ++ brk (0, 1) ++ pr_evar_list env sigma defined in let prundef = if List.is_empty undefined then mt () else str "UNDEFINED EVARS:" ++ brk (0, 1) ++ pr_evar_list env sigma undefined in prdef ++ prundef let pr_evar_map ?(with_univs=true) depth env sigma = 
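(* Editor's sketch, not part of upstream Coq: a typical debugging call,
   assuming [env] and [sigma] are in scope, might look like
     Feedback.msg_debug (pr_evar_map ~with_univs:false None env sigma)
   where the [int option] argument selects what is printed: [None] prints
   all evars, while [Some n] prints the undefined evars together with their
   level-[n] dependency closure. *)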
pr_evar_map_gen with_univs (fun sigma -> pr_evar_by_depth depth env sigma) env sigma let pr_evar_map_filter ?(with_univs=true) filter env sigma = pr_evar_map_gen with_univs (fun sigma -> pr_evar_by_filter filter env sigma) env sigma let pr_metaset metas = str "[" ++ pr_sequence pr_meta (Evd.Metaset.elements metas) ++ str "]" let pr_var_decl env decl = let open NamedDecl in let sigma = Evd.from_env env in let pbody = match decl with | LocalAssum _ -> mt () | LocalDef (_,c,_) -> (* Force evaluation *) let c = EConstr.of_constr c in let pb = print_constr_env env sigma c in (str" := " ++ pb ++ cut () ) in let pt = print_constr_env env sigma (EConstr.of_constr (get_type decl)) in let ptyp = (str" : " ++ pt) in (Id.print (get_id decl) ++ hov 0 (pbody ++ ptyp)) let pr_rel_decl env decl = let open RelDecl in let sigma = Evd.from_env env in let pbody = match decl with | LocalAssum _ -> mt () | LocalDef (_,c,_) -> (* Force evaluation *) let c = EConstr.of_constr c in let pb = print_constr_env env sigma c in (str":=" ++ spc () ++ pb ++ spc ()) in let ptyp = print_constr_env env sigma (EConstr.of_constr (get_type decl)) in match get_name decl with | Anonymous -> hov 0 (str"<>" ++ spc () ++ pbody ++ str":" ++ spc () ++ ptyp) | Name id -> hov 0 (Id.print id ++ spc () ++ pbody ++ str":" ++ spc () ++ ptyp) let print_named_context env = hv 0 (fold_named_context (fun env d pps -> pps ++ ws 2 ++ pr_var_decl env d) env ~init:(mt ())) let print_rel_context env = hv 0 (fold_rel_context (fun env d pps -> pps ++ ws 2 ++ pr_rel_decl env d) env ~init:(mt ())) let print_env env = let sign_env = fold_named_context (fun env d pps -> let pidt = pr_var_decl env d in (pps ++ fnl () ++ pidt)) env ~init:(mt ()) in let db_env = fold_rel_context (fun env d pps -> let pnat = pr_rel_decl env d in (pps ++ fnl () ++ pnat)) env ~init:(mt ()) in (sign_env ++ db_env) (* [Rel (n+m);...;Rel(n+1)] *) let rel_vect n m = Array.init m (fun i -> mkRel(n+m-i)) let rel_list n m = let open EConstr in let rec reln l p = if p>m then l else reln (mkRel(n+p)::l) (p+1) in reln [] 1 let push_rel_assum (x,t) env = let open RelDecl in let open EConstr in push_rel (LocalAssum (x,t)) env let push_rels_assum assums = let open RelDecl in push_rel_context (List.map (fun (x,t) -> LocalAssum (x,t)) assums) let push_named_rec_types (lna,typarray,_) env = let open NamedDecl in let ctxt = Array.map2_i (fun i na t -> let id = map_annot (function | Name id -> id | Anonymous -> anomaly (Pp.str "Fix declarations must be named.")) na in LocalAssum (id, lift i t)) lna typarray in Array.fold_left (fun e assum -> push_named assum e) env ctxt let lookup_rel_id id sign = let open RelDecl in let rec lookrec n = function | [] -> raise Not_found | decl :: l -> if Names.Name.equal (Name id) (get_name decl) then (n, get_value decl, get_type decl) else lookrec (n+1) l in lookrec 1 sign (* Constructs either [forall x:t, c] or [let x:=b:t in c] *) let mkProd_or_LetIn = EConstr.mkProd_or_LetIn (* Constructs either [forall x:t, c] or [c] in which [x] is replaced by [b] *) let mkProd_wo_LetIn decl c = let open EConstr in let open RelDecl in match decl with | LocalAssum (na,t) -> mkProd (na, t, c) | LocalDef (_,b,_) -> Vars.subst1 b c let it_mkProd init = List.fold_left (fun c (n,t) -> EConstr.mkProd (n, t, c)) init let it_mkLambda init = List.fold_left (fun c (n,t) -> EConstr.mkLambda (n, t, c)) init let it_named_context_quantifier f ~init = List.fold_left (fun c d -> f d c) init let it_mkProd_or_LetIn init = it_named_context_quantifier mkProd_or_LetIn ~init let 
it_mkProd_wo_LetIn init = it_named_context_quantifier mkProd_wo_LetIn ~init let it_mkLambda_or_LetIn init = it_named_context_quantifier mkLambda_or_LetIn ~init let it_mkNamedProd_or_LetIn init = it_named_context_quantifier EConstr.mkNamedProd_or_LetIn ~init let it_mkNamedProd_wo_LetIn init = it_named_context_quantifier mkNamedProd_wo_LetIn ~init let it_mkNamedLambda_or_LetIn init = it_named_context_quantifier EConstr.mkNamedLambda_or_LetIn ~init let it_mkLambda_or_LetIn_from_no_LetIn c decls = let open RelDecl in let rec aux k decls c = match decls with | [] -> c | LocalDef (na,b,t) :: decls -> mkLetIn (na,b,t,aux (k-1) decls (liftn 1 k c)) | LocalAssum (na,t) :: decls -> mkLambda (na,t,aux (k-1) decls c) in aux (List.length decls) (List.rev decls) c (* *) (* strips head casts and flattens head applications *) let rec strip_head_cast sigma c = match EConstr.kind sigma c with | App (f,cl) -> let rec collapse_rec f cl2 = match EConstr.kind sigma f with | App (g,cl1) -> collapse_rec g (Array.append cl1 cl2) | Cast (c,_,_) -> collapse_rec c cl2 | _ -> if Int.equal (Array.length cl2) 0 then f else EConstr.mkApp (f,cl2) in collapse_rec f cl | Cast (c,_,_) -> strip_head_cast sigma c | _ -> c let rec drop_extra_implicit_args sigma c = match EConstr.kind sigma c with (* Removed trailing extra implicit arguments, what improves compatibility for constants with recently added maximal implicit arguments *) | App (f,args) when EConstr.isEvar sigma (Array.last args) -> let open EConstr in drop_extra_implicit_args sigma (mkApp (f,fst (Array.chop (Array.length args - 1) args))) | _ -> c (* Get the last arg of an application *) let last_arg sigma c = match EConstr.kind sigma c with | App (f,cl) -> Array.last cl | _ -> anomaly (Pp.str "last_arg.") (* Get the last arg of an application *) let decompose_app_vect sigma c = match EConstr.kind sigma c with | App (f,cl) -> (f, cl) | _ -> (c,[||]) let adjust_app_list_size f1 l1 f2 l2 = let open EConstr in let len1 = List.length l1 and len2 = List.length l2 in if Int.equal len1 len2 then (f1,l1,f2,l2) else if len1 < len2 then let extras,restl2 = List.chop (len2-len1) l2 in (f1, l1, applist (f2,extras), restl2) else let extras,restl1 = List.chop (len1-len2) l1 in (applist (f1,extras), restl1, f2, l2) let adjust_app_array_size f1 l1 f2 l2 = let open EConstr in let len1 = Array.length l1 and len2 = Array.length l2 in if Int.equal len1 len2 then (f1,l1,f2,l2) else if len1 < len2 then let extras,restl2 = Array.chop (len2-len1) l2 in (f1, l1, mkApp (f2,extras), restl2) else let extras,restl1 = Array.chop (len1-len2) l1 in (mkApp (f1,extras), restl1, f2, l2) (* [map_constr_with_binders_left_to_right g f n c] maps [f n] on the immediate subterms of [c]; it carries an extra data [n] (typically a lift index) which is processed by [g] (which typically add 1 to [n]) at each binder traversal; the subterms are processed from left to right according to the usual representation of the constructions (this may matter if [f] does a side-effect); it is not recursive; in fact, the usual representation of the constructions is at the time being almost those of the ML representation (except for (co-)fixpoint) *) let fold_rec_types g (lna,typarray,_) e = let open EConstr in let open Vars in let ctxt = Array.map2_i (fun i na t -> RelDecl.LocalAssum (na, lift i t)) lna typarray in Array.fold_left (fun e assum -> g assum e) e ctxt let map_left2 f a g b = let l = Array.length a in if Int.equal l 0 then [||], [||] else begin let r = Array.make l (f a.(0)) in let s = Array.make l (g b.(0)) in for 
i = 1 to l - 1 do r.(i) <- f a.(i); s.(i) <- g b.(i) done; r, s end let map_constr_with_binders_left_to_right env sigma g f l c = let open RelDecl in let open EConstr in match EConstr.kind sigma c with | (Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _ | Construct _ | Int _ | Float _) -> c | Cast (b,k,t) -> let b' = f l b in let t' = f l t in if b' == b && t' == t then c else mkCast (b',k,t') | Prod (na,t,b) -> let t' = f l t in let b' = f (g (LocalAssum (na,t)) l) b in if t' == t && b' == b then c else mkProd (na, t', b') | Lambda (na,t,b) -> let t' = f l t in let b' = f (g (LocalAssum (na,t)) l) b in if t' == t && b' == b then c else mkLambda (na, t', b') | LetIn (na,bo,t,b) -> let bo' = f l bo in let t' = f l t in let b' = f (g (LocalDef (na,bo,t)) l) b in if bo' == bo && t' == t && b' == b then c else mkLetIn (na, bo', t', b') | App (c,[||]) -> assert false | App (t,al) -> (*Special treatment to be able to recognize partially applied subterms*) let a = al.(Array.length al - 1) in let app = (mkApp (t, Array.sub al 0 (Array.length al - 1))) in let app' = f l app in let a' = f l a in if app' == app && a' == a then c else mkApp (app', [| a' |]) | Proj (p,b) -> let b' = f l b in if b' == b then c else mkProj (p, b') | Evar (e,al) -> let al' = List.map_left (f l) al in if List.for_all2 (==) al' al then c else mkEvar (e, al') | Case (ci,u,pms,p,iv,b,bl) -> let (ci, _, pms, p0, _, b, bl0) = annotate_case env sigma (ci, u, pms, p, iv, b, bl) in let f_ctx (nas, _ as r) (ctx, c) = let c' = f (List.fold_right g ctx l) c in if c' == c then r else (nas, c') in (* In v8 concrete syntax, predicate is after the term to match! *) let b' = f l b in let pms' = Array.map_left (f l) pms in let p' = f_ctx p p0 in let iv' = map_invert (f l) iv in let bl' = Array.map_left (fun (c, c0) -> f_ctx c c0) (Array.map2 (fun x y -> (x, y)) bl bl0) in if b' == b && pms' == pms && p' == p && iv' == iv && bl' == bl then c else mkCase (ci, u, pms', p', iv', b', bl') | Fix (ln,(lna,tl,bl as fx)) -> let l' = fold_rec_types g fx l in let (tl', bl') = map_left2 (f l) tl (f l') bl in if Array.for_all2 (==) tl tl' && Array.for_all2 (==) bl bl' then c else mkFix (ln,(lna,tl',bl')) | CoFix(ln,(lna,tl,bl as fx)) -> let l' = fold_rec_types g fx l in let (tl', bl') = map_left2 (f l) tl (f l') bl in if Array.for_all2 (==) tl tl' && Array.for_all2 (==) bl bl' then c else mkCoFix (ln,(lna,tl',bl')) | Array(u,t,def,ty) -> let t' = Array.map_left (f l) t in let def' = f l def in let ty' = f l ty in if def' == def && t' == t && ty' == ty then c else mkArray(u,t',def',ty') (* strong *) let map_constr_with_full_binders env sigma g f l cstr = let open EConstr in match EConstr.kind sigma cstr with | (Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _ | Construct _ | Int _ | Float _) -> cstr | Cast (c,k, t) -> let c' = f l c in let t' = f l t in if c==c' && t==t' then cstr else mkCast (c', k, t') | Prod (na,t,c) -> let t' = f l t in let c' = f (g (RelDecl.LocalAssum (na, t)) l) c in if t==t' && c==c' then cstr else mkProd (na, t', c') | Lambda (na,t,c) -> let t' = f l t in let c' = f (g (RelDecl.LocalAssum (na, t)) l) c in if t==t' && c==c' then cstr else mkLambda (na, t', c') | LetIn (na,b,t,c) -> let b' = f l b in let t' = f l t in let c' = f (g (RelDecl.LocalDef (na, b, t)) l) c in if b==b' && t==t' && c==c' then cstr else mkLetIn (na, b', t', c') | App (c,al) -> let c' = f l c in let al' = Array.map (f l) al in if c==c' && Array.for_all2 (==) al al' then cstr else mkApp (c', al') | Proj (p,c) -> let c' = f l c in if c' == c then cstr else 
mkProj (p, c') | Evar (e,al) -> let al' = List.map (f l) al in if List.for_all2 (==) al al' then cstr else mkEvar (e, al') | Case (ci, u, pms, p, iv, c, bl) -> let (ci, _, pms, p0, _, c, bl0) = annotate_case env sigma (ci, u, pms, p, iv, c, bl) in let f_ctx (nas, _ as r) (ctx, c) = let c' = f (List.fold_right g ctx l) c in if c' == c then r else (nas, c') in let pms' = Array.Smart.map (f l) pms in let p' = f_ctx p p0 in let iv' = map_invert (f l) iv in let c' = f l c in let bl' = Array.map2 f_ctx bl bl0 in if pms==pms' && p==p' && iv'==iv && c==c' && Array.for_all2 (==) bl bl' then cstr else mkCase (ci, u, pms', p', iv', c', bl') | Fix (ln,(lna,tl,bl as fx)) -> let tl' = Array.map (f l) tl in let l' = fold_rec_types g fx l in let bl' = Array.map (f l') bl in if Array.for_all2 (==) tl tl' && Array.for_all2 (==) bl bl' then cstr else mkFix (ln,(lna,tl',bl')) | CoFix(ln,(lna,tl,bl as fx)) -> let tl' = Array.map (f l) tl in let l' = fold_rec_types g fx l in let bl' = Array.map (f l') bl in if Array.for_all2 (==) tl tl' && Array.for_all2 (==) bl bl' then cstr else mkCoFix (ln,(lna,tl',bl')) | Array(u,t,def,ty) -> let t' = Array.Smart.map (f l) t in let def' = f l def in let ty' = f l ty in if def==def' && t == t' && ty==ty' then cstr else mkArray (u,t', def',ty') (* [fold_constr_with_binders g f n acc c] folds [f n] on the immediate subterms of [c] starting from [acc] and proceeding from left to right according to the usual representation of the constructions as [fold_constr] but it carries an extra data [n] (typically a lift index) which is processed by [g] (which typically add 1 to [n]) at each binder traversal; it is not recursive *) let fold_constr_with_full_binders env sigma g f n acc c = let open EConstr.Vars in let open Context.Rel.Declaration in match EConstr.kind sigma c with | Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _ | Construct _ | Int _ | Float _ -> acc | Cast (c,_, t) -> f n (f n acc c) t | Prod (na,t,c) -> f (g (LocalAssum (na,t)) n) (f n acc t) c | Lambda (na,t,c) -> f (g (LocalAssum (na,t)) n) (f n acc t) c | LetIn (na,b,t,c) -> f (g (LocalDef (na,b,t)) n) (f n (f n acc b) t) c | App (c,l) -> Array.fold_left (f n) (f n acc c) l | Proj (_,c) -> f n acc c | Evar (_,l) -> List.fold_left (f n) acc l | Case (ci, u, pms, p, iv, c, bl) -> let (ci, _, pms, p, _, c, bl) = EConstr.annotate_case env sigma (ci, u, pms, p, iv, c, bl) in let f_ctx acc (ctx, c) = f (List.fold_right g ctx n) acc c in Array.fold_left f_ctx (f n (fold_invert (f n) (f_ctx (Array.fold_left (f n) acc pms) p) iv) c) bl | Fix (_,(lna,tl,bl)) -> let n' = CArray.fold_left2_i (fun i c n t -> g (LocalAssum (n,lift i t)) c) n lna tl in let fd = Array.map2 (fun t b -> (t,b)) tl bl in Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd | CoFix (_,(lna,tl,bl)) -> let n' = CArray.fold_left2_i (fun i c n t -> g (LocalAssum (n,lift i t)) c) n lna tl in let fd = Array.map2 (fun t b -> (t,b)) tl bl in Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd | Array(_u,t,def,ty) -> f n (f n (Array.fold_left (f n) acc t) def) ty let fold_constr_with_binders sigma g f n acc c = let open EConstr in let f l acc c = f l acc (of_constr c) in let c = Unsafe.to_constr (whd_evar sigma c) in Constr.fold_constr_with_binders g f n acc c (***************************) (* occurs check functions *) (***************************) exception Occur let occur_meta sigma c = let rec occrec c = match EConstr.kind sigma c with | Meta _ -> raise Occur | _ -> EConstr.iter sigma occrec c in try occrec c; false with Occur -> true let 
occur_existential sigma c = let rec occrec c = match EConstr.kind sigma c with | Evar _ -> raise Occur | _ -> EConstr.iter sigma occrec c in try occrec c; false with Occur -> true let occur_meta_or_existential sigma c = let rec occrec c = match EConstr.kind sigma c with | Evar _ -> raise Occur | Meta _ -> raise Occur | _ -> EConstr.iter sigma occrec c in try occrec c; false with Occur -> true let occur_metavariable sigma m c = let rec occrec c = match EConstr.kind sigma c with | Meta m' -> if Int.equal m m' then raise Occur | _ -> EConstr.iter sigma occrec c in try occrec c; false with Occur -> true let occur_evar sigma n c = let rec occur_rec c = match EConstr.kind sigma c with | Evar (sp,_) when Evar.equal sp n -> raise Occur | _ -> EConstr.iter sigma occur_rec c in try occur_rec c; false with Occur -> true let occur_in_global env id constr = let vars = vars_of_global env constr in Id.Set.mem id vars let occur_var env sigma id c = let rec occur_rec c = match EConstr.destRef sigma c with | gr, _ -> if occur_in_global env id gr then raise Occur | exception DestKO -> EConstr.iter sigma occur_rec c in try occur_rec c; false with Occur -> true let occur_vars env sigma ids c = let rec occur_rec c = match EConstr.destRef sigma c with | gr, _ -> let vars = vars_of_global env gr in if not (Id.Set.is_empty (Id.Set.inter ids vars)) then raise Occur | exception DestKO -> EConstr.iter sigma occur_rec c in try occur_rec c; false with Occur -> true exception OccurInGlobal of GlobRef.t let occur_var_indirectly env sigma id c = let var = GlobRef.VarRef id in let rec occur_rec c = match EConstr.destRef sigma c with | gr, _ -> if not (GlobRef.equal gr var) && occur_in_global env id gr then raise (OccurInGlobal gr) | exception DestKO -> EConstr.iter sigma occur_rec c in try occur_rec c; None with OccurInGlobal gr -> Some gr let occur_var_in_decl env sigma hyp decl = NamedDecl.exists (occur_var env sigma hyp) decl let occur_vars_in_decl env sigma hyps decl = NamedDecl.exists (occur_vars env sigma hyps) decl let local_occur_var sigma id c = let rec occur c = match EConstr.kind sigma c with | Var id' -> if Id.equal id id' then raise Occur | _ -> EConstr.iter sigma occur c in try occur c; false with Occur -> true let local_occur_var_in_decl sigma hyp decl = NamedDecl.exists (local_occur_var sigma hyp) decl (* returns the list of free debruijn indices in a term *) let free_rels sigma m = let rec frec depth acc c = match EConstr.kind sigma c with | Rel n -> if n >= depth then Int.Set.add (n-depth+1) acc else acc | _ -> fold_constr_with_binders sigma succ frec depth acc c in frec 1 Int.Set.empty m let free_rels_and_unqualified_refs sigma t = let rec aux k (gseen, vseen, ids as accu) t = match EConstr.kind sigma t with | Const _ | Ind _ | Construct _ | Var _ -> let g, _ = EConstr.destRef sigma t in if not (GlobRef.Set_env.mem g gseen) then begin try let gseen = GlobRef.Set_env.add g gseen in let short = Nametab.shortest_qualid_of_global Id.Set.empty g in let dir, id = Libnames.repr_qualid short in let ids = if DirPath.is_empty dir then Id.Set.add id ids else ids in (gseen, vseen, ids) with Not_found when !Flags.in_debugger || !Flags.in_toplevel -> accu end else accu | Rel p -> if p > k && not (Int.Set.mem (p - k) vseen) then let vseen = Int.Set.add (p - k) vseen in (gseen, vseen, ids) else accu | _ -> EConstr.fold_with_binders sigma succ aux k accu t in let accu = (GlobRef.Set_env.empty, Int.Set.empty, Id.Set.empty) in let (_, rels, ids) = aux 0 accu t in rels, ids (* collects all metavar occurrences, in 
left-to-right order, preserving * repetitions and all. *) let collect_metas sigma c = let rec collrec acc c = match EConstr.kind sigma c with | Meta mv -> List.add_set Int.equal mv acc | _ -> EConstr.fold sigma collrec acc c in List.rev (collrec [] c) (* collects all vars; warning: this is only visible vars, not dependencies in all section variables; for the latter, use global_vars_set *) let collect_vars sigma c = let rec aux vars c = match EConstr.kind sigma c with | Var id -> Id.Set.add id vars | _ -> EConstr.fold sigma aux vars c in aux Id.Set.empty c (* Tests whether [m] is a subterm of [t]: [m] is appropriately lifted through abstractions of [t] *) let dependent_main noevar sigma m t = let open EConstr in let eqc x y = eq_constr_nounivs sigma x y in let rec deprec m t = if eqc m t then raise Occur else match EConstr.kind sigma m, EConstr.kind sigma t with | App (fm,lm), App (ft,lt) when Array.length lm < Array.length lt -> deprec m (mkApp (ft,Array.sub lt 0 (Array.length lm))); Array.Fun1.iter deprec m (Array.sub lt (Array.length lm) ((Array.length lt) - (Array.length lm))) | _, Cast (c,_,_) when noevar && isMeta sigma c -> () | _, Evar _ when noevar -> () | _ -> EConstr.iter_with_binders sigma (fun c -> Vars.lift 1 c) deprec m t in try deprec m t; false with Occur -> true let dependent sigma c t = dependent_main false sigma c t let dependent_no_evar sigma c t = dependent_main true sigma c t let dependent_in_decl sigma a decl = let open NamedDecl in match decl with | LocalAssum (_,t) -> dependent sigma a t | LocalDef (_, body, t) -> dependent sigma a body || dependent sigma a t let count_occurrences sigma m t = let open EConstr in let n = ref 0 in let rec countrec m t = if EConstr.eq_constr sigma m t then incr n else match EConstr.kind sigma m, EConstr.kind sigma t with | App (fm,lm), App (ft,lt) when Array.length lm < Array.length lt -> countrec m (mkApp (ft,Array.sub lt 0 (Array.length lm))); Array.iter (countrec m) (Array.sub lt (Array.length lm) ((Array.length lt) - (Array.length lm))) | _, Cast (c,_,_) when isMeta sigma c -> () | _, Evar _ -> () | _ -> EConstr.iter_with_binders sigma (Vars.lift 1) countrec m t in countrec m t; !n let pop t = EConstr.Vars.lift (-1) t (***************************) (* bindings functions *) (***************************) type meta_type_map = (metavariable * types) list type meta_value_map = (metavariable * constr) list let isMetaOf sigma mv c = match EConstr.kind sigma c with Meta mv' -> Int.equal mv mv' | _ -> false let rec subst_meta bl c = match kind c with | Meta i -> (try Int.List.assoc i bl with Not_found -> c) | _ -> Constr.map (subst_meta bl) c let rec strip_outer_cast sigma c = match EConstr.kind sigma c with | Cast (c,_,_) -> strip_outer_cast sigma c | _ -> c (* First utilities for avoiding telescope computation for subst_term *) let prefix_application sigma eq_fun k l1 t = let open EConstr in if 0 < l1 then match EConstr.kind sigma t with | App (f2,cl2) -> let l2 = Array.length cl2 in if l1 <= l2 && eq_fun sigma k (mkApp (f2, Array.sub cl2 0 l1)) then Some (Array.sub cl2 l1 (l2 - l1)) else None | _ -> None else None let eq_upto_lift cache c sigma k t = let c = try Int.Map.find k !cache with Not_found -> let c = EConstr.Vars.lift k c in let () = cache := Int.Map.add k c !cache in c in EConstr.eq_constr sigma c t (* Recognizing occurrences of a given subterm in a term : [replace_term c1 c2 t] substitutes [c2] for all occurrences of term [c1] in a term [t]; works if [c1] and [c2] have rels *) let replace_term_gen sigma eq_fun ar by_c in_t = 
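(* Editor's sketch, not part of upstream Coq: the wrappers defined below
   are the intended entry points; assuming [sigma : Evd.evar_map] and
   [c], [byc], [t] of type [EConstr.constr], one would write
     let t' = replace_term sigma c byc t
   to rewrite every occurrence of [c] in [t] into [byc], and
     let abstracted = subst_term sigma c t
   to abstract those occurrences as [Rel 1]. *)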
let rec substrec k t = match prefix_application sigma eq_fun k ar t with | Some args -> EConstr.mkApp (EConstr.Vars.lift k by_c, args) | None -> (if eq_fun sigma k t then (EConstr.Vars.lift k by_c) else EConstr.map_with_binders sigma succ substrec k t) in substrec 0 in_t let replace_term sigma c byc t = let cache = ref Int.Map.empty in let ar = Array.length (snd (decompose_app_vect sigma c)) in let eq sigma k t = eq_upto_lift cache c sigma k t in replace_term_gen sigma eq ar byc t let subst_term sigma c t = replace_term sigma c (EConstr.mkRel 1) t let vars_of_env env = let s = Environ.ids_of_named_context_val (Environ.named_context_val env) in if List.is_empty (Environ.rel_context env) then s else Context.Rel.fold_outside (fun decl s -> match RelDecl.get_name decl with Name id -> Id.Set.add id s | _ -> s) (rel_context env) ~init:s let add_vname vars = function Name id -> Id.Set.add id vars | _ -> vars (*************************) (* Names environments *) (*************************) type names_context = Name.t list let add_name n nl = n::nl let lookup_name_of_rel p names = try List.nth names (p-1) with Invalid_argument _ | Failure _ -> raise Not_found let lookup_rel_of_name id names = let rec lookrec n = function | Anonymous :: l -> lookrec (n+1) l | (Name id') :: l -> if Id.equal id' id then n else lookrec (n+1) l | [] -> raise Not_found in lookrec 1 names let empty_names_context = [] let ids_of_rel_context sign = Context.Rel.fold_outside (fun decl l -> match RelDecl.get_name decl with Name id -> id::l | Anonymous -> l) sign ~init:[] let ids_of_named_context sign = Context.Named.fold_outside (fun decl idl -> NamedDecl.get_id decl :: idl) sign ~init:[] let ids_of_context env = (ids_of_rel_context (rel_context env)) @ (ids_of_named_context (named_context env)) let names_of_rel_context env = List.map RelDecl.get_name (rel_context env) let is_section_variable env id = try let _ = Environ.lookup_named id env in true with Not_found -> false let global_of_constr sigma c = let open GlobRef in match EConstr.kind sigma c with | Const (c, u) -> ConstRef c, u | Ind (i, u) -> IndRef i, u | Construct (c, u) -> ConstructRef c, u | Var id -> VarRef id, EConstr.EInstance.empty | _ -> raise Not_found let is_global = EConstr.isRefX let isGlobalRef = EConstr.isRef let is_template_polymorphic_ind env sigma f = match EConstr.kind sigma f with | Ind (ind, u) -> if not (EConstr.EInstance.is_empty u) then false else Environ.template_polymorphic_ind ind env | _ -> false let base_sort_cmp pb s0 s1 = match (s0,s1) with | SProp, SProp | Prop, Prop | Set, Set | Type _, Type _ -> true | SProp, _ | _, SProp -> false | Prop, Set | Prop, Type _ | Set, Type _ -> pb == Reduction.CUMUL | Set, Prop | Type _, Prop | Type _, Set -> false let rec is_Prop sigma c = match EConstr.kind sigma c with | Sort u -> begin match EConstr.ESorts.kind sigma u with | Prop -> true | _ -> false end | Cast (c,_,_) -> is_Prop sigma c | _ -> false let rec is_Set sigma c = match EConstr.kind sigma c with | Sort u -> begin match EConstr.ESorts.kind sigma u with | Set -> true | _ -> false end | Cast (c,_,_) -> is_Set sigma c | _ -> false let rec is_Type sigma c = match EConstr.kind sigma c with | Sort u -> begin match EConstr.ESorts.kind sigma u with | Type _ -> true | _ -> false end | Cast (c,_,_) -> is_Type sigma c | _ -> false (* eq_constr extended with universe erasure *) let compare_constr_univ sigma f cv_pb t1 t2 = let open EConstr in match EConstr.kind sigma t1, EConstr.kind sigma t2 with Sort s1, Sort s2 -> base_sort_cmp cv_pb (ESorts.kind 
sigma s1) (ESorts.kind sigma s2) | Prod (_,t1,c1), Prod (_,t2,c2) -> f Reduction.CONV t1 t2 && f cv_pb c1 c2 | Const (c, u), Const (c', u') -> Constant.CanOrd.equal c c' | Ind (i, _), Ind (i', _) -> Ind.CanOrd.equal i i' | Construct (i, _), Construct (i', _) -> Construct.CanOrd.equal i i' | _ -> EConstr.compare_constr sigma (fun t1 t2 -> f Reduction.CONV t1 t2) t1 t2 let constr_cmp sigma cv_pb t1 t2 = let rec compare cv_pb t1 t2 = compare_constr_univ sigma compare cv_pb t1 t2 in compare cv_pb t1 t2 let eq_constr sigma t1 t2 = constr_cmp sigma Reduction.CONV t1 t2 (* App(c,[t1,...tn]) -> ([c,t1,...,tn-1],tn) App(c,[||]) -> ([],c) *) let split_app sigma c = match EConstr.kind sigma c with App(c,l) -> let len = Array.length l in if Int.equal len 0 then ([],c) else let last = Array.get l (len-1) in let prev = Array.sub l 0 (len-1) in c::(Array.to_list prev), last | _ -> assert false type subst = (EConstr.rel_context * EConstr.constr) Evar.Map.t exception CannotFilter let filtering sigma env cv_pb c1 c2 = let open EConstr in let open Vars in let evm = ref Evar.Map.empty in let define cv_pb e1 ev c1 = try let (e2,c2) = Evar.Map.find ev !evm in let shift = List.length e1 - List.length e2 in if constr_cmp sigma cv_pb c1 (lift shift c2) then () else raise CannotFilter with Not_found -> evm := Evar.Map.add ev (e1,c1) !evm in let rec aux env cv_pb c1 c2 = match EConstr.kind sigma c1, EConstr.kind sigma c2 with | App _, App _ -> let ((p1,l1),(p2,l2)) = (split_app sigma c1),(split_app sigma c2) in let () = aux env cv_pb l1 l2 in begin match p1, p2 with | [], [] -> () | (h1 :: p1), (h2 :: p2) -> aux env cv_pb (applist (h1, p1)) (applist (h2, p2)) | _ -> assert false end | Prod (n,t1,c1), Prod (_,t2,c2) -> aux env cv_pb t1 t2; aux (RelDecl.LocalAssum (n,t1) :: env) cv_pb c1 c2 | _, Evar (ev,_) -> define cv_pb env ev c1 | Evar (ev,_), _ -> define cv_pb env ev c2 | _ -> if compare_constr_univ sigma (fun pb c1 c2 -> aux env pb c1 c2; true) cv_pb c1 c2 then () else raise CannotFilter (* TODO: le reste des binders *) in aux env cv_pb c1 c2; !evm let decompose_prod_letin sigma c = let rec prodec_rec i l c = match EConstr.kind sigma c with | Prod (n,t,c) -> prodec_rec (succ i) (RelDecl.LocalAssum (n,t)::l) c | LetIn (n,d,t,c) -> prodec_rec (succ i) (RelDecl.LocalDef (n,d,t)::l) c | Cast (c,_,_) -> prodec_rec i l c | _ -> i,l,c in prodec_rec 0 [] c (* (nb_lam [na1:T1]...[nan:Tan]c) where c is not an abstraction * gives n (casts are ignored) *) let nb_lam sigma c = let rec nbrec n c = match EConstr.kind sigma c with | Lambda (_,_,c) -> nbrec (n+1) c | Cast (c,_,_) -> nbrec n c | _ -> n in nbrec 0 c (* similar to nb_lam, but gives the number of products instead *) let nb_prod sigma c = let rec nbrec n c = match EConstr.kind sigma c with | Prod (_,_,c) -> nbrec (n+1) c | Cast (c,_,_) -> nbrec n c | _ -> n in nbrec 0 c let nb_prod_modulo_zeta sigma x = let rec count n c = match EConstr.kind sigma c with Prod(_,_,t) -> count (n+1) t | LetIn(_,a,_,t) -> count n (EConstr.Vars.subst1 a t) | Cast(c,_,_) -> count n c | _ -> n in count 0 x let align_prod_letin sigma c a = let (lc,_,_) = decompose_prod_letin sigma c in let (la,l,a) = decompose_prod_letin sigma a in if not (la >= lc) then invalid_arg "align_prod_letin"; let (l1,l2) = Util.List.chop lc l in l2,it_mkProd_or_LetIn a l1 (* We reduce a series of head eta-redex or nothing at all *) (* [x1:c1;...;xn:cn]@(f;a1...an;x1;...;xn) --> @(f;a1...an) *) (* Remplace 2 earlier buggish versions *) let rec eta_reduce_head sigma c = let open EConstr in let open Vars in match 
EConstr.kind sigma c with | Lambda (_,c1,c') -> (match EConstr.kind sigma (eta_reduce_head sigma c') with | App (f,cl) -> let lastn = (Array.length cl) - 1 in if lastn < 0 then anomaly (Pp.str "application without arguments.") else (match EConstr.kind sigma cl.(lastn) with | Rel 1 -> let c' = if Int.equal lastn 0 then f else mkApp (f, Array.sub cl 0 lastn) in if noccurn sigma 1 c' then lift (-1) c' else c | _ -> c) | _ -> c) | _ -> c (* iterator on rel context *) let process_rel_context f env = let sign = named_context_val env in let rels = EConstr.rel_context env in let env0 = reset_with_named_context sign env in Context.Rel.fold_outside f rels ~init:env0 let assums_of_rel_context sign = Context.Rel.fold_outside (fun decl l -> match decl with | RelDecl.LocalDef _ -> l | RelDecl.LocalAssum (na,t) -> (na, t)::l) sign ~init:[] let map_rel_context_in_env f env sign = let rec aux env acc = function | d::sign -> aux (push_rel d env) (RelDecl.map_constr (f env) d :: acc) sign | [] -> acc in aux env [] (List.rev sign) let map_rel_context_with_binders = Context.Rel.map_with_binders let substl_rel_context = Vars.substl_rel_context let lift_rel_context = Vars.lift_rel_context let smash_rel_context = Vars.smash_rel_context let fold_named_context_both_sides f l ~init = List.fold_right_and_left f l init let mem_named_context_val id ctxt = try ignore(Environ.lookup_named_ctxt id ctxt); true with Not_found -> false let map_rel_decl f = function | RelDecl.LocalAssum (id, t) -> RelDecl.LocalAssum (id, f t) | RelDecl.LocalDef (id, b, t) -> RelDecl.LocalDef (id, f b, f t) let map_named_decl f = function | NamedDecl.LocalAssum (id, t) -> NamedDecl.LocalAssum (id, f t) | NamedDecl.LocalDef (id, b, t) -> NamedDecl.LocalDef (id, f b, f t) let compact_named_context sign = let compact l decl = match decl, l with | NamedDecl.LocalAssum (i,t), [] -> [CompactedDecl.LocalAssum ([i],t)] | NamedDecl.LocalDef (i,c,t), [] -> [CompactedDecl.LocalDef ([i],c,t)] | NamedDecl.LocalAssum (i1,t1), CompactedDecl.LocalAssum (li,t2) :: q -> if Constr.equal t1 t2 then CompactedDecl.LocalAssum (i1::li, t2) :: q else CompactedDecl.LocalAssum ([i1],t1) :: CompactedDecl.LocalAssum (li,t2) :: q | NamedDecl.LocalDef (i1,c1,t1), CompactedDecl.LocalDef (li,c2,t2) :: q -> if Constr.equal c1 c2 && Constr.equal t1 t2 then CompactedDecl.LocalDef (i1::li, c2, t2) :: q else CompactedDecl.LocalDef ([i1],c1,t1) :: CompactedDecl.LocalDef (li,c2,t2) :: q | NamedDecl.LocalAssum (i,t), q -> CompactedDecl.LocalAssum ([i],t) :: q | NamedDecl.LocalDef (i,c,t), q -> CompactedDecl.LocalDef ([i],c,t) :: q in sign |> Context.Named.fold_inside compact ~init:[] |> List.rev let clear_named_body id env = let open NamedDecl in let aux _ = function | LocalDef (id',c,t) when Id.equal id id'.binder_name -> push_named (LocalAssum (id',t)) | d -> push_named d in fold_named_context aux env ~init:(reset_context env) let global_vars_set env sigma constr = let rec filtrec acc c = match EConstr.destRef sigma c with | gr, _ -> Id.Set.union (vars_of_global env gr) acc | exception DestKO -> EConstr.fold sigma filtrec acc c in filtrec Id.Set.empty constr let global_vars env sigma ids = Id.Set.elements (global_vars_set env sigma ids) let global_vars_set_of_decl env sigma = function | NamedDecl.LocalAssum (_,t) -> global_vars_set env sigma t | NamedDecl.LocalDef (_,c,t) -> Id.Set.union (global_vars_set env sigma t) (global_vars_set env sigma c) let dependency_closure env sigma sign hyps = if Id.Set.is_empty hyps then [] else let (_,lh) = Context.Named.fold_inside (fun (hs,hl) d 
-> let x = NamedDecl.get_id d in if Id.Set.mem x hs then (Id.Set.union (global_vars_set_of_decl env sigma d) (Id.Set.remove x hs), x::hl) else (hs,hl)) ~init:(hyps,[]) sign in List.rev lh let global_app_of_constr sigma c = let open GlobRef in match EConstr.kind sigma c with | Const (c, u) -> (ConstRef c, u), None | Ind (i, u) -> (IndRef i, u), None | Construct (c, u) -> (ConstructRef c, u), None | Var id -> (VarRef id, EConstr.EInstance.empty), None | Proj (p, c) -> (ConstRef (Projection.constant p), EConstr.EInstance.empty), Some c | _ -> raise Not_found let prod_applist sigma c l = let open EConstr in let rec app subst c l = match EConstr.kind sigma c, l with | Prod(_,_,c), arg::l -> app (arg::subst) c l | _, [] -> Vars.substl subst c | _ -> anomaly (Pp.str "Not enough prod's.") in app [] c l let prod_applist_assum sigma n c l = let open EConstr in let rec app n subst c l = if Int.equal n 0 then if l == [] then Vars.substl subst c else anomaly (Pp.str "Not enough arguments.") else match EConstr.kind sigma c, l with | Prod(_,_,c), arg::l -> app (n-1) (arg::subst) c l | LetIn(_,b,_,c), _ -> app (n-1) (Vars.substl subst b::subst) c l | _ -> anomaly (Pp.str "Not enough prod/let's.") in app n [] c l (* Cut a context ctx in 2 parts (ctx1,ctx2) with ctx1 containing k non let-in variables skips let-in's; let-in's in the middle are put in ctx2 *) let context_chop k ctx = let rec chop_aux acc = function | (0, l2) -> (List.rev acc, l2) | (n, (RelDecl.LocalDef _ as h)::t) -> chop_aux (h::acc) (n, t) | (n, (h::t)) -> chop_aux (h::acc) (pred n, t) | (_, []) -> anomaly (Pp.str "context_chop.") in chop_aux [] (k,ctx) (* Do not skip let-in's *) let env_rel_context_chop k env = let open EConstr in let rels = rel_context env in let ctx1,ctx2 = List.chop k rels in push_rel_context ctx2 (reset_with_named_context (named_context_val env) env), ctx1 end include Internal coq-8.15.0/engine/termops.mli000066400000000000000000000345221417001151100160250ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* env -> env val push_rels_assum : (Name.t Context.binder_annot * Constr.types) list -> env -> env val push_named_rec_types : Name.t Context.binder_annot array * Constr.types array * 'a -> env -> env val lookup_rel_id : Id.t -> ('c, 't) Context.Rel.pt -> int * 'c option * 't (** Associates the contents of an identifier in a [rel_context]. Raise [Not_found] if there is no such identifier. *) (** Functions that build argument lists matching a block of binders or a context. 
[rel_vect n m] builds [|Rel (n+m);...;Rel(n+1)|] *) val rel_vect : int -> int -> Constr.constr array val rel_list : int -> int -> constr list (** iterators/destructors on terms *) val mkProd_or_LetIn : rel_declaration -> types -> types val mkProd_wo_LetIn : rel_declaration -> types -> types val it_mkProd : types -> (Name.t Context.binder_annot * types) list -> types val it_mkLambda : constr -> (Name.t Context.binder_annot * types) list -> constr val it_mkProd_or_LetIn : types -> rel_context -> types val it_mkProd_wo_LetIn : types -> rel_context -> types val it_mkLambda_or_LetIn : Constr.constr -> Constr.rel_context -> Constr.constr val it_mkNamedProd_or_LetIn : types -> named_context -> types val it_mkNamedProd_wo_LetIn : Constr.types -> Constr.named_context -> Constr.types val it_mkNamedLambda_or_LetIn : constr -> named_context -> constr (* Ad hoc version reinserting letin, assuming the body is defined in the context where the letins are expanded *) val it_mkLambda_or_LetIn_from_no_LetIn : Constr.constr -> Constr.rel_context -> Constr.constr (** {6 Generic iterators on constr} *) val map_constr_with_binders_left_to_right : Environ.env -> Evd.evar_map -> (rel_declaration -> 'a -> 'a) -> ('a -> constr -> constr) -> 'a -> constr -> constr val map_constr_with_full_binders : Environ.env -> Evd.evar_map -> (rel_declaration -> 'a -> 'a) -> ('a -> constr -> constr) -> 'a -> constr -> constr (** [fold_constr_with_binders g f n acc c] folds [f n] on the immediate subterms of [c] starting from [acc] and proceeding from left to right according to the usual representation of the constructions as [fold_constr] but it carries an extra data [n] (typically a lift index) which is processed by [g] (which typically add 1 to [n]) at each binder traversal; it is not recursive *) val fold_constr_with_binders : Evd.evar_map -> ('a -> 'a) -> ('a -> 'b -> constr -> 'b) -> 'a -> 'b -> constr -> 'b val fold_constr_with_full_binders : Environ.env -> Evd.evar_map -> (rel_declaration -> 'a -> 'a) -> ('a -> 'b -> constr -> 'b) -> 'a -> 'b -> constr -> 'b (**********************************************************************) val strip_head_cast : Evd.evar_map -> constr -> constr val drop_extra_implicit_args : Evd.evar_map -> constr -> constr (** occur checks *) exception Occur val occur_meta : Evd.evar_map -> constr -> bool val occur_existential : Evd.evar_map -> constr -> bool val occur_meta_or_existential : Evd.evar_map -> constr -> bool val occur_metavariable : Evd.evar_map -> metavariable -> constr -> bool val occur_evar : Evd.evar_map -> Evar.t -> constr -> bool val occur_var : env -> Evd.evar_map -> Id.t -> constr -> bool val occur_var_indirectly : env -> Evd.evar_map -> Id.t -> constr -> GlobRef.t option val occur_var_in_decl : env -> Evd.evar_map -> Id.t -> named_declaration -> bool val occur_vars : env -> Evd.evar_map -> Id.Set.t -> constr -> bool val occur_vars_in_decl : env -> Evd.evar_map -> Id.Set.t -> named_declaration -> bool (** As {!occur_var} but assume the identifier not to be a section variable *) val local_occur_var : Evd.evar_map -> Id.t -> constr -> bool val local_occur_var_in_decl : Evd.evar_map -> Id.t -> named_declaration -> bool val free_rels : Evd.evar_map -> constr -> Int.Set.t (* Return the list of unbound rels and unqualified reference (same strategy as in Namegen) *) val free_rels_and_unqualified_refs : Evd.evar_map -> constr -> Int.Set.t * Id.Set.t (** [dependent m t] tests whether [m] is a subterm of [t] *) val dependent : Evd.evar_map -> constr -> constr -> bool val dependent_no_evar 
: Evd.evar_map -> constr -> constr -> bool val dependent_in_decl : Evd.evar_map -> constr -> named_declaration -> bool val count_occurrences : Evd.evar_map -> constr -> constr -> int val collect_metas : Evd.evar_map -> constr -> int list val collect_vars : Evd.evar_map -> constr -> Id.Set.t (** for visible vars only *) (* Substitution of metavariables *) type meta_value_map = (metavariable * Constr.constr) list val subst_meta : meta_value_map -> Constr.constr -> Constr.constr val isMetaOf : Evd.evar_map -> metavariable -> constr -> bool (** Type assignment for metavariables *) type meta_type_map = (metavariable * Constr.types) list (** [pop c] lifts by -1 the positive indexes in [c] *) val pop : constr -> constr (** {6 ... } *) (** Substitution of an arbitrary large term. Uses equality modulo reduction of let *) (** [replace_term_gen eq arity e c] replaces matching subterms according to [eq] by [e] in [c]. If [arity] is non-zero applications of larger length are handled atomically. *) val replace_term_gen : Evd.evar_map -> (Evd.evar_map -> int -> constr -> bool) -> int -> constr -> constr -> constr (** [subst_term d c] replaces [d] by [Rel 1] in [c] *) val subst_term : Evd.evar_map -> constr -> constr -> constr (** [replace_term d e c] replaces [d] by [e] in [c] *) val replace_term : Evd.evar_map -> constr -> constr -> constr -> constr (** Alternative term equalities *) val base_sort_cmp : Reduction.conv_pb -> Sorts.t -> Sorts.t -> bool val compare_constr_univ : Evd.evar_map -> (Reduction.conv_pb -> constr -> constr -> bool) -> Reduction.conv_pb -> constr -> constr -> bool val constr_cmp : Evd.evar_map -> Reduction.conv_pb -> constr -> constr -> bool val eq_constr : Evd.evar_map -> constr -> constr -> bool (* FIXME rename: erases universes*) val eta_reduce_head : Evd.evar_map -> constr -> constr (** [prod_applist] [forall (x1:B1;...;xn:Bn), B] [a1...an] @return [B[a1...an]] *) val prod_applist : Evd.evar_map -> constr -> constr list -> constr (** In [prod_applist_assum n c args], [c] is supposed to have the form [∀Γ.c] with [Γ] of length [m] and possibly with let-ins; it returns [c] with the assumptions of [Γ] instantiated by [args] and the local definitions of [Γ] expanded. Note that [n] counts both let-ins and prods, while the length of [args] only counts prods. In other words, varying [n] changes how many trailing let-ins are expanded. *) val prod_applist_assum : Evd.evar_map -> int -> constr -> constr list -> constr (** Remove recursively the casts around a term i.e. [strip_outer_cast (Cast (Cast ... (Cast c, t) ... ))] is [c]. *) val strip_outer_cast : Evd.evar_map -> constr -> constr exception CannotFilter (** Lightweight first-order filtering procedure. Unification variables ar represented by (untyped) Evars. [filtering c1 c2] returns the substitution n'th evar -> (context,term), or raises [CannotFilter]. Warning: Outer-kernel sort subtyping are taken into account: c1 has to be smaller than c2 wrt. sorts. 
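Illustrative usage, an editor's addition rather than upstream documentation: given a pattern [p] whose unification variables are represented by evars and a candidate term [t], [filtering sigma [] Reduction.CONV p t] either returns a map sending each such evar to the pair of its local context and the matched subterm, or raises [CannotFilter] when no first-order match exists.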
*) type subst = (rel_context * constr) Evar.Map.t val filtering : Evd.evar_map -> rel_context -> Reduction.conv_pb -> constr -> constr -> subst val decompose_prod_letin : Evd.evar_map -> constr -> int * rel_context * constr val align_prod_letin : Evd.evar_map -> constr -> constr -> rel_context * constr (** [nb_lam] {% $ %}[x_1:T_1]...[x_n:T_n]c{% $ %} where {% $ %}c{% $ %} is not an abstraction gives {% $ %}n{% $ %} (casts are ignored) *) val nb_lam : Evd.evar_map -> constr -> int (** Similar to [nb_lam], but gives the number of products instead *) val nb_prod : Evd.evar_map -> constr -> int (** Similar to [nb_prod], but zeta-contracts let-in on the way *) val nb_prod_modulo_zeta : Evd.evar_map -> constr -> int (** Get the last arg of a constr intended to be an application *) val last_arg : Evd.evar_map -> constr -> constr (** Force the decomposition of a term as an applicative one *) val decompose_app_vect : Evd.evar_map -> constr -> constr * constr array val adjust_app_list_size : constr -> constr list -> constr -> constr list -> (constr * constr list * constr * constr list) val adjust_app_array_size : constr -> constr array -> constr -> constr array -> (constr * constr array * constr * constr array) (** name contexts *) type names_context = Name.t list val add_name : Name.t -> names_context -> names_context val lookup_name_of_rel : int -> names_context -> Name.t val lookup_rel_of_name : Id.t -> names_context -> int val empty_names_context : names_context val ids_of_rel_context : ('c, 't) Context.Rel.pt -> Id.t list val ids_of_named_context : ('c, 't) Context.Named.pt -> Id.t list val ids_of_context : env -> Id.t list val names_of_rel_context : env -> names_context (* [context_chop n Γ] returns (Γ₁,Γ₂) such that [Γ]=[Γ₂Γ₁], [Γ₁] has [n] hypotheses, excluding local definitions, and [Γ₁], if not empty, starts with an hypothesis (i.e. 
[Γ₁] has the form empty or [x:A;Γ₁'] *) val context_chop : int -> Constr.rel_context -> Constr.rel_context * Constr.rel_context (* [env_rel_context_chop n env] extracts out the [n] top declarations of the rel_context part of [env], counting both local definitions and hypotheses *) val env_rel_context_chop : int -> env -> env * rel_context (** Set of local names *) val vars_of_env: env -> Id.Set.t val add_vname : Id.Set.t -> Name.t -> Id.Set.t (** other signature iterators *) val process_rel_context : (rel_declaration -> env -> env) -> env -> env val assums_of_rel_context : ('c, 't) Context.Rel.pt -> (Name.t Context.binder_annot * 't) list val lift_rel_context : int -> Constr.rel_context -> Constr.rel_context [@@ocaml.deprecated "Use synonymous [Vars.lift_rel_context]."] val substl_rel_context : Constr.constr list -> Constr.rel_context -> Constr.rel_context [@@ocaml.deprecated "Use synonymous [Vars.substl_rel_context]."] val smash_rel_context : Constr.rel_context -> Constr.rel_context [@@ocaml.deprecated "Use synonymous [Vars.smash_rel_context]."] val map_rel_context_with_binders : (int -> 'c -> 'c) -> ('c, 'c) Context.Rel.pt -> ('c, 'c) Context.Rel.pt [@@ocaml.deprecated "Use synonymous [Context.Rel.map_with_binders]."] val map_rel_context_in_env : (env -> Constr.constr -> Constr.constr) -> env -> Constr.rel_context -> Constr.rel_context val fold_named_context_both_sides : ('a -> Constr.named_declaration -> Constr.named_declaration list -> 'a) -> Constr.named_context -> init:'a -> 'a val mem_named_context_val : Id.t -> named_context_val -> bool val compact_named_context : Constr.named_context -> Constr.compacted_context val map_rel_decl : ('a -> 'b) -> ('a, 'a) Context.Rel.Declaration.pt -> ('b, 'b) Context.Rel.Declaration.pt val map_named_decl : ('a -> 'b) -> ('a, 'a) Context.Named.Declaration.pt -> ('b, 'b) Context.Named.Declaration.pt val clear_named_body : Id.t -> env -> env val global_vars : env -> Evd.evar_map -> constr -> Id.t list val global_vars_set : env -> Evd.evar_map -> constr -> Id.Set.t val global_vars_set_of_decl : env -> Evd.evar_map -> named_declaration -> Id.Set.t val global_app_of_constr : Evd.evar_map -> constr -> (GlobRef.t * EInstance.t) * constr option (** Gives an ordered list of hypotheses, closed by dependencies, containing a given set *) val dependency_closure : env -> Evd.evar_map -> named_context -> Id.Set.t -> Id.t list (** Test if an identifier is the basename of a global reference *) val is_section_variable : env -> Id.t -> bool val global_of_constr : Evd.evar_map -> constr -> GlobRef.t * EInstance.t [@@ocaml.deprecated "Use [EConstr.destRef] instead (throws DestKO instead of Not_found)."] val is_global : Evd.evar_map -> GlobRef.t -> constr -> bool [@@ocaml.deprecated "Use [EConstr.isRefX] instead."] val isGlobalRef : Evd.evar_map -> constr -> bool [@@ocaml.deprecated "Use [EConstr.isRef] instead."] val is_template_polymorphic_ind : env -> Evd.evar_map -> constr -> bool val is_Prop : Evd.evar_map -> constr -> bool val is_Set : Evd.evar_map -> constr -> bool val is_Type : Evd.evar_map -> constr -> bool val reference_of_level : Evd.evar_map -> Univ.Level.t -> Libnames.qualid option (** {5 Debug pretty-printers} *) open Evd val pr_existential_key : env -> evar_map -> Evar.t -> Pp.t val evar_suggested_name : env -> evar_map -> Evar.t -> Id.t val pr_evar_info : env -> evar_map -> evar_info -> Pp.t val pr_evar_constraints : evar_map -> evar_constraint list -> Pp.t val pr_evar_map : ?with_univs:bool -> int option -> env -> evar_map -> Pp.t val pr_evar_map_filter : 
?with_univs:bool -> (Evar.t -> evar_info -> bool) -> env -> evar_map -> Pp.t val pr_metaset : Metaset.t -> Pp.t val pr_evar_universe_context : UState.t -> Pp.t val pr_evd_level : evar_map -> Univ.Level.t -> Pp.t module Internal : sig (** NOTE: to print terms you always want to use functions in Printer, not these ones which are for very special cases. *) (** debug printers: print raw form for terms, both with evar-substitution and without. *) val debug_print_constr : constr -> Pp.t val debug_print_constr_env : env -> evar_map -> constr -> Pp.t (** Pretty-printer hook: [print_constr_env env sigma c] will pretty print c if the pretty printing layer has been linked into the Coq binary. *) val print_constr_env : env -> Evd.evar_map -> constr -> Pp.t (** [set_print_constr f] sets f to be the pretty printer *) val set_print_constr : (env -> Evd.evar_map -> constr -> Pp.t) -> unit (** Printers for contexts *) val print_named_context : env -> Pp.t val pr_rel_decl : env -> Constr.rel_declaration -> Pp.t val print_rel_context : env -> Pp.t val print_env : env -> Pp.t end coq-8.15.0/engine/uState.ml000066400000000000000000000666761417001151100154470ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* match l, r with | Some _, _ -> l | _, _ -> r) s t let union uctx uctx' = if uctx == uctx' then uctx else if is_empty uctx' then uctx else let local = ContextSet.union uctx.local uctx'.local in let seff = Level.Set.union uctx.seff_univs uctx'.seff_univs in let names = uname_union (fst uctx.names) (fst uctx'.names) in let names_rev = Level.Map.lunion (snd uctx.names) (snd uctx'.names) in let newus = Level.Set.diff (ContextSet.levels uctx'.local) (ContextSet.levels uctx.local) in let newus = Level.Set.diff newus (Level.Map.domain uctx.univ_variables) in let weak = UPairSet.union uctx.weak_constraints uctx'.weak_constraints in let declarenew g = Level.Set.fold (fun u g -> UGraph.add_universe u ~lbound:uctx.universes_lbound ~strict:false g) newus g in { names = (names, names_rev); local = local; seff_univs = seff; univ_variables = Level.Map.subst_union uctx.univ_variables uctx'.univ_variables; univ_algebraic = Level.Set.union uctx.univ_algebraic uctx'.univ_algebraic; initial_universes = declarenew uctx.initial_universes; universes = (if local == uctx.local then uctx.universes else let cstrsr = ContextSet.constraints uctx'.local in UGraph.merge_constraints cstrsr (declarenew uctx.universes)); universes_lbound = uctx.universes_lbound; weak_constraints = weak} let context_set uctx = uctx.local let constraints uctx = snd uctx.local let compute_instance_binders rbinders inst = let map lvl = try Name (Option.get (Level.Map.find lvl rbinders).uname) with Option.IsNone | Not_found -> Anonymous in Array.map map (Instance.to_array inst) let context uctx = let (_, rbinders) = uctx.names in ContextSet.to_context (compute_instance_binders rbinders) uctx.local type named_universes_entry = universes_entry * UnivNames.universe_binders let univ_entry ~poly uctx = let (binders, _) = uctx.names in let entry = if poly then Polymorphic_entry (context uctx) else Monomorphic_entry (context_set uctx) in entry, binders let of_context_set local = { empty with local } type universe_opt_subst = UnivSubst.universe_opt_subst let subst uctx = uctx.univ_variables let nf_universes uctx c = UnivSubst.nf_evars_and_universes_opt_subst (fun _ -> None) (subst uctx) c let ugraph uctx = 
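(* Editor's note, descriptive and not from upstream: [ugraph uctx] returns
   the universe graph currently attached to [uctx], while [initial_graph uctx]
   just below returns the graph the state started from. A typical check,
   assuming [u] and [v] are [Univ.Universe.t], is
     UGraph.check_leq (ugraph uctx) u v *)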
uctx.universes let initial_graph uctx = uctx.initial_universes let algebraics uctx = uctx.univ_algebraic let add_names ?loc s l (names, names_rev) = if UNameMap.mem s names then user_err ?loc Pp.(str "Universe " ++ Names.Id.print s ++ str" already bound."); (UNameMap.add s l names, Level.Map.add l { uname = Some s; uloc = loc } names_rev) let add_loc l loc (names, names_rev) = match loc with | None -> (names, names_rev) | Some _ -> (names, Level.Map.add l { uname = None; uloc = loc } names_rev) let of_binders names = let rev_map = UNameMap.fold (fun id l rmap -> Level.Map.add l { uname = Some id; uloc = None } rmap) names Level.Map.empty in { empty with names = (names, rev_map) } let universe_of_name uctx s = UNameMap.find s (fst uctx.names) let universe_binders uctx = let named, _ = uctx.names in named let instantiate_variable l b v = try v := Level.Map.set l (Some b) !v with Not_found -> assert false exception UniversesDiffer let drop_weak_constraints = Goptions.declare_bool_option_and_ref ~depr:false ~key:["Cumulativity";"Weak";"Constraints"] ~value:false let process_universe_constraints uctx cstrs = let open UnivSubst in let open UnivProblem in let univs = uctx.universes in let vars = ref uctx.univ_variables in let weak = ref uctx.weak_constraints in let normalize u = normalize_univ_variable_opt_subst !vars u in let nf_constraint = function | ULub (u, v) -> ULub (level_subst_of normalize u, level_subst_of normalize v) | UWeak (u, v) -> UWeak (level_subst_of normalize u, level_subst_of normalize v) | UEq (u, v) -> UEq (subst_univs_universe normalize u, subst_univs_universe normalize v) | ULe (u, v) -> ULe (subst_univs_universe normalize u, subst_univs_universe normalize v) in let is_local l = Level.Map.mem l !vars in let varinfo x = match Universe.level x with | None -> Inl x | Some l -> Inr l in let equalize_variables fo l l' r r' local = (* Assumes l = [l',0] and r = [r',0] *) let () = if is_local l' then instantiate_variable l' r vars else if is_local r' then instantiate_variable r' l vars else if not (UGraph.check_eq_level univs l' r') then (* Two rigid/global levels, none of them being local, one of them being Prop/Set, disallow *) if Level.is_small l' || Level.is_small r' then raise (UniverseInconsistency (Eq, l, r, None)) else if fo then raise UniversesDiffer in enforce_eq_level l' r' local in let equalize_universes l r local = match varinfo l, varinfo r with | Inr l', Inr r' -> equalize_variables false l l' r r' local | Inr l, Inl r | Inl r, Inr l -> let alg = Level.Set.mem l uctx.univ_algebraic in let inst = univ_level_rem l r r in if alg && not (Level.Set.mem l (Universe.levels inst)) then (instantiate_variable l inst vars; local) else let lu = Universe.make l in if univ_level_mem l r then enforce_leq inst lu local else raise (UniverseInconsistency (Eq, lu, r, None)) | Inl _, Inl _ (* both are algebraic *) -> if UGraph.check_eq univs l r then local else raise (UniverseInconsistency (Eq, l, r, None)) in let unify_universes cst local = let cst = nf_constraint cst in if UnivProblem.is_trivial cst then local else match cst with | ULe (l, r) -> begin match Univ.Universe.level r with | None -> if UGraph.check_leq univs l r then local else user_err Pp.(str "Algebraic universe on the right") | Some r' -> if Level.is_small r' then if not (Universe.is_levels l) then (* l contains a +1 and r=r' small so l <= r impossible *) raise (UniverseInconsistency (Le, l, r, None)) else if UGraph.check_leq univs l r then match Univ.Universe.level l with | Some l -> Univ.Constraints.add (l, Le, r') 
local | None -> local else let levels = Universe.levels l in let fold l' local = let l = Universe.make l' in if Level.is_small l' || is_local l' then equalize_variables false l l' r r' local else raise (UniverseInconsistency (Le, l, r, None)) in Level.Set.fold fold levels local else match Univ.Universe.level l with | Some l -> Univ.Constraints.add (l, Le, r') local | None -> (* We insert the constraint in the graph even if the graph already contains it. Indeed, checking the existance of the constraint is costly when the constraint does not already exist directly as a single edge in the graph, but adding an edge in the graph which is implied by others is cheap. Hence, by doing this, we avoid a costly check here, and make further checks of this constraint easier since it will exist directly in the graph. *) enforce_leq l r local end | ULub (l, r) -> equalize_variables true (Universe.make l) l (Universe.make r) r local | UWeak (l, r) -> if not (drop_weak_constraints ()) then weak := UPairSet.add (l,r) !weak; local | UEq (l, r) -> equalize_universes l r local in let unify_universes cst local = if not (UGraph.type_in_type univs) then unify_universes cst local else try unify_universes cst local with UniverseInconsistency _ -> local in let local = UnivProblem.Set.fold unify_universes cstrs Constraints.empty in !vars, !weak, local let add_constraints uctx cstrs = let univs, old_cstrs = uctx.local in let cstrs' = Constraints.fold (fun (l,d,r) acc -> let l = Universe.make l and r = Universe.make r in let cstr' = let open UnivProblem in match d with | Lt -> ULe (Universe.super l, r) | Le -> ULe (l, r) | Eq -> UEq (l, r) in UnivProblem.Set.add cstr' acc) cstrs UnivProblem.Set.empty in let vars, weak, cstrs' = process_universe_constraints uctx cstrs' in { uctx with local = (univs, Constraints.union old_cstrs cstrs'); univ_variables = vars; universes = UGraph.merge_constraints cstrs' uctx.universes; weak_constraints = weak; } let add_universe_constraints uctx cstrs = let univs, local = uctx.local in let vars, weak, local' = process_universe_constraints uctx cstrs in { uctx with local = (univs, Constraints.union local local'); univ_variables = vars; universes = UGraph.merge_constraints local' uctx.universes; weak_constraints = weak; } let constrain_variables diff uctx = let univs, local = uctx.local in let univs, vars, local = Level.Set.fold (fun l (univs, vars, cstrs) -> try match Level.Map.find l vars with | Some u -> (Level.Set.add l univs, Level.Map.remove l vars, Constraints.add (l, Eq, Option.get (Universe.level u)) cstrs) | None -> (univs, vars, cstrs) with Not_found | Option.IsNone -> (univs, vars, cstrs)) diff (univs, uctx.univ_variables, local) in { uctx with local = (univs, local); univ_variables = vars } let id_of_level uctx l = try Some (Option.get (Level.Map.find l (snd uctx.names)).uname) with Not_found | Option.IsNone -> None let qualid_of_level uctx l = let map, map_rev = uctx.names in try Some (Libnames.qualid_of_ident (Option.get (Level.Map.find l map_rev).uname)) with Not_found | Option.IsNone -> UnivNames.qualid_of_level map l let pr_uctx_level uctx l = match qualid_of_level uctx l with | Some qid -> Libnames.pr_qualid qid | None -> Level.pr l type ('a, 'b) gen_universe_decl = { univdecl_instance : 'a; (* Declared universes *) univdecl_extensible_instance : bool; (* Can new universes be added *) univdecl_constraints : 'b; (* Declared constraints *) univdecl_extensible_constraints : bool (* Can new constraints be added *) } type universe_decl = (lident list, Constraints.t) 
gen_universe_decl let default_univ_decl = { univdecl_instance = []; univdecl_extensible_instance = true; univdecl_constraints = Constraints.empty; univdecl_extensible_constraints = true } let pr_error_unbound_universes left uctx = let open Pp in let n = Level.Set.cardinal left in let prlev u = let info = Level.Map.find_opt u (snd uctx.names) in h (pr_uctx_level uctx u ++ (match info with | None | Some {uloc=None} -> mt () | Some {uloc=Some loc} -> spc() ++ str"(" ++ Loc.pr loc ++ str")")) in (hv 0 (str (CString.plural n "Universe") ++ spc () ++ (prlist_with_sep spc prlev (Level.Set.elements left)) ++ spc () ++ str (CString.conjugate_verb_to_be n) ++ str" unbound.")) exception UnboundUnivs of Level.Set.t * t (* Deliberately using no location as the location of the univs doesn't correspond to the failing command. *) let error_unbound_universes left uctx = raise (UnboundUnivs (left,uctx)) let _ = CErrors.register_handler (function | UnboundUnivs (left,uctx) -> Some (pr_error_unbound_universes left uctx) | _ -> None) let universe_context ~names ~extensible uctx = let levels = ContextSet.levels uctx.local in let newinst, left = List.fold_right (fun { CAst.loc; v = id } (newinst, acc) -> let l = try universe_of_name uctx id with Not_found -> assert false in (l :: newinst, Level.Set.remove l acc)) names ([], levels) in if not extensible && not (Level.Set.is_empty left) then error_unbound_universes left uctx else let left = ContextSet.sort_levels (Array.of_list (Level.Set.elements left)) in let inst = Array.append (Array.of_list newinst) left in let inst = Instance.of_array inst in (inst, ContextSet.constraints uctx.local) let check_universe_context_set ~names ~extensible uctx = if extensible then () else let left = List.fold_left (fun left { CAst.loc; v = id } -> let l = try universe_of_name uctx id with Not_found -> assert false in Level.Set.remove l left) (ContextSet.levels uctx.local) names in if not (Level.Set.is_empty left) then error_unbound_universes left uctx let check_implication uctx cstrs cstrs' = let gr = initial_graph uctx in let grext = UGraph.merge_constraints cstrs gr in let cstrs' = Constraints.filter (fun c -> not (UGraph.check_constraint grext c)) cstrs' in if Constraints.is_empty cstrs' then () else CErrors.user_err Pp.(str "Universe constraints are not implied by the ones declared: " ++ pr_constraints (pr_uctx_level uctx) cstrs') let check_mono_univ_decl uctx decl = let () = let names = decl.univdecl_instance in let extensible = decl.univdecl_extensible_instance in check_universe_context_set ~names ~extensible uctx in if not decl.univdecl_extensible_constraints then check_implication uctx decl.univdecl_constraints (ContextSet.constraints uctx.local); uctx.local let check_univ_decl ~poly uctx decl = if not decl.univdecl_extensible_constraints then check_implication uctx decl.univdecl_constraints (ContextSet.constraints uctx.local); let names = decl.univdecl_instance in let extensible = decl.univdecl_extensible_instance in let (binders, rbinders) = uctx.names in if poly then let inst, csts = universe_context ~names ~extensible uctx in let nas = compute_instance_binders rbinders inst in let uctx = UContext.make nas (inst, csts) in Polymorphic_entry uctx, binders else let () = check_universe_context_set ~names ~extensible uctx in Monomorphic_entry uctx.local, binders let is_bound l lbound = match lbound with | UGraph.Bound.Prop -> Level.is_prop l | UGraph.Bound.Set -> Level.is_set l let restrict_universe_context ~lbound (univs, csts) keep = let removed = Level.Set.diff univs keep 
in if Level.Set.is_empty removed then univs, csts else let allunivs = Constraints.fold (fun (u,_,v) all -> Level.Set.add u (Level.Set.add v all)) csts univs in let g = UGraph.initial_universes in let g = Level.Set.fold (fun v g -> if Level.is_small v then g else UGraph.add_universe v ~lbound ~strict:false g) allunivs g in let g = UGraph.merge_constraints csts g in let allkept = Level.Set.union (UGraph.domain UGraph.initial_universes) (Level.Set.diff allunivs removed) in let csts = UGraph.constraints_for ~kept:allkept g in let csts = Constraints.filter (fun (l,d,r) -> not ((is_bound l lbound && d == Le) || (Level.is_prop l && d == Lt && Level.is_set r))) csts in (Level.Set.inter univs keep, csts) let restrict uctx vars = let vars = Level.Set.union vars uctx.seff_univs in let vars = Names.Id.Map.fold (fun na l vars -> Level.Set.add l vars) (fst uctx.names) vars in let uctx' = restrict_universe_context ~lbound:uctx.universes_lbound uctx.local vars in { uctx with local = uctx' } type rigid = | UnivRigid | UnivFlexible of bool (** Is substitution by an algebraic ok? *) let univ_rigid = UnivRigid let univ_flexible = UnivFlexible false let univ_flexible_alg = UnivFlexible true (** ~sideff indicates that it is ok to redeclare a universe. ~extend also merges the universe context in the local constraint structures and not only in the graph. This depends if the context we merge comes from a side effect that is already inlined or defined separately. In the later case, there is no extension, see [emit_side_effects] for example. *) let merge ?loc ~sideff rigid uctx uctx' = let levels = ContextSet.levels uctx' in let uctx = match rigid with | UnivRigid -> uctx | UnivFlexible b -> let fold u accu = if Level.Map.mem u accu then accu else Level.Map.add u None accu in let uvars' = Level.Set.fold fold levels uctx.univ_variables in if b then { uctx with univ_variables = uvars'; univ_algebraic = Level.Set.union uctx.univ_algebraic levels } else { uctx with univ_variables = uvars' } in let local = ContextSet.append uctx' uctx.local in let declare g = Level.Set.fold (fun u g -> try UGraph.add_universe ~lbound:uctx.universes_lbound ~strict:false u g with UGraph.AlreadyDeclared when sideff -> g) levels g in let names = let fold u accu = let modify _ info = match info.uloc with | None -> { info with uloc = loc } | Some _ -> info in try Level.Map.modify u modify accu with Not_found -> Level.Map.add u { uname = None; uloc = loc } accu in (fst uctx.names, Level.Set.fold fold levels (snd uctx.names)) in let initial = declare uctx.initial_universes in let univs = declare uctx.universes in let universes = UGraph.merge_constraints (ContextSet.constraints uctx') univs in { uctx with names; local; universes; initial_universes = initial } let merge_subst uctx s = { uctx with univ_variables = Level.Map.subst_union uctx.univ_variables s } let demote_seff_univs univs uctx = let seff = Level.Set.union uctx.seff_univs univs in { uctx with seff_univs = seff } let demote_global_univs env uctx = let env_ugraph = Environ.universes env in let global_univs = UGraph.domain env_ugraph in let global_constraints, _ = UGraph.constraints_of_universes env_ugraph in let promoted_uctx = ContextSet.(of_set global_univs |> add_constraints global_constraints) in { uctx with local = ContextSet.diff uctx.local promoted_uctx } let merge_seff uctx uctx' = let levels = ContextSet.levels uctx' in let declare g = Level.Set.fold (fun u g -> try UGraph.add_universe ~lbound:uctx.universes_lbound ~strict:false u g with UGraph.AlreadyDeclared -> g) levels g in 
let initial_universes = declare uctx.initial_universes in let univs = declare uctx.universes in let universes = UGraph.merge_constraints (ContextSet.constraints uctx') univs in { uctx with universes; initial_universes } let emit_side_effects eff u = let uctx = Safe_typing.universes_of_private eff in let u = demote_seff_univs (fst uctx) u in merge_seff u uctx let update_sigma_univs uctx ugraph = let univs = UGraph.set_cumulative_sprop (elaboration_sprop_cumul()) ugraph in let eunivs = { uctx with initial_universes = univs; universes = univs } in merge_seff eunivs eunivs.local let add_universe ?loc name strict lbound uctx u = let initial_universes = UGraph.add_universe ~lbound ~strict u uctx.initial_universes in let universes = UGraph.add_universe ~lbound ~strict u uctx.universes in let local = ContextSet.add_universe u uctx.local in let names = match name with | Some n -> add_names ?loc n u uctx.names | None -> add_loc u loc uctx.names in { uctx with names; local; initial_universes; universes } let new_univ_variable ?loc rigid name uctx = let u = UnivGen.fresh_level () in let uctx = match rigid with | UnivRigid -> uctx | UnivFlexible allow_alg -> let univ_variables = Level.Map.add u None uctx.univ_variables in if allow_alg then let univ_algebraic = Level.Set.add u uctx.univ_algebraic in { uctx with univ_variables; univ_algebraic } else { uctx with univ_variables } in let uctx = add_universe ?loc name false uctx.universes_lbound uctx u in uctx, u let add_global_univ uctx u = add_universe None true UGraph.Bound.Set uctx u let make_with_initial_binders ~lbound univs us = let uctx = make ~lbound univs in List.fold_left (fun uctx { CAst.loc; v = id } -> fst (new_univ_variable ?loc univ_rigid (Some id) uctx)) uctx us let from_env ?(binders=[]) env = make_with_initial_binders ~lbound:(Environ.universes_lbound env) (Environ.universes env) binders let make_flexible_variable uctx ~algebraic u = let {local = cstrs; univ_variables = uvars; univ_algebraic = avars; universes=g; } = uctx in assert (try Level.Map.find u uvars == None with Not_found -> true); match UGraph.choose (fun v -> not (Level.equal u v) && (algebraic || not (Level.Set.mem v avars))) g u with | Some v -> let uvars' = Level.Map.add u (Some (Universe.make v)) uvars in { uctx with univ_variables = uvars'; } | None -> let uvars' = Level.Map.add u None uvars in let avars' = if algebraic then let uu = Universe.make u in let substu_not_alg u' v = Option.cata (fun vu -> Universe.equal uu vu && not (Level.Set.mem u' avars)) false v in let has_upper_constraint () = Constraints.exists (fun (l,d,r) -> d == Lt && Level.equal l u) (ContextSet.constraints cstrs) in if not (Level.Map.exists substu_not_alg uvars || has_upper_constraint ()) then Level.Set.add u avars else avars else avars in { uctx with univ_variables = uvars'; univ_algebraic = avars' } let make_nonalgebraic_variable uctx u = { uctx with univ_algebraic = Level.Set.remove u uctx.univ_algebraic } let make_flexible_nonalgebraic uctx = { uctx with univ_algebraic = Level.Set.empty } let is_sort_variable uctx s = match s with | Sorts.Type u -> (match universe_level u with | Some l as x -> if Level.Set.mem l (ContextSet.levels uctx.local) then x else None | None -> None) | _ -> None let subst_univs_context_with_def def usubst (uctx, cst) = (Level.Set.diff uctx def, UnivSubst.subst_univs_constraints usubst cst) let is_trivial_leq (l,d,r) = Level.is_prop l && (d == Le || d == Lt) && Level.is_set r (* Prop < i <-> Set+1 <= i <-> Set < i *) let translate_cstr (l,d,r as cstr) = if Level.equal 
Level.prop l && d == Lt && not (Level.equal Level.set r) then (Level.set, d, r) else cstr let refresh_constraints univs (ctx, cstrs) = let cstrs', univs' = Constraints.fold (fun c (cstrs', univs as acc) -> let c = translate_cstr c in if is_trivial_leq c then acc else (Constraints.add c cstrs', UGraph.enforce_constraint c univs)) cstrs (Constraints.empty, univs) in ((ctx, cstrs'), univs') let normalize_variables uctx = let normalized_variables, def, subst = UnivSubst.normalize_univ_variables uctx.univ_variables in let uctx_local = subst_univs_context_with_def def (make_subst subst) uctx.local in let uctx_local', univs = refresh_constraints uctx.initial_universes uctx_local in { uctx with local = uctx_local'; univ_variables = normalized_variables; universes = univs } let abstract_undefined_variables uctx = let vars' = Level.Map.fold (fun u v acc -> if v == None then Level.Set.remove u acc else acc) uctx.univ_variables uctx.univ_algebraic in { uctx with local = ContextSet.empty; univ_algebraic = vars' } let fix_undefined_variables uctx = let algs', vars' = Level.Map.fold (fun u v (algs, vars as acc) -> if v == None then (Level.Set.remove u algs, Level.Map.remove u vars) else acc) uctx.univ_variables (uctx.univ_algebraic, uctx.univ_variables) in { uctx with univ_variables = vars'; univ_algebraic = algs' } let minimize uctx = let open UnivMinim in let lbound = uctx.universes_lbound in let ((vars',algs'), us') = normalize_context_set ~lbound uctx.universes uctx.local uctx.univ_variables uctx.univ_algebraic uctx.weak_constraints in if ContextSet.equal us' uctx.local then uctx else let us', universes = refresh_constraints uctx.initial_universes us' in { names = uctx.names; local = us'; seff_univs = uctx.seff_univs; (* not sure about this *) univ_variables = vars'; univ_algebraic = algs'; universes = universes; universes_lbound = lbound; initial_universes = uctx.initial_universes; weak_constraints = UPairSet.empty; (* weak constraints are consumed *) } let pr_weak prl {weak_constraints=weak} = let open Pp in prlist_with_sep fnl (fun (u,v) -> prl u ++ str " ~ " ++ prl v) (UPairSet.elements weak) let pr_universe_body = function | None -> Pp.mt () | Some x -> Pp.(str " := " ++ Univ.Universe.pr x) let pr_universe_opt_subst = Univ.Level.Map.pr pr_universe_body coq-8.15.0/engine/uState.mli000066400000000000000000000177011417001151100156010ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* UGraph.t -> t [@@ocaml.deprecated "Use from_env"] val make_with_initial_binders : lbound:UGraph.Bound.t -> UGraph.t -> lident list -> t [@@ocaml.deprecated "Use from_env"] val from_env : ?binders:lident list -> Environ.env -> t (** Main entry point at the beginning of a declaration declaring the binding names as rigid universes. *) val of_binders : UnivNames.universe_binders -> t (** Main entry point when only names matter, e.g. for printing. *) val of_context_set : Univ.ContextSet.t -> t (** Main entry point when starting from the instance of a global reference, e.g. when building a scheme. *) (** Misc *) val is_empty : t -> bool val union : t -> t -> t (** {5 Projections and other destructors} *) val context_set : t -> Univ.ContextSet.t (** The local context of the state, i.e. a set of bound variables together with their associated constraints. 
*) type universe_opt_subst = UnivSubst.universe_opt_subst (* Reexport because UnivSubst is private *) val subst : t -> UnivSubst.universe_opt_subst (** The local universes that are unification variables *) val nf_universes : t -> Constr.t -> Constr.t (** Apply the local substitution [subst] *) val ugraph : t -> UGraph.t (** The current graph extended with the local constraints *) val initial_graph : t -> UGraph.t (** The initial graph with just the declarations of new universes. *) val algebraics : t -> Univ.Level.Set.t (** The subset of unification variables that can be instantiated with algebraic universes as they appear in inferred types only. *) val constraints : t -> Univ.Constraints.t (** Shorthand for {!context_set} composed with {!ContextSet.constraints}. *) val context : t -> Univ.UContext.t (** Shorthand for {!context_set} with {!Context_set.to_context}. *) type named_universes_entry = universes_entry * UnivNames.universe_binders val univ_entry : poly:bool -> t -> named_universes_entry (** Pick from {!context} or {!context_set} based on [poly]. *) val universe_binders : t -> UnivNames.universe_binders (** Return local names of universes. *) (** {5 Constraints handling} *) val add_constraints : t -> Univ.Constraints.t -> t (** @raise UniversesDiffer when universes differ *) val add_universe_constraints : t -> UnivProblem.Set.t -> t (** @raise UniversesDiffer when universes differ *) (** {5 Names} *) val universe_of_name : t -> Id.t -> Univ.Level.t (** Retrieve the universe associated to the name. *) (** {5 Unification} *) (** [restrict_universe_context lbound (univs,csts) keep] restricts [univs] to the universes in [keep]. The constraints [csts] are adjusted so that transitive constraints between remaining universes (those in [keep] and those not in [univs]) are preserved. *) val restrict_universe_context : lbound:UGraph.Bound.t -> ContextSet.t -> Level.Set.t -> ContextSet.t (** [restrict uctx ctx] restricts the local universes of [uctx] to [ctx] extended by local named universes and side effect universes (from [demote_seff_univs]). Transitive constraints between retained universes are preserved. *) val restrict : t -> Univ.Level.Set.t -> t type rigid = | UnivRigid | UnivFlexible of bool (** Is substitution by an algebraic ok? *) val univ_rigid : rigid val univ_flexible : rigid val univ_flexible_alg : rigid val merge : ?loc:Loc.t -> sideff:bool -> rigid -> t -> Univ.ContextSet.t -> t val merge_subst : t -> UnivSubst.universe_opt_subst -> t val emit_side_effects : Safe_typing.private_constants -> t -> t val demote_global_univs : Environ.env -> t -> t (** Removes from the uctx_local part of the UState the universes and constraints that are present in the universe graph in the input env (supposedly the global ones) *) val demote_seff_univs : Univ.Level.Set.t -> t -> t (** Mark the universes as not local any more, because they have been globally declared by some side effect. You should be using emit_side_effects instead. *) val new_univ_variable : ?loc:Loc.t -> rigid -> Id.t option -> t -> t * Univ.Level.t (** Declare a new local universe; use rigid if a global or bound universe; use flexible for a universe existential variable; use univ_flexible_alg for a universe existential variable allowed to be instantiated with an algebraic universe *) val add_global_univ : t -> Univ.Level.t -> t (** [make_flexible_variable g algebraic l] Turn the variable [l] flexible, and algebraic if [algebraic] is true and [l] can be. 
That is if there are no strict upper constraints on [l] and and it does not appear in the instance of any non-algebraic universe. Otherwise the variable is just made flexible. If [l] is already algebraic it will remain so even with [algebraic:false]. *) val make_flexible_variable : t -> algebraic:bool -> Univ.Level.t -> t val make_nonalgebraic_variable : t -> Univ.Level.t -> t (** Make the level non algebraic. Undefined behaviour on already-defined algebraics. *) (** Turn all undefined flexible algebraic variables into simply flexible ones. Can be used in case the variables might appear in universe instances (typically for polymorphic program obligations). *) val make_flexible_nonalgebraic : t -> t val is_sort_variable : t -> Sorts.t -> Univ.Level.t option val normalize_variables : t -> t val constrain_variables : Univ.Level.Set.t -> t -> t val abstract_undefined_variables : t -> t val fix_undefined_variables : t -> t (** Universe minimization *) val minimize : t -> t type ('a, 'b) gen_universe_decl = { univdecl_instance : 'a; (* Declared universes *) univdecl_extensible_instance : bool; (* Can new universes be added *) univdecl_constraints : 'b; (* Declared constraints *) univdecl_extensible_constraints : bool (* Can new constraints be added *) } type universe_decl = (lident list, Univ.Constraints.t) gen_universe_decl val default_univ_decl : universe_decl (** [check_univ_decl ctx decl] If non extensible in [decl], check that the local universes (resp. universe constraints) in [ctx] are implied by [decl]. Return a [universes_entry] containing the local universes of [ctx] and their constraints. When polymorphic, the universes corresponding to [decl.univdecl_instance] come first in the order defined by that list. *) val check_univ_decl : poly:bool -> t -> universe_decl -> named_universes_entry val check_mono_univ_decl : t -> universe_decl -> Univ.ContextSet.t (** {5 TODO: Document me} *) val update_sigma_univs : t -> UGraph.t -> t (** {5 Pretty-printing} *) val pr_uctx_level : t -> Univ.Level.t -> Pp.t val qualid_of_level : t -> Univ.Level.t -> Libnames.qualid option (** Only looks in the local names, not in the nametab. 
*) val id_of_level : t -> Univ.Level.t -> Id.t option val pr_weak : (Univ.Level.t -> Pp.t) -> t -> Pp.t val pr_universe_opt_subst : universe_opt_subst -> Pp.t coq-8.15.0/engine/univGen.ml000066400000000000000000000102721417001151100155720ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Some Pp.(str "Universe instance length is " ++ int actual ++ str " but should be " ++ int expect ++ str ".") | _ -> None) (* Generator of levels *) let new_univ_id = let cnt = ref 0 in fun () -> incr cnt; !cnt let new_univ_global () = let s = if Flags.async_proofs_is_worker() then !Flags.async_proofs_worker_id else "" in Univ.Level.UGlobal.make (Global.current_dirpath ()) s (new_univ_id ()) let fresh_level () = Univ.Level.make (new_univ_global ()) let fresh_instance auctx = let inst = Array.init (AbstractContext.size auctx) (fun _ -> fresh_level()) in let ctx = Array.fold_right Level.Set.add inst Level.Set.empty in let inst = Instance.of_array inst in inst, (ctx, AbstractContext.instantiate inst auctx) let existing_instance ?loc auctx inst = let () = let actual = Array.length (Instance.to_array inst) and expect = AbstractContext.size auctx in if not (Int.equal actual expect) then raise (UniverseLengthMismatch { actual; expect }) else () in inst, (Level.Set.empty, AbstractContext.instantiate inst auctx) let fresh_instance_from ?loc ctx = function | Some inst -> existing_instance ?loc ctx inst | None -> fresh_instance ctx (** Fresh universe polymorphic construction *) let fresh_global_instance ?loc ?names env gr = let auctx = Environ.universes_of_global env gr in let u, ctx = fresh_instance_from ?loc auctx names in u, ctx let fresh_constant_instance env c = let u, ctx = fresh_global_instance env (GlobRef.ConstRef c) in (c, u), ctx let fresh_inductive_instance env ind = let u, ctx = fresh_global_instance env (GlobRef.IndRef ind) in (ind, u), ctx let fresh_constructor_instance env c = let u, ctx = fresh_global_instance env (GlobRef.ConstructRef c) in (c, u), ctx let fresh_array_instance env = let auctx = CPrimitives.typ_univs CPrimitives.PT_array in let u, ctx = fresh_instance_from auctx None in u, ctx let fresh_global_instance ?loc ?names env gr = let u, ctx = fresh_global_instance ?loc ?names env gr in mkRef (gr, u), ctx let constr_of_monomorphic_global env gr = if not (Environ.is_polymorphic env gr) then fst (fresh_global_instance env gr) else CErrors.user_err Pp.(str "globalization of polymorphic reference " ++ Nametab.pr_global_env Id.Set.empty gr ++ str " would forget universes.") let fresh_sort_in_family = function | InSProp -> Sorts.sprop, ContextSet.empty | InProp -> Sorts.prop, ContextSet.empty | InSet -> Sorts.set, ContextSet.empty | InType -> let u = fresh_level () in sort_of_univ (Univ.Universe.make u), ContextSet.singleton u let new_global_univ () = let u = fresh_level () in (Univ.Universe.make u, ContextSet.singleton u) let fresh_universe_context_set_instance ctx = if ContextSet.is_empty ctx then Level.Map.empty, ctx else let (univs, cst) = ContextSet.levels ctx, ContextSet.constraints ctx in let univs',subst = Level.Set.fold (fun u (univs',subst) -> let u' = fresh_level () in (Level.Set.add u' univs', Level.Map.add u u' subst)) univs (Level.Set.empty, Level.Map.empty) in let cst' = subst_univs_level_constraints subst cst in subst, (univs', cst') 
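(* Added illustration, not part of the original module and deliberately not
   exported through univGen.mli: two hypothetical helpers sketching the
   calling conventions of the generators above.  Each generator returns its
   result together with a [ContextSet.t] carrying the freshly created levels
   (and their constraints), which callers accumulate, e.g. with
   [ContextSet.union], before recording them in an evd or a [UState.t]
   (see [UState.merge] in uState.mli). *)

let _example_two_fresh_types () : (Sorts.t * Sorts.t) in_universe_context_set =
  (* Two independent fresh [Type] sorts; each returned context set is a
     singleton level with no constraints, as produced by
     [fresh_sort_in_family] above. *)
  let s1, ctx1 = fresh_sort_in_family InType in
  let s2, ctx2 = fresh_sort_in_family InType in
  (s1, s2), ContextSet.union ctx1 ctx2

let _example_duplicate_ctx (ctx : ContextSet.t) : ContextSet.t =
  (* Rename every bound level of [ctx] to a fresh one and keep both copies,
     mirroring the "fresh instance" idiom of the function just above.  The
     level substitution is dropped here; real callers use it to rename the
     occurrences of the old levels in their terms. *)
  let _subst, ctx' = fresh_universe_context_set_instance ctx in
  ContextSet.union ctx ctx'

(* The leading underscores keep unused-value warnings silent, since these
   definitions exist only as examples. *)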
coq-8.15.0/engine/univGen.mli000066400000000000000000000051661417001151100157510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Level.UGlobal.t val fresh_level : unit -> Level.t val new_global_univ : unit -> Universe.t in_universe_context_set (** Build a fresh instance for a given context, its associated substitution and the instantiated constraints. *) val fresh_instance : AbstractContext.t -> Instance.t in_universe_context_set val fresh_instance_from : ?loc:Loc.t -> AbstractContext.t -> Instance.t option -> Instance.t in_universe_context_set val fresh_sort_in_family : Sorts.family -> Sorts.t in_universe_context_set val fresh_constant_instance : env -> Constant.t -> pconstant in_universe_context_set val fresh_inductive_instance : env -> inductive -> pinductive in_universe_context_set val fresh_constructor_instance : env -> constructor -> pconstructor in_universe_context_set val fresh_array_instance : env -> Instance.t in_universe_context_set val fresh_global_instance : ?loc:Loc.t -> ?names:Univ.Instance.t -> env -> GlobRef.t -> constr in_universe_context_set (** Get fresh variables for the universe context. Useful to make tactics that manipulate constrs in universe contexts polymorphic. *) val fresh_universe_context_set_instance : ContextSet.t -> universe_level_subst * ContextSet.t (** Create a fresh global in the environment argument, without side effects. BEWARE: this raises an error on polymorphic constants/inductives: the constraints should be properly added to an evd. See Evd.fresh_global, Evarutil.new_global, and pf_constr_of_global for the proper way to get a fresh copy of a polymorphic global reference. *) val constr_of_monomorphic_global : env -> GlobRef.t -> constr coq-8.15.0/engine/univMinim.ml000066400000000000000000000372141417001151100161370ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Level.Map.add u [t] map (** Precondition: flexible <= ctx *) let choose_canonical ctx flexible algs s = let global = Level.Set.diff s ctx in let flexible, rigid = Level.Set.partition flexible (Level.Set.inter s ctx) in (* If there is a global universe in the set, choose it *) if not (Level.Set.is_empty global) then let canon = Level.Set.choose global in canon, (Level.Set.remove canon global, rigid, flexible) else (* No global in the equivalence class, choose a rigid one *) if not (Level.Set.is_empty rigid) then let canon = Level.Set.choose rigid in canon, (global, Level.Set.remove canon rigid, flexible) else (* There are only flexible universes in the equivalence class, choose a non-algebraic. 
*) let algs, nonalgs = Level.Set.partition (fun x -> Level.Set.mem x algs) flexible in if not (Level.Set.is_empty nonalgs) then let canon = Level.Set.choose nonalgs in canon, (global, rigid, Level.Set.remove canon flexible) else let canon = Level.Set.choose algs in canon, (global, rigid, Level.Set.remove canon flexible) (* Eq < Le < Lt *) let compare_constraint_type d d' = match d, d' with | Eq, Eq -> 0 | Eq, _ -> -1 | _, Eq -> 1 | Le, Le -> 0 | Le, _ -> -1 | _, Le -> 1 | Lt, Lt -> 0 type lowermap = constraint_type Level.Map.t let lower_union = let merge k a b = match a, b with | Some _, None -> a | None, Some _ -> b | None, None -> None | Some l, Some r -> if compare_constraint_type l r >= 0 then a else b in Level.Map.merge merge let lower_add l c m = try let c' = Level.Map.find l m in if compare_constraint_type c c' > 0 then Level.Map.add l c m else m with Not_found -> Level.Map.add l c m let lower_of_list l = List.fold_left (fun acc (d,l) -> Level.Map.add l d acc) Level.Map.empty l type lbound = { enforce : bool; alg : bool; lbound: Universe.t; lower : lowermap } module LBMap : sig type t = private { lbmap : lbound Level.Map.t; lbrev : (Level.t * lowermap) Universe.Map.t } val empty : t val add : Level.t -> lbound -> t -> t end = struct type t = { lbmap : lbound Level.Map.t; lbrev : (Level.t * lowermap) Universe.Map.t } (* lbrev is uniquely given from lbmap as a partial reverse mapping *) let empty = { lbmap = Level.Map.empty; lbrev = Universe.Map.empty } let add u bnd m = let lbmap = Level.Map.add u bnd m.lbmap in let lbrev = if not bnd.alg && bnd.enforce then match Universe.Map.find bnd.lbound m.lbrev with | (v, _) -> if Level.compare u v <= 0 then Universe.Map.add bnd.lbound (u, bnd.lower) m.lbrev else m.lbrev | exception Not_found -> Universe.Map.add bnd.lbound (u, bnd.lower) m.lbrev else m.lbrev in { lbmap; lbrev } end let find_inst insts v = Universe.Map.find v insts.LBMap.lbrev let compute_lbound left = (* The universe variable was not fixed yet. Compute its level using its lower bound. *) let sup l lbound = match lbound with | None -> Some l | Some l' -> Some (Universe.sup l l') in List.fold_left (fun lbound (d, l) -> if d == Le (* l <= ?u *) then sup l lbound else (* l < ?u *) (assert (d == Lt); if not (Universe.level l == None) then sup (Universe.super l) lbound else None)) None left let instantiate_with_lbound u lbound lower ~alg ~enforce (ctx, us, algs, insts, cstrs) = if enforce then let inst = Universe.make u in let cstrs' = enforce_leq lbound inst cstrs in (ctx, us, Level.Set.remove u algs, LBMap.add u {enforce;alg;lbound;lower} insts, cstrs'), {enforce; alg; lbound=inst; lower} else (* Actually instantiate *) (Univ.Level.Set.remove u ctx, Univ.Level.Map.add u (Some lbound) us, algs, LBMap.add u {enforce;alg;lbound;lower} insts, cstrs), {enforce; alg; lbound; lower} type constraints_map = (Univ.constraint_type * Univ.Level.Map.key) list Univ.Level.Map.t let _pr_constraints_map (cmap:constraints_map) = let open Pp in Level.Map.fold (fun l cstrs acc -> Level.pr l ++ str " => " ++ prlist_with_sep spc (fun (d,r) -> pr_constraint_type d ++ Level.pr r) cstrs ++ fnl () ++ acc) cmap (mt ()) let remove_alg l (ctx, us, algs, insts, cstrs) = (ctx, us, Level.Set.remove l algs, insts, cstrs) let not_lower lower (d,l) = (* We're checking if (d,l) is already implied by the lower constraints on some level u. If it represents l < u (d is Lt or d is Le and i > 0, the i < 0 case is impossible due to invariants of Univ), and the lower constraints only have l <= u then it is not implied. 
*) Univ.Universe.exists (fun (l,i) -> let d = if i == 0 then d else match d with | Le -> Lt | d -> d in try let d' = Level.Map.find l lower in (* If d is stronger than the already implied lower * constraints we must keep it. *) compare_constraint_type d d' > 0 with Not_found -> (* No constraint existing on l *) true) l exception UpperBoundedAlg (** [enforce_uppers upper lbound cstrs] interprets [upper] as upper constraints to [lbound], adding them to [cstrs]. @raise UpperBoundedAlg if any [upper] constraints are strict and [lbound] algebraic. *) let enforce_uppers upper lbound cstrs = List.fold_left (fun cstrs (d, r) -> if d == Univ.Le then enforce_leq lbound (Universe.make r) cstrs else match Universe.level lbound with | Some lev -> Constraints.add (lev, d, r) cstrs | None -> raise UpperBoundedAlg) cstrs upper let minimize_univ_variables ctx us algs left right cstrs = let left, lbounds = Univ.Level.Map.fold (fun r lower (left, lbounds as acc) -> if Univ.Level.Map.mem r us || not (Univ.Level.Set.mem r ctx) then acc else (* Fixed universe, just compute its glb for sharing *) let lbounds = match compute_lbound (List.map (fun (d,l) -> d, Universe.make l) lower) with | None -> lbounds | Some lbound -> LBMap.add r {enforce=true; alg=false; lbound; lower=lower_of_list lower} lbounds in (Univ.Level.Map.remove r left, lbounds)) left (left, LBMap.empty) in let rec instance (ctx, us, algs, insts, cstrs as acc) u = let acc, left, lower = match Level.Map.find u left with | exception Not_found -> acc, [], Level.Map.empty | l -> let acc, left, newlow, lower = List.fold_left (fun (acc, left, newlow, lower') (d, l) -> let acc', {enforce=enf;alg;lbound=l';lower} = aux acc l in let l' = if enf then Universe.make l else l' in acc', (d, l') :: left, lower_add l d newlow, lower_union lower lower') (acc, [], Level.Map.empty, Level.Map.empty) l in let left = CList.uniquize (List.filter (not_lower lower) left) in (acc, left, Level.Map.lunion newlow lower) in let instantiate_lbound lbound = let alg = Level.Set.mem u algs in if alg then (* u is algebraic: we instantiate it with its lower bound, if any, or enforce the constraints if it is bounded from the top. *) let lower = Level.Set.fold Level.Map.remove (Universe.levels lbound) lower in instantiate_with_lbound u lbound lower ~alg:true ~enforce:false acc else (* u is non algebraic *) match Universe.level lbound with | Some l -> (* The lowerbound is directly a level *) (* u is not algebraic but has no upper bounds, we instantiate it with its lower bound if it is a different level, otherwise we keep it. *) let lower = Level.Map.remove l lower in if not (Level.equal l u) then (* Should check that u does not have upper constraints that are not already in right *) let acc = remove_alg l acc in instantiate_with_lbound u lbound lower ~alg:false ~enforce:false acc else acc, {enforce=true; alg=false; lbound; lower} | None -> begin match find_inst insts lbound with | can, lower -> (* Another universe represents the same lower bound, we can share them with no harm. 
*) let lower = Level.Map.remove can lower in instantiate_with_lbound u (Universe.make can) lower ~alg:false ~enforce:false acc | exception Not_found -> (* We set u as the canonical universe representing lbound *) instantiate_with_lbound u lbound lower ~alg:false ~enforce:true acc end in let enforce_uppers ((ctx,us,algs,insts,cstrs), b as acc) = match Level.Map.find u right with | exception Not_found -> acc | upper -> let upper = List.filter (fun (d, r) -> not (Level.Map.mem r us)) upper in let cstrs = enforce_uppers upper b.lbound cstrs in (ctx, us, algs, insts, cstrs), b in if not (Level.Set.mem u ctx) then enforce_uppers (acc, {enforce=true; alg=false; lbound=Universe.make u; lower}) else let lbound = compute_lbound left in match lbound with | None -> (* Nothing to do *) enforce_uppers (acc, {enforce=true;alg=false;lbound=Universe.make u; lower}) | Some lbound -> try enforce_uppers (instantiate_lbound lbound) with UpperBoundedAlg -> enforce_uppers (acc, {enforce=true; alg=false; lbound=Universe.make u; lower}) and aux (ctx, us, algs, seen, cstrs as acc) u = try acc, Level.Map.find u seen.LBMap.lbmap with Not_found -> instance acc u in Level.Map.fold (fun u v (ctx, us, algs, seen, cstrs as acc) -> if v == None then fst (aux acc u) else Level.Set.remove u ctx, us, Level.Set.remove u algs, seen, cstrs) us (ctx, us, algs, lbounds, cstrs) module UPairs = OrderedType.UnorderedPair(Univ.Level) module UPairSet = Set.Make (UPairs) let is_bound l lbound = match lbound with | UGraph.Bound.Prop -> Level.is_prop l | UGraph.Bound.Set -> Level.is_set l (* if [is_minimal u] then constraints [u <= v] may be dropped and get used only for set_minimization. *) let is_minimal ~lbound u = Level.is_sprop u || Level.is_prop u || is_bound u lbound (* TODO check is_small/sprop *) let normalize_context_set ~lbound g ctx us algs weak = let (ctx, csts) = ContextSet.levels ctx, ContextSet.constraints ctx in (* Keep the Prop/Set <= i constraints separate for minimization *) let smallles, csts = Constraints.partition (fun (l,d,r) -> d == Le && is_minimal ~lbound l) csts in let smallles = if get_set_minimization () then Constraints.filter (fun (l,d,r) -> Level.Map.mem r us && not (Level.is_sprop l)) smallles else Constraints.empty in let smallles = Constraints.map (fun (_,_,r) -> Level.set, Le, r) smallles in let csts, partition = (* We first put constraints in a normal-form: all self-loops are collapsed to equalities. *) let g = UGraph.initial_universes_with g in let g = Level.Set.fold (fun v g -> UGraph.add_universe ~lbound ~strict:false v g) ctx g in let add_soft u g = if not (Level.is_small u || Level.Set.mem u ctx) then try UGraph.add_universe ~lbound ~strict:false u g with UGraph.AlreadyDeclared -> g else g in let g = Constraints.fold (fun (l, d, r) g -> add_soft r (add_soft l g)) csts g in let g = UGraph.merge_constraints csts g in UGraph.constraints_of_universes g in (* We ignore the trivial Prop/Set <= i constraints. *) let noneqs = Constraints.filter (fun (l,d,r) -> not ((d == Le && is_bound l lbound) || (Level.is_prop l && d == Lt && Level.is_set r))) csts in let noneqs = Constraints.union noneqs smallles in let flex x = Level.Map.mem x us in let ctx, us, eqs = List.fold_left (fun (ctx, us, cstrs) s -> let canon, (global, rigid, flexible) = choose_canonical ctx flex algs s in (* Add equalities for globals which can't be merged anymore. 
*) let cstrs = Level.Set.fold (fun g cst -> Constraints.add (canon, Eq, g) cst) global cstrs in (* Also add equalities for rigid variables *) let cstrs = Level.Set.fold (fun g cst -> Constraints.add (canon, Eq, g) cst) rigid cstrs in let canonu = Some (Universe.make canon) in let us = Level.Set.fold (fun f -> Level.Map.add f canonu) flexible us in (Level.Set.diff ctx flexible, us, cstrs)) (ctx, us, Constraints.empty) partition in (* Process weak constraints: when one side is flexible and the 2 universes are unrelated unify them. *) let ctx, us, g = UPairSet.fold (fun (u,v) (ctx, us, g as acc) -> let norm = level_subst_of (normalize_univ_variable_opt_subst us) in let u = norm u and v = norm v in let set_to a b = (Level.Set.remove a ctx, Level.Map.add a (Some (Universe.make b)) us, UGraph.enforce_constraint (a,Eq,b) g) in if UGraph.check_constraint g (u,Le,v) || UGraph.check_constraint g (v,Le,u) then acc else if Level.Map.mem u us then set_to u v else if Level.Map.mem v us then set_to v u else acc) weak (ctx, us, g) in (* Noneqs is now in canonical form w.r.t. equality constraints, and contains only inequality constraints. *) let noneqs = let norm = level_subst_of (normalize_univ_variable_opt_subst us) in Constraints.fold (fun (u,d,v) noneqs -> let u = norm u and v = norm v in if d != Lt && Level.equal u v then noneqs else Constraints.add (u,d,v) noneqs) noneqs Constraints.empty in (* Compute the left and right set of flexible variables, constraints mentioning other variables remain in noneqs. *) let noneqs, ucstrsl, ucstrsr = Constraints.fold (fun (l,d,r as cstr) (noneq, ucstrsl, ucstrsr) -> let lus = Level.Map.mem l us and rus = Level.Map.mem r us in let ucstrsl' = if lus then add_list_map l (d, r) ucstrsl else ucstrsl and ucstrsr' = add_list_map r (d, l) ucstrsr in let noneqs = if lus || rus then noneq else Constraints.add cstr noneq in (noneqs, ucstrsl', ucstrsr')) noneqs (Constraints.empty, Level.Map.empty, Level.Map.empty) in (* Now we construct the instantiation of each variable. *) let ctx', us, algs, inst, noneqs = minimize_univ_variables ctx us algs ucstrsr ucstrsl noneqs in let us = normalize_opt_subst us in (us, algs), (ctx', Constraints.union noneqs eqs) coq-8.15.0/engine/univMinim.mli000066400000000000000000000030631417001151100163030ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* UGraph.t -> ContextSet.t -> universe_opt_subst (* The defined and undefined variables *) -> Level.Set.t (* univ variables that can be substituted by algebraics *) -> UPairSet.t (* weak equality constraints *) -> (universe_opt_subst * Level.Set.t) in_universe_context_set coq-8.15.0/engine/univNames.ml000066400000000000000000000024561417001151100161310ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* (try Some (Nametab.shortest_qualid_of_universe ctx qid) with Not_found -> None) | None -> None let pr_with_global_universes ctx l = match qualid_of_level ctx l with | Some qid -> Libnames.pr_qualid qid | None -> Level.pr l (** Global universe information outside the kernel, to handle polymorphic universe names in sections that have to be discharged. 
*) (** Local universe names of polymorphic references *) type universe_binders = Level.t Names.Id.Map.t let empty_binders = Id.Map.empty type univ_name_list = Names.lname list coq-8.15.0/engine/univNames.mli000066400000000000000000000017571417001151100163050ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* level mapping *) type universe_binders = Univ.Level.t Names.Id.Map.t val empty_binders : universe_binders type univ_name_list = Names.lname list val pr_with_global_universes : universe_binders -> Level.t -> Pp.t val qualid_of_level : universe_binders -> Level.t -> Libnames.qualid option coq-8.15.0/engine/univProblem.ml000066400000000000000000000062331417001151100164630ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Universe.equal u v | ULub (u, v) | UWeak (u, v) -> Level.equal u v let force = function | ULe _ | UEq _ | UWeak _ as cst -> cst | ULub (u,v) -> UEq (Universe.make u, Universe.make v) let check g = function | ULe (u,v) -> UGraph.check_leq g u v | UEq (u,v) -> UGraph.check_eq g u v | ULub (u,v) -> UGraph.check_eq_level g u v | UWeak _ -> true module Set = struct module S = Set.Make( struct type nonrec t = t let compare x y = match x, y with | ULe (u, v), ULe (u', v') -> let i = Universe.compare u u' in if Int.equal i 0 then Universe.compare v v' else i | UEq (u, v), UEq (u', v') -> let i = Universe.compare u u' in if Int.equal i 0 then Universe.compare v v' else if Universe.equal u v' && Universe.equal v u' then 0 else i | ULub (u, v), ULub (u', v') | UWeak (u, v), UWeak (u', v') -> let i = Level.compare u u' in if Int.equal i 0 then Level.compare v v' else if Level.equal u v' && Level.equal v u' then 0 else i | ULe _, _ -> -1 | _, ULe _ -> 1 | UEq _, _ -> -1 | _, UEq _ -> 1 | ULub _, _ -> -1 | _, ULub _ -> 1 end) include S let add cst s = if is_trivial cst then s else add cst s let pr_one = let open Pp in function | ULe (u, v) -> Universe.pr u ++ str " <= " ++ Universe.pr v | UEq (u, v) -> Universe.pr u ++ str " = " ++ Universe.pr v | ULub (u, v) -> Level.pr u ++ str " /\\ " ++ Level.pr v | UWeak (u, v) -> Level.pr u ++ str " ~ " ++ Level.pr v let pr c = let open Pp in fold (fun cst pp_std -> pp_std ++ pr_one cst ++ fnl ()) c (str "") let equal x y = x == y || equal x y let force s = map force s let check g s = for_all (check g) s end type 'a constraint_function = 'a -> 'a -> Set.t -> Set.t let enforce_eq_instances_univs strict x y c = let mk u v = if strict then ULub (u, v) else UEq (Universe.make u, Universe.make v) in let ax = Instance.to_array x and ay = Instance.to_array y in if Array.length ax != Array.length ay then CErrors.anomaly Pp.(str "Invalid argument: enforce_eq_instances_univs called with" ++ str " instances of different lengths."); CArray.fold_right2 (fun x y -> Set.add (mk x y)) ax ay c coq-8.15.0/engine/univProblem.mli000066400000000000000000000030011417001151100166220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* bool val check : UGraph.t -> t -> bool module Set : sig include Set.S with type elt = t val pr : t -> Pp.t (** Replace ULub constraints by UEq *) val force : 
t -> t val check : UGraph.t -> t -> bool end type 'a constraint_function = 'a -> 'a -> Set.t -> Set.t val enforce_eq_instances_univs : bool -> Instance.t constraint_function coq-8.15.0/engine/univSubst.ml000066400000000000000000000102401417001151100161540ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* enforce_eq u v | Le -> enforce_leq u v | Lt -> enforce_leq (super u) v let subst_univs_level fn l = try Some (fn l) with Not_found -> None let subst_univs_constraint fn (u,d,v as c) cstrs = let u' = subst_univs_level fn u in let v' = subst_univs_level fn v in match u', v' with | None, None -> Constraints.add c cstrs | Some u, None -> enforce_univ_constraint (u,d,Universe.make v) cstrs | None, Some v -> enforce_univ_constraint (Universe.make u,d,v) cstrs | Some u, Some v -> enforce_univ_constraint (u,d,v) cstrs let subst_univs_constraints subst csts = Constraints.fold (fun c cstrs -> subst_univs_constraint subst c cstrs) csts Constraints.empty let level_subst_of f = fun l -> try let u = f l in match Universe.level u with | None -> l | Some l -> l with Not_found -> l let normalize_univ_variable ~find = let rec aux cur = let b = find cur in let b' = subst_univs_universe aux b in if Universe.equal b' b then b else b' in aux type universe_opt_subst = Universe.t option universe_map let normalize_univ_variable_opt_subst ectx = let find l = match Univ.Level.Map.find l ectx with | Some b -> b | None -> raise Not_found in normalize_univ_variable ~find let normalize_universe_opt_subst subst = let normlevel = normalize_univ_variable_opt_subst subst in subst_univs_universe normlevel let normalize_opt_subst ctx = let normalize = normalize_universe_opt_subst ctx in Univ.Level.Map.mapi (fun u -> function | None -> None | Some v -> Some (normalize v)) ctx let normalize_univ_variables ctx = let ctx = normalize_opt_subst ctx in let def, subst = Univ.Level.Map.fold (fun u v (def, subst) -> match v with | None -> (def, subst) | Some b -> (Univ.Level.Set.add u def, Univ.Level.Map.add u b subst)) ctx (Univ.Level.Set.empty, Univ.Level.Map.empty) in ctx, def, subst let subst_univs_fn_puniverses f (c, u as cu) = let u' = Instance.subst_fn f u in if u' == u then cu else (c, u') let nf_evars_and_universes_opt_subst f subst = let subst = normalize_univ_variable_opt_subst subst in let lsubst = level_subst_of subst in let rec aux c = match kind c with | Evar (evk, args) -> let args' = List.Smart.map aux args in (match try f (evk, args') with Not_found -> None with | None -> if args == args' then c else mkEvar (evk, args') | Some c -> aux c) | Const pu -> let pu' = subst_univs_fn_puniverses lsubst pu in if pu' == pu then c else mkConstU pu' | Ind pu -> let pu' = subst_univs_fn_puniverses lsubst pu in if pu' == pu then c else mkIndU pu' | Construct pu -> let pu' = subst_univs_fn_puniverses lsubst pu in if pu' == pu then c else mkConstructU pu' | Sort (Type u) -> let u' = Univ.subst_univs_universe subst u in if u' == u then c else mkSort (sort_of_univ u') | Case (ci,u,pms,p,iv,t,br) -> let u' = Instance.subst_fn lsubst u in if u' == u then Constr.map aux c else Constr.map aux (mkCase (ci,u',pms,p,iv,t,br)) | Array (u,elems,def,ty) -> let u' = Univ.Instance.subst_fn lsubst u in let elems' = CArray.Smart.map aux elems in let def' = aux def in let ty' = aux ty in if u == u' && elems == elems' && def == def' && ty == ty' then c else mkArray (u',elems',def',ty') | _ 
-> Constr.map aux c in aux coq-8.15.0/engine/univSubst.mli000066400000000000000000000026121417001151100163310ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* universe_level_subst_fn val subst_univs_constraints : universe_subst_fn -> Constraints.t -> Constraints.t type universe_opt_subst = Universe.t option universe_map val normalize_univ_variables : universe_opt_subst -> universe_opt_subst * Level.Set.t * universe_subst val normalize_univ_variable_opt_subst : universe_opt_subst -> (Level.t -> Universe.t) val normalize_universe_opt_subst : universe_opt_subst -> (Universe.t -> Universe.t) val normalize_opt_subst : universe_opt_subst -> universe_opt_subst (** Full universes substitutions into terms *) val nf_evars_and_universes_opt_subst : (existential -> constr option) -> universe_opt_subst -> constr -> constr coq-8.15.0/gramlib/000077500000000000000000000000001417001151100137735ustar00rootroot00000000000000coq-8.15.0/gramlib/LICENSE000066400000000000000000000032511417001151100150010ustar00rootroot00000000000000gramlib was derived from Daniel de Rauglaudre's camlp5 (https://github.com/camlp5/camlp5) whose licence follows: * Copyright (c) 2007-2017, INRIA (Institut National de Recherches en * Informatique et Automatique). All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of INRIA, nor the names of its contributors may be * used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY INRIA AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INRIA AND * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
coq-8.15.0/gramlib/dune000066400000000000000000000001241417001151100146460ustar00rootroot00000000000000(library (name gramlib) (public_name coq-core.gramlib) (libraries coq-core.lib)) coq-8.15.0/gramlib/gramext.ml000066400000000000000000000002701417001151100157730ustar00rootroot00000000000000(* camlp5r *) (* gramext.ml,v *) (* Copyright (c) INRIA 2007-2017 *) type position = | First | Last | Before of string | After of string type g_assoc = NonA | RightA | LeftA coq-8.15.0/gramlib/gramext.mli000066400000000000000000000002711417001151100161450ustar00rootroot00000000000000(* camlp5r *) (* gramext.mli,v *) (* Copyright (c) INRIA 2007-2017 *) type position = | First | Last | Before of string | After of string type g_assoc = NonA | RightA | LeftA coq-8.15.0/gramlib/grammar.ml000066400000000000000000002057521417001151100157660ustar00rootroot00000000000000(* camlp5r *) (* grammar.ml,v *) (* Copyright (c) INRIA 2007-2017 *) open Gramext open Format open Util (* Functorial interface *) type norec type mayrec module type S = sig type te type 'c pattern type ty_pattern = TPattern : 'a pattern -> ty_pattern module Parsable : sig type t val make : ?loc:Loc.t -> char Stream.t -> t val comments : t -> ((int * int) * string) list end module Entry : sig type 'a t val make : string -> 'a t val create : string -> 'a t val parse : 'a t -> Parsable.t -> 'a val name : 'a t -> string type 'a parser_fun = { parser_fun : te LStream.t -> 'a } val of_parser : string -> 'a parser_fun -> 'a t val parse_token_stream : 'a t -> te LStream.t -> 'a val print : Format.formatter -> 'a t -> unit val is_empty : 'a t -> bool end module rec Symbol : sig type ('self, 'trec, 'a) t val nterm : 'a Entry.t -> ('self, norec, 'a) t val nterml : 'a Entry.t -> string -> ('self, norec, 'a) t val list0 : ('self, 'trec, 'a) t -> ('self, 'trec, 'a list) t val list0sep : ('self, 'trec, 'a) t -> ('self, norec, unit) t -> bool -> ('self, 'trec, 'a list) t val list1 : ('self, 'trec, 'a) t -> ('self, 'trec, 'a list) t val list1sep : ('self, 'trec, 'a) t -> ('self, norec, unit) t -> bool -> ('self, 'trec, 'a list) t val opt : ('self, 'trec, 'a) t -> ('self, 'trec, 'a option) t val self : ('self, mayrec, 'self) t val next : ('self, mayrec, 'self) t val token : 'c pattern -> ('self, norec, 'c) t val tokens : ty_pattern list -> ('self, norec, unit) t val rules : 'a Rules.t list -> ('self, norec, 'a) t end and Rule : sig type ('self, 'trec, 'f, 'r) t val stop : ('self, norec, 'r, 'r) t val next : ('self, _, 'a, 'r) t -> ('self, _, 'b) Symbol.t -> ('self, mayrec, 'b -> 'a, 'r) t val next_norec : ('self, norec, 'a, 'r) Rule.t -> ('self, norec, 'b) Symbol.t -> ('self, norec, 'b -> 'a, 'r) t end and Rules : sig type 'a t val make : (_, norec, 'f, Loc.t -> 'a) Rule.t -> 'f -> 'a t end module Production : sig type 'a t val make : ('a, _, 'f, Loc.t -> 'a) Rule.t -> 'f -> 'a t end type 'a single_extend_statement = string option * Gramext.g_assoc option * 'a Production.t list type 'a extend_statement = | Reuse of string option * 'a Production.t list | Fresh of Gramext.position * 'a single_extend_statement list val generalize_symbol : ('a, 'tr, 'c) Symbol.t -> ('a, norec, 'c) Symbol.t option (* Used in custom entries, should tweak? 
*) val level_of_nonterm : ('a, norec, 'c) Symbol.t -> string option end module type ExtS = sig include S val safe_extend : 'a Entry.t -> 'a extend_statement -> unit val safe_delete_rule : 'a Entry.t -> 'a Production.t -> unit module Unsafe : sig val clear_entry : 'a Entry.t -> unit end end (* Implementation *) module GMake (L : Plexing.S) = struct type te = L.te type 'c pattern = 'c L.pattern type ty_pattern = TPattern : 'a pattern -> ty_pattern type 'a parser_t = L.te LStream.t -> 'a type grammar = { gtokens : (string * string option, int ref) Hashtbl.t } let egram = { gtokens = Hashtbl.create 301 } (** Used to propagate possible presence of SELF/NEXT in a rule (binary and) *) type ('a, 'b, 'c) ty_and_rec = | NoRec2 : (norec, norec, norec) ty_and_rec | MayRec2 : ('a, 'b, mayrec) ty_and_rec (** Used to propagate possible presence of SELF/NEXT in a tree (ternary and) *) type ('a, 'b, 'c, 'd) ty_and_rec3 = | NoRec3 : (norec, norec, norec, norec) ty_and_rec3 | MayRec3 : ('a, 'b, 'c, mayrec) ty_and_rec3 type 'a ty_entry = { ename : string; mutable estart : int -> 'a parser_t; mutable econtinue : int -> int -> 'a -> 'a parser_t; mutable edesc : 'a ty_desc; } and 'a ty_desc = | Dlevels of 'a ty_level list | Dparser of 'a parser_t and 'a ty_level = Level : (_, _, 'a) ty_rec_level -> 'a ty_level and ('trecs, 'trecp, 'a) ty_rec_level = { assoc : g_assoc; lname : string option; lsuffix : ('a, 'trecs, 'a -> Loc.t -> 'a) ty_tree; lprefix : ('a, 'trecp, Loc.t -> 'a) ty_tree; } and ('self, 'trec, 'a) ty_symbol = | Stoken : 'c pattern -> ('self, norec, 'c) ty_symbol | Stokens : ty_pattern list -> ('self, norec, unit) ty_symbol | Slist1 : ('self, 'trec, 'a) ty_symbol -> ('self, 'trec, 'a list) ty_symbol | Slist1sep : ('self, 'trec, 'a) ty_symbol * ('self, norec, unit) ty_symbol * bool -> ('self, 'trec, 'a list) ty_symbol | Slist0 : ('self, 'trec, 'a) ty_symbol -> ('self, 'trec, 'a list) ty_symbol | Slist0sep : ('self, 'trec, 'a) ty_symbol * ('self, norec, unit) ty_symbol * bool -> ('self, 'trec, 'a list) ty_symbol | Sopt : ('self, 'trec, 'a) ty_symbol -> ('self, 'trec, 'a option) ty_symbol | Sself : ('self, mayrec, 'self) ty_symbol | Snext : ('self, mayrec, 'self) ty_symbol | Snterm : 'a ty_entry -> ('self, norec, 'a) ty_symbol (* norec but the entry can nevertheless introduce a loop with the current entry*) | Snterml : 'a ty_entry * string -> ('self, norec, 'a) ty_symbol | Stree : ('self, 'trec, Loc.t -> 'a) ty_tree -> ('self, 'trec, 'a) ty_symbol and ('self, _, _, 'r) ty_rule = | TStop : ('self, norec, 'r, 'r) ty_rule | TNext : ('trr, 'trs, 'tr) ty_and_rec * ('self, 'trr, 'a, 'r) ty_rule * ('self, 'trs, 'b) ty_symbol -> ('self, 'tr, 'b -> 'a, 'r) ty_rule and ('self, 'trec, 'a) ty_tree = | Node : ('trn, 'trs, 'trb, 'tr) ty_and_rec3 * ('self, 'trn, 'trs, 'trb, 'b, 'a) ty_node -> ('self, 'tr, 'a) ty_tree | LocAct : 'k * 'k list -> ('self, norec, 'k) ty_tree | DeadEnd : ('self, norec, 'k) ty_tree and ('self, 'trec, 'trecs, 'trecb, 'a, 'r) ty_node = { node : ('self, 'trec, 'a) ty_symbol; son : ('self, 'trecs, 'a -> 'r) ty_tree; brother : ('self, 'trecb, 'r) ty_tree; } type 'a ty_rules = | TRules : (_, norec, 'act, Loc.t -> 'a) ty_rule * 'act -> 'a ty_rules type 'a ty_production = | TProd : ('a, _, 'act, Loc.t -> 'a) ty_rule * 'act -> 'a ty_production let rec derive_eps : type s r a. 
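(* [derive_eps s] holds when the symbol [s] may match the empty token sequence (e.g. [Slist0 _], [Slist0sep _], [Sopt _], or an [Stree] with a branch that itself derives the empty sequence); it is used when inserting rules to decide whether a new rule is placed before or after an existing one in a tree. *)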
(s, r, a) ty_symbol -> bool = function Slist0 _ -> true | Slist0sep (_, _, _) -> true | Sopt _ -> true | Stree t -> tree_derive_eps t | Slist1 _ -> false | Slist1sep (_, _, _) -> false | Snterm _ -> false | Snterml (_, _) -> false | Snext -> false | Sself -> false | Stoken _ -> false | Stokens _ -> false and tree_derive_eps : type s tr a. (s, tr, a) ty_tree -> bool = function LocAct (_, _) -> true | Node (_, {node = s; brother = bro; son = son}) -> derive_eps s && tree_derive_eps son || tree_derive_eps bro | DeadEnd -> false (** FIXME: find a way to do that type-safely *) let eq_entry : type a1 a2. a1 ty_entry -> a2 ty_entry -> (a1, a2) eq option = fun e1 e2 -> if (Obj.magic e1) == (Obj.magic e2) then Some (Obj.magic Refl) else None let tok_pattern_eq_list pl1 pl2 = let f (TPattern p1) (TPattern p2) = Option.has_some (L.tok_pattern_eq p1 p2) in if List.for_all2eq f pl1 pl2 then Some Refl else None let rec eq_symbol : type s r1 r2 a1 a2. (s, r1, a1) ty_symbol -> (s, r2, a2) ty_symbol -> (a1, a2) eq option = fun s1 s2 -> match s1, s2 with Snterm e1, Snterm e2 -> eq_entry e1 e2 | Snterml (e1, l1), Snterml (e2, l2) -> if String.equal l1 l2 then eq_entry e1 e2 else None | Slist0 s1, Slist0 s2 -> begin match eq_symbol s1 s2 with None -> None | Some Refl -> Some Refl end | Slist0sep (s1, sep1, b1), Slist0sep (s2, sep2, b2) -> if b1 = b2 then match eq_symbol s1 s2 with | None -> None | Some Refl -> match eq_symbol sep1 sep2 with | None -> None | Some Refl -> Some Refl else None | Slist1 s1, Slist1 s2 -> begin match eq_symbol s1 s2 with None -> None | Some Refl -> Some Refl end | Slist1sep (s1, sep1, b1), Slist1sep (s2, sep2, b2) -> if b1 = b2 then match eq_symbol s1 s2 with | None -> None | Some Refl -> match eq_symbol sep1 sep2 with | None -> None | Some Refl -> Some Refl else None | Sopt s1, Sopt s2 -> begin match eq_symbol s1 s2 with None -> None | Some Refl -> Some Refl end | Stree _, Stree _ -> None | Sself, Sself -> Some Refl | Snext, Snext -> Some Refl | Stoken p1, Stoken p2 -> L.tok_pattern_eq p1 p2 | Stokens pl1, Stokens pl2 -> tok_pattern_eq_list pl1 pl2 | _ -> None let is_before : type s1 s2 r1 r2 a1 a2. (s1, r1, a1) ty_symbol -> (s2, r2, a2) ty_symbol -> bool = fun s1 s2 -> match s1, s2 with | Stoken p1, Stoken p2 -> snd (L.tok_pattern_strings p1) <> None && snd (L.tok_pattern_strings p2) = None | Stoken _, _ -> true | _ -> false (** Ancillary datatypes *) type 'a ty_rec = MayRec : mayrec ty_rec | NoRec : norec ty_rec type ('a, 'b, 'c) ty_and_ex = | NR00 : (mayrec, mayrec, mayrec) ty_and_ex | NR01 : (mayrec, norec, mayrec) ty_and_ex | NR10 : (norec, mayrec, mayrec) ty_and_ex | NR11 : (norec, norec, norec) ty_and_ex type ('a, 'b) ty_mayrec_and_ex = | MayRecNR : ('a, 'b, _) ty_and_ex -> ('a, 'b) ty_mayrec_and_ex type ('s, 'a) ty_mayrec_symbol = | MayRecSymbol : ('s, _, 'a) ty_symbol -> ('s, 'a) ty_mayrec_symbol type ('s, 'a) ty_mayrec_tree = | MayRecTree : ('s, 'tr, 'a) ty_tree -> ('s, 'a) ty_mayrec_tree type ('s, 'a, 'r) ty_mayrec_rule = | MayRecRule : ('s, _, 'a, 'r) ty_rule -> ('s, 'a, 'r) ty_mayrec_rule type ('self, 'trec, _) ty_symbols = | TNil : ('self, norec, unit) ty_symbols | TCns : ('trh, 'trt, 'tr) ty_and_rec * ('self, 'trh, 'a) ty_symbol * ('self, 'trt, 'b) ty_symbols -> ('self, 'tr, 'a * 'b) ty_symbols (** ('i, 'p, 'f, 'r) rel_prod0 ~ ∃ α₁ ... αₙ. p ≡ αₙ * ... α₁ * 'i ∧ f ≡ α₁ -> ... 
-> αₙ -> 'r *) type ('i, _, 'f, _) rel_prod0 = | Rel0 : ('i, 'i, 'f, 'f) rel_prod0 | RelS : ('i, 'p, 'f, 'a -> 'r) rel_prod0 -> ('i, 'a * 'p, 'f, 'r) rel_prod0 type ('p, 'k, 'r) rel_prod = (unit, 'p, 'k, 'r) rel_prod0 type ('s, 'tr, 'i, 'k, 'r) any_symbols = | AnyS : ('s, 'tr, 'p) ty_symbols * ('i, 'p, 'k, 'r) rel_prod0 -> ('s, 'tr, 'i, 'k, 'r) any_symbols type ('s, 'tr, 'k, 'r) ty_belast_rule = | Belast : ('trr, 'trs, 'tr) ty_and_rec * ('s, 'trr, 'k, 'a -> 'r) ty_rule * ('s, 'trs, 'a) ty_symbol -> ('s, 'tr, 'k, 'r) ty_belast_rule (* unfortunately, this is quadratic, but ty_rules aren't too long * (99% of the time of length less or equal 10 and maximum is 22 * when compiling Coq and its standard library) *) let rec get_symbols : type s trec k r. (s, trec, k, r) ty_rule -> (s, trec, unit, k, r) any_symbols = let rec belast_rule : type s trr trs tr a k r. (trr, trs, tr) ty_and_rec -> (s, trr, k, r) ty_rule -> (s, trs, a) ty_symbol -> (s, tr, a -> k, r) ty_belast_rule = fun ar r s -> match ar, r with | NoRec2, TStop -> Belast (NoRec2, TStop, s) | MayRec2, TStop -> Belast (MayRec2, TStop, s) | NoRec2, TNext (NoRec2, r, s') -> let Belast (NoRec2, r, s') = belast_rule NoRec2 r s' in Belast (NoRec2, TNext (NoRec2, r, s), s') | MayRec2, TNext (_, r, s') -> let Belast (_, r, s') = belast_rule MayRec2 r s' in Belast (MayRec2, TNext (MayRec2, r, s), s') in function | TStop -> AnyS (TNil, Rel0) | TNext (MayRec2, r, s) -> let Belast (MayRec2, r, s) = belast_rule MayRec2 r s in let AnyS (r, pf) = get_symbols r in AnyS (TCns (MayRec2, s, r), RelS pf) | TNext (NoRec2, r, s) -> let Belast (NoRec2, r, s) = belast_rule NoRec2 r s in let AnyS (r, pf) = get_symbols r in AnyS (TCns (NoRec2, s, r), RelS pf) let get_rec_symbols (type s tr p) (s : (s, tr, p) ty_symbols) : tr ty_rec = match s with TCns (MayRec2, _, _) -> MayRec | TCns (NoRec2, _, _) -> NoRec | TNil -> NoRec let get_rec_tree (type s tr f) (s : (s, tr, f) ty_tree) : tr ty_rec = match s with Node (MayRec3, _) -> MayRec | Node (NoRec3, _) -> NoRec | LocAct _ -> NoRec | DeadEnd -> NoRec let and_symbols_tree (type s trs trt p f) (s : (s, trs, p) ty_symbols) (t : (s, trt, f) ty_tree) : (trs, trt) ty_mayrec_and_ex = match get_rec_symbols s, get_rec_tree t with | MayRec, MayRec -> MayRecNR NR00 | MayRec, NoRec -> MayRecNR NR01 | NoRec, MayRec -> MayRecNR NR10 | NoRec, NoRec -> MayRecNR NR11 let and_and_tree (type s tr' trt tr trn trs trb f) (ar : (tr', trt, tr) ty_and_rec) (arn : (trn, trs, trb, trt) ty_and_rec3) (t : (s, trb, f) ty_tree) : (tr', trb, tr) ty_and_rec = match ar, arn, get_rec_tree t with | MayRec2, _, MayRec -> MayRec2 | MayRec2, _, NoRec -> MayRec2 | NoRec2, NoRec3, NoRec -> NoRec2 let insert_tree (type s trs trt tr p k a) entry_name (ar : (trs, trt, tr) ty_and_ex) (gsymbols : (s, trs, p) ty_symbols) (pf : (p, k, a) rel_prod) (action : k) (tree : (s, trt, a) ty_tree) : (s, tr, a) ty_tree = let rec insert : type trs trt tr p f k. 
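(* [insert ar symbols pf tree action] grafts the rule [symbols -> action] into [tree], sharing the longest common prefix of symbols with the existing branches; once every symbol has been consumed ([TNil]), the action is stored in a [LocAct] leaf, possibly masking a previously inserted action. *)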
(trs, trt, tr) ty_and_ex -> (s, trs, p) ty_symbols -> (p, k, f) rel_prod -> (s, trt, f) ty_tree -> k -> (s, tr, f) ty_tree = fun ar symbols pf tree action -> match symbols, pf with TCns (ars, s, sl), RelS pf -> (* descent in tree at symbol [s] *) insert_in_tree ar ars s sl pf tree action | TNil, Rel0 -> (* insert the action *) let node (type tb) ({node = s; son = son; brother = bro} : (_, _, _, tb, _, _) ty_node) = let ar : (norec, tb, tb) ty_and_ex = match get_rec_tree bro with MayRec -> NR10 | NoRec -> NR11 in {node = s; son = son; brother = insert ar TNil Rel0 bro action} in match ar, tree with | NR10, Node (_, n) -> Node (MayRec3, node n) | NR11, Node (NoRec3, n) -> Node (NoRec3, node n) | NR11, LocAct (old_action, action_list) -> (* What to do about this warning? For now it is disabled *) if false then begin let msg = " Grammar extension: " ^ (if entry_name = "" then "" else "in ["^entry_name^"%s], ") ^ "some rule has been masked" in Feedback.msg_warning (Pp.str msg) end; LocAct (action, old_action :: action_list) | NR11, DeadEnd -> LocAct (action, []) and insert_in_tree : type trs trs' trs'' trt tr a p f k. (trs'', trt, tr) ty_and_ex -> (trs, trs', trs'') ty_and_rec -> (s, trs, a) ty_symbol -> (s, trs', p) ty_symbols -> (p, k, a -> f) rel_prod -> (s, trt, f) ty_tree -> k -> (s, tr, f) ty_tree = fun ar ars s sl pf tree action -> let ar : (trs'', trt, tr) ty_and_rec = match ar with NR11 -> NoRec2 | NR00 -> MayRec2 | NR01 -> MayRec2 | NR10 -> MayRec2 in match try_insert ar ars s sl pf tree action with Some t -> t | None -> let node ar = {node = s; son = insert ar sl pf DeadEnd action; brother = tree} in match ar, ars, get_rec_symbols sl with | MayRec2, MayRec2, MayRec -> Node (MayRec3, node NR01) | MayRec2, _, NoRec -> Node (MayRec3, node NR11) | NoRec2, NoRec2, NoRec -> Node (NoRec3, node NR11) and try_insert : type trs trs' trs'' trt tr a p f k. (trs'', trt, tr) ty_and_rec -> (trs, trs', trs'') ty_and_rec -> (s, trs, a) ty_symbol -> (s, trs', p) ty_symbols -> (p, k, a -> f) rel_prod -> (s, trt, f) ty_tree -> k -> (s, tr, f) ty_tree option = fun ar ars symb symbl pf tree action -> match tree with Node (arn, {node = symb1; son = son; brother = bro}) -> (* merging rule [symb; symbl -> action] in tree [symb1; son | bro] *) begin match eq_symbol symb symb1 with | Some Refl -> (* reducing merge of [symb; symbl -> action] with [symb1; son] to merge of [symbl -> action] with [son] *) let MayRecNR arss = and_symbols_tree symbl son in let son = insert arss symbl pf son action in let node = {node = symb1; son = son; brother = bro} in (* propagate presence of SELF/NEXT *) begin match ar, ars, arn, arss with | MayRec2, _, _, _ -> Some (Node (MayRec3, node)) | NoRec2, NoRec2, NoRec3, NR11 -> Some (Node (NoRec3, node)) end | None -> let ar' = and_and_tree ar arn bro in if is_before symb1 symb || derive_eps symb && not (derive_eps symb1) then (* inserting new rule after current rule, i.e. 
in [bro] *) let bro = match try_insert ar' ars symb symbl pf bro action with Some bro -> (* could insert in [bro] *) bro | None -> (* not ok to insert in [bro] or after; we insert now *) let MayRecNR arss = and_symbols_tree symbl DeadEnd in let son = insert arss symbl pf DeadEnd action in let node = {node = symb; son = son; brother = bro} in (* propagate presence of SELF/NEXT *) match ar, ars, arn, arss with | MayRec2, _, _, _ -> Node (MayRec3, node) | NoRec2, NoRec2, NoRec3, NR11 -> Node (NoRec3, node) in let node = {node = symb1; son = son; brother = bro} in (* propagate presence of SELF/NEXT *) match ar, arn with | MayRec2, _ -> Some (Node (MayRec3, node)) | NoRec2, NoRec3 -> Some (Node (NoRec3, node)) else (* should insert in [bro] or before the tree [symb1; son | bro] *) match try_insert ar' ars symb symbl pf bro action with Some bro -> (* could insert in [bro] *) let node = {node = symb1; son = son; brother = bro} in begin match ar, arn with | MayRec2, _ -> Some (Node (MayRec3, node)) | NoRec2, NoRec3 -> Some (Node (NoRec3, node)) end | None -> (* should insert before [symb1; son | bro] *) None end | LocAct (_, _) -> None | DeadEnd -> None in insert ar gsymbols pf tree action let insert_tree_norec (type s p k a) entry_name (gsymbols : (s, norec, p) ty_symbols) (pf : (p, k, a) rel_prod) (action : k) (tree : (s, norec, a) ty_tree) : (s, norec, a) ty_tree = insert_tree entry_name NR11 gsymbols pf action tree let insert_tree (type s trs trt p k a) entry_name (gsymbols : (s, trs, p) ty_symbols) (pf : (p, k, a) rel_prod) (action : k) (tree : (s, trt, a) ty_tree) : (s, a) ty_mayrec_tree = let MayRecNR ar = and_symbols_tree gsymbols tree in MayRecTree (insert_tree entry_name ar gsymbols pf action tree) let srules (type self a) (rl : a ty_rules list) : (self, norec, a) ty_symbol = let rec retype_tree : type s a. (s, norec, a) ty_tree -> (self, norec, a) ty_tree = function | Node (NoRec3, {node = s; son = son; brother = bro}) -> Node (NoRec3, {node = retype_symbol s; son = retype_tree son; brother = retype_tree bro}) | LocAct (k, kl) -> LocAct (k, kl) | DeadEnd -> DeadEnd and retype_symbol : type s a. (s, norec, a) ty_symbol -> (self, norec, a) ty_symbol = function | Stoken p -> Stoken p | Stokens l -> Stokens l | Slist1 s -> Slist1 (retype_symbol s) | Slist1sep (s, sep, b) -> Slist1sep (retype_symbol s, retype_symbol sep, b) | Slist0 s -> Slist0 (retype_symbol s) | Slist0sep (s, sep, b) -> Slist0sep (retype_symbol s, retype_symbol sep, b) | Sopt s -> Sopt (retype_symbol s) | Snterm e -> Snterm e | Snterml (e, l) -> Snterml (e, l) | Stree t -> Stree (retype_tree t) in let rec retype_rule : type s k r. (s, norec, k, r) ty_rule -> (self, norec, k, r) ty_rule = function | TStop -> TStop | TNext (NoRec2, r, s) -> TNext (NoRec2, retype_rule r, retype_symbol s) in let t = List.fold_left (fun tree (TRules (symbols, action)) -> let symbols = retype_rule symbols in let AnyS (symbols, pf) = get_symbols symbols in insert_tree_norec "" symbols pf action tree) DeadEnd rl in Stree t let is_level_labelled n (Level lev) = match lev.lname with Some n1 -> n = n1 | None -> false let insert_level (type s tr p k) entry_name (symbols : (s, tr, p) ty_symbols) (pf : (p, k, Loc.t -> s) rel_prod) (action : k) (slev : s ty_level) : s ty_level = match symbols with | TCns (_, Sself, symbols) -> (* Insert a rule of the form "SELF; ...." 
*) let Level slev = slev in let RelS pf = pf in let MayRecTree lsuffix = insert_tree entry_name symbols pf action slev.lsuffix in Level {assoc = slev.assoc; lname = slev.lname; lsuffix = lsuffix; lprefix = slev.lprefix} | _ -> (* Insert a rule not starting with SELF *) let Level slev = slev in let MayRecTree lprefix = insert_tree entry_name symbols pf action slev.lprefix in Level {assoc = slev.assoc; lname = slev.lname; lsuffix = slev.lsuffix; lprefix = lprefix} let empty_lev lname assoc = let assoc = match assoc with Some a -> a | None -> LeftA in Level {assoc = assoc; lname = lname; lsuffix = DeadEnd; lprefix = DeadEnd} let err_no_level lev e = let msg = sprintf "Grammar.extend: No level labelled \"%s\" in entry \"%s\"" lev e in failwith msg let get_position entry position levs = match position with First -> [], levs | Last -> levs, [] | Before n -> let rec get = function [] -> err_no_level n entry.ename | lev :: levs -> if is_level_labelled n lev then [], lev :: levs else let (levs1, levs2) = get levs in lev :: levs1, levs2 in get levs | After n -> let rec get = function [] -> err_no_level n entry.ename | lev :: levs -> if is_level_labelled n lev then [lev], levs else let (levs1, levs2) = get levs in lev :: levs1, levs2 in get levs let get_level entry name levs = match name with | Some n -> let rec get = function [] -> err_no_level n entry.ename | lev :: levs -> if is_level_labelled n lev then [], lev, levs else let (levs1, rlev, levs2) = get levs in lev :: levs1, rlev, levs2 in get levs | None -> begin match levs with lev :: levs -> [], lev, levs | [] -> let msg = sprintf "Grammar.extend: No top level in entry \"%s\"" entry.ename in failwith msg end let change_to_self0 (type s) (type trec) (type a) (entry : s ty_entry) : (s, trec, a) ty_symbol -> (s, a) ty_mayrec_symbol = function | Snterm e -> begin match eq_entry e entry with | None -> MayRecSymbol (Snterm e) | Some Refl -> MayRecSymbol (Sself) end | x -> MayRecSymbol x let rec change_to_self : type s trec a r. s ty_entry -> (s, trec, a, r) ty_rule -> (s, a, r) ty_mayrec_rule = fun e r -> match r with | TStop -> MayRecRule TStop | TNext (_, r, t) -> let MayRecRule r = change_to_self e r in let MayRecSymbol t = change_to_self0 e t in MayRecRule (TNext (MayRec2, r, t)) let insert_token gram tok = L.tok_using tok; let r = let tok = L.tok_pattern_strings tok in try Hashtbl.find gram.gtokens tok with Not_found -> let r = ref 0 in Hashtbl.add gram.gtokens tok r; r in incr r let insert_tokens gram symbols = let rec insert : type s trec a. (s, trec, a) ty_symbol -> unit = function | Slist0 s -> insert s | Slist1 s -> insert s | Slist0sep (s, t, _) -> insert s; insert t | Slist1sep (s, t, _) -> insert s; insert t | Sopt s -> insert s | Stree t -> tinsert t | Stoken tok -> insert_token gram tok | Stokens (TPattern tok::_) -> insert_token gram tok (* Only the first token is liable to trigger a keyword effect *) | Stokens [] -> assert false | Snterm _ -> () | Snterml (_, _) -> () | Snext -> () | Sself -> () and tinsert : type s tr a. (s, tr, a) ty_tree -> unit = function Node (_, {node = s; brother = bro; son = son}) -> insert s; tinsert bro; tinsert son | LocAct (_, _) -> () | DeadEnd -> () and linsert : type s tr p. 
(s, tr, p) ty_symbols -> unit = function | TNil -> () | TCns (_, s, r) -> insert s; linsert r in linsert symbols type 'a single_extend_statement = string option * Gramext.g_assoc option * 'a ty_production list type 'a extend_statement = | Reuse of string option * 'a ty_production list | Fresh of Gramext.position * 'a single_extend_statement list let add_prod entry lev (TProd (symbols, action)) = let MayRecRule symbols = change_to_self entry symbols in let AnyS (symbols, pf) = get_symbols symbols in insert_tokens egram symbols; insert_level entry.ename symbols pf action lev let levels_of_rules entry st = let elev = match entry.edesc with Dlevels elev -> elev | Dparser _ -> let msg = sprintf "Grammar.extend: entry not extensible: \"%s\"" entry.ename in failwith msg in match st with | Reuse (name, []) -> elev | Reuse (name, prods) -> let (levs1, lev, levs2) = get_level entry name elev in let lev = List.fold_left (fun lev prod -> add_prod entry lev prod) lev prods in levs1 @ [lev] @ levs2 | Fresh (position, rules) -> let (levs1, levs2) = get_position entry position elev in let fold levs (lname, assoc, prods) = let lev = empty_lev lname assoc in let lev = List.fold_left (fun lev prod -> add_prod entry lev prod) lev prods in lev :: levs in let levs = List.fold_left fold [] rules in levs1 @ List.rev levs @ levs2 let logically_eq_symbols entry = let rec eq_symbols : type s1 s2 trec1 trec2 a1 a2. (s1, trec1, a1) ty_symbol -> (s2, trec2, a2) ty_symbol -> bool = fun s1 s2 -> match s1, s2 with Snterm e1, Snterm e2 -> e1.ename = e2.ename | Snterm e1, Sself -> e1.ename = entry.ename | Sself, Snterm e2 -> entry.ename = e2.ename | Snterml (e1, l1), Snterml (e2, l2) -> e1.ename = e2.ename && l1 = l2 | Slist0 s1, Slist0 s2 -> eq_symbols s1 s2 | Slist0sep (s1, sep1, b1), Slist0sep (s2, sep2, b2) -> eq_symbols s1 s2 && eq_symbols sep1 sep2 && b1 = b2 | Slist1 s1, Slist1 s2 -> eq_symbols s1 s2 | Slist1sep (s1, sep1, b1), Slist1sep (s2, sep2, b2) -> eq_symbols s1 s2 && eq_symbols sep1 sep2 && b1 = b2 | Sopt s1, Sopt s2 -> eq_symbols s1 s2 | Stree t1, Stree t2 -> eq_trees t1 t2 | Stoken p1, Stoken p2 -> L.tok_pattern_eq p1 p2 <> None | Stokens pl1, Stokens pl2 -> tok_pattern_eq_list pl1 pl2 <> None | Sself, Sself -> true | Snext, Snext -> true | _ -> false and eq_trees : type s1 s2 tr1 tr2 a1 a2. (s1, tr1, a1) ty_tree -> (s2, tr2, a2) ty_tree -> bool = fun t1 t2 -> match t1, t2 with Node (_, n1), Node (_, n2) -> eq_symbols n1.node n2.node && eq_trees n1.son n2.son && eq_trees n1.brother n2.brother | LocAct _, LocAct _ -> true | LocAct _, DeadEnd -> true | DeadEnd, LocAct _ -> true | DeadEnd, DeadEnd -> true | _ -> false in eq_symbols (* [delete_rule_in_tree] returns [Some (dsl, t)] if success [dsl] = Some (list of deleted nodes) if branch deleted None if action replaced by previous version of action [t] = remaining tree [None] if failure *) type 's ex_symbols = | ExS : ('s, 'tr, 'p) ty_symbols -> 's ex_symbols let delete_rule_in_tree entry = let rec delete_in_tree : type s tr tr' p r. 
(s, tr, p) ty_symbols -> (s, tr', r) ty_tree -> (s ex_symbols option * (s, r) ty_mayrec_tree) option = fun symbols tree -> match symbols, tree with | TCns (_, s, sl), Node (_, n) -> if logically_eq_symbols entry s n.node then delete_son sl n else begin match delete_in_tree symbols n.brother with Some (dsl, MayRecTree t) -> Some (dsl, MayRecTree (Node (MayRec3, {node = n.node; son = n.son; brother = t}))) | None -> None end | TCns (_, s, sl), _ -> None | TNil, Node (_, n) -> begin match delete_in_tree TNil n.brother with Some (dsl, MayRecTree t) -> Some (dsl, MayRecTree (Node (MayRec3, {node = n.node; son = n.son; brother = t}))) | None -> None end | TNil, DeadEnd -> None | TNil, LocAct (_, []) -> Some (Some (ExS TNil), MayRecTree DeadEnd) | TNil, LocAct (_, action :: list) -> Some (None, MayRecTree (LocAct (action, list))) and delete_son : type s p tr trn trs trb a r. (s, tr, p) ty_symbols -> (s, trn, trs, trb, a, r) ty_node -> (s ex_symbols option * (s, r) ty_mayrec_tree) option = fun sl n -> match delete_in_tree sl n.son with Some (Some (ExS dsl), MayRecTree DeadEnd) -> Some (Some (ExS (TCns (MayRec2, n.node, dsl))), MayRecTree n.brother) | Some (Some (ExS dsl), MayRecTree t) -> let t = Node (MayRec3, {node = n.node; son = t; brother = n.brother}) in Some (Some (ExS (TCns (MayRec2, n.node, dsl))), MayRecTree t) | Some (None, MayRecTree t) -> let t = Node (MayRec3, {node = n.node; son = t; brother = n.brother}) in Some (None, MayRecTree t) | None -> None in delete_in_tree let decr_keyw_use_in_token gram tok = let tok' = L.tok_pattern_strings tok in let r = Hashtbl.find gram.gtokens tok' in decr r; if !r == 0 then begin Hashtbl.remove gram.gtokens tok'; L.tok_removing tok end let rec decr_keyw_use : type s tr a. _ -> (s, tr, a) ty_symbol -> unit = fun gram -> function Stoken tok -> decr_keyw_use_in_token gram tok | Stokens (TPattern tok :: _) -> decr_keyw_use_in_token gram tok | Stokens [] -> assert false | Slist0 s -> decr_keyw_use gram s | Slist1 s -> decr_keyw_use gram s | Slist0sep (s1, s2, _) -> decr_keyw_use gram s1; decr_keyw_use gram s2 | Slist1sep (s1, s2, _) -> decr_keyw_use gram s1; decr_keyw_use gram s2 | Sopt s -> decr_keyw_use gram s | Stree t -> decr_keyw_use_in_tree gram t | Sself -> () | Snext -> () | Snterm _ -> () | Snterml (_, _) -> () and decr_keyw_use_in_tree : type s tr a. _ -> (s, tr, a) ty_tree -> unit = fun gram -> function DeadEnd -> () | LocAct (_, _) -> () | Node (_, n) -> decr_keyw_use gram n.node; decr_keyw_use_in_tree gram n.son; decr_keyw_use_in_tree gram n.brother and decr_keyw_use_in_list : type s tr p. 
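(* Keywords registered in [gtokens] are reference-counted: deleting a rule calls [decr_keyw_use] on its symbols, and [tok_removing] is invoked once a counter drops to zero, mirroring [insert_token] above. *)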
_ -> (s, tr, p) ty_symbols -> unit = fun gram -> function | TNil -> () | TCns (_, s, l) -> decr_keyw_use gram s; decr_keyw_use_in_list gram l let rec delete_rule_in_suffix entry symbols = function Level lev :: levs -> begin match delete_rule_in_tree entry symbols lev.lsuffix with Some (dsl, MayRecTree t) -> begin match dsl with Some (ExS dsl) -> decr_keyw_use_in_list egram dsl | None -> () end; begin match t, lev.lprefix with DeadEnd, DeadEnd -> levs | _ -> let lev = {assoc = lev.assoc; lname = lev.lname; lsuffix = t; lprefix = lev.lprefix} in Level lev :: levs end | None -> let levs = delete_rule_in_suffix entry symbols levs in Level lev :: levs end | [] -> raise Not_found let rec delete_rule_in_prefix entry symbols = function Level lev :: levs -> begin match delete_rule_in_tree entry symbols lev.lprefix with Some (dsl, MayRecTree t) -> begin match dsl with Some (ExS dsl) -> decr_keyw_use_in_list egram dsl | None -> () end; begin match t, lev.lsuffix with DeadEnd, DeadEnd -> levs | _ -> let lev = {assoc = lev.assoc; lname = lev.lname; lsuffix = lev.lsuffix; lprefix = t} in Level lev :: levs end | None -> let levs = delete_rule_in_prefix entry symbols levs in Level lev :: levs end | [] -> raise Not_found let delete_rule_in_level_list (type s tr p) (entry : s ty_entry) (symbols : (s, tr, p) ty_symbols) levs = match symbols with TCns (_, Sself, symbols) -> delete_rule_in_suffix entry symbols levs | TCns (_, Snterm e, symbols') -> begin match eq_entry e entry with | None -> delete_rule_in_prefix entry symbols levs | Some Refl -> delete_rule_in_suffix entry symbols' levs end | _ -> delete_rule_in_prefix entry symbols levs let rec flatten_tree : type s tr a. (s, tr, a) ty_tree -> s ex_symbols list = function DeadEnd -> [] | LocAct (_, _) -> [ExS TNil] | Node (_, {node = n; brother = b; son = s}) -> List.map (fun (ExS l) -> ExS (TCns (MayRec2, n, l))) (flatten_tree s) @ flatten_tree b let utf8_print = ref true let utf8_string_escaped s = let b = Buffer.create (String.length s) in let rec loop i = if i = String.length s then Buffer.contents b else begin begin match s.[i] with '"' -> Buffer.add_string b "\\\"" | '\\' -> Buffer.add_string b "\\\\" | '\n' -> Buffer.add_string b "\\n" | '\t' -> Buffer.add_string b "\\t" | '\r' -> Buffer.add_string b "\\r" | '\b' -> Buffer.add_string b "\\b" | c -> Buffer.add_char b c end; loop (i + 1) end in loop 0 let string_escaped s = if !utf8_print then utf8_string_escaped s else String.escaped s let print_str ppf s = fprintf ppf "\"%s\"" (string_escaped s) let print_token b ppf p = match L.tok_pattern_strings p with | "", Some s -> print_str ppf s | con, Some prm -> if b then fprintf ppf "%s@ %a" con print_str prm else fprintf ppf "(%s@ %a)" con print_str prm | con, None -> fprintf ppf "%s" con let print_tokens ppf = function | [] -> assert false | TPattern p :: pl -> fprintf ppf "[%a%a]" (print_token true) p (fun ppf -> List.iter (function TPattern p -> fprintf ppf ";@ "; print_token true ppf p)) pl let rec print_symbol : type s tr r. 
formatter -> (s, tr, r) ty_symbol -> unit = fun ppf -> function | Slist0 s -> fprintf ppf "LIST0 %a" print_symbol1 s | Slist0sep (s, t, osep) -> fprintf ppf "LIST0 %a SEP %a%s" print_symbol1 s print_symbol1 t (if osep then " OPT_SEP" else "") | Slist1 s -> fprintf ppf "LIST1 %a" print_symbol1 s | Slist1sep (s, t, osep) -> fprintf ppf "LIST1 %a SEP %a%s" print_symbol1 s print_symbol1 t (if osep then " OPT_SEP" else "") | Sopt s -> fprintf ppf "OPT %a" print_symbol1 s | Stoken p -> print_token true ppf p | Stokens [TPattern p] -> print_token true ppf p | Stokens pl -> print_tokens ppf pl | Snterml (e, l) -> fprintf ppf "%s%s@ LEVEL@ %a" e.ename "" print_str l | s -> print_symbol1 ppf s and print_symbol1 : type s tr r. formatter -> (s, tr, r) ty_symbol -> unit = fun ppf -> function | Snterm e -> fprintf ppf "%s%s" e.ename "" | Sself -> pp_print_string ppf "SELF" | Snext -> pp_print_string ppf "NEXT" | Stoken p -> print_token false ppf p | Stokens [TPattern p] -> print_token false ppf p | Stokens pl -> print_tokens ppf pl | Stree t -> print_level ppf pp_print_space (flatten_tree t) | s -> fprintf ppf "(%a)" print_symbol s and print_rule : type s tr p. formatter -> (s, tr, p) ty_symbols -> unit = fun ppf symbols -> fprintf ppf "@["; let rec fold : type s tr p. _ -> (s, tr, p) ty_symbols -> unit = fun sep symbols -> match symbols with | TNil -> () | TCns (_, symbol, symbols) -> fprintf ppf "%t%a" sep print_symbol symbol; fold (fun ppf -> fprintf ppf ";@ ") symbols in let () = fold (fun ppf -> ()) symbols in fprintf ppf "@]" and print_level : type s. _ -> _ -> s ex_symbols list -> _ = fun ppf pp_print_space rules -> fprintf ppf "@[[ "; let () = Format.pp_print_list ~pp_sep:(fun ppf () -> fprintf ppf "%a| " pp_print_space ()) (fun ppf (ExS rule) -> print_rule ppf rule) ppf rules in fprintf ppf " ]@]" let print_levels ppf elev = Format.pp_print_list ~pp_sep:(fun ppf () -> fprintf ppf "@,| ") (fun ppf (Level lev) -> let rules = List.map (fun (ExS t) -> ExS (TCns (MayRec2, Sself, t))) (flatten_tree lev.lsuffix) @ flatten_tree lev.lprefix in fprintf ppf "@["; begin match lev.lname with Some n -> fprintf ppf "%a@;<1 2>" print_str n | None -> () end; begin match lev.assoc with LeftA -> fprintf ppf "LEFTA" | RightA -> fprintf ppf "RIGHTA" | NonA -> fprintf ppf "NONA" end; fprintf ppf "@]@;<1 2>"; print_level ppf pp_force_newline rules) ppf elev let print_entry ppf e = fprintf ppf "@[[ "; begin match e.edesc with Dlevels elev -> print_levels ppf elev | Dparser _ -> fprintf ppf "" end; fprintf ppf " ]@]" let name_of_symbol : type s tr a. s ty_entry -> (s, tr, a) ty_symbol -> string = fun entry -> function Snterm e -> "[" ^ e.ename ^ "]" | Snterml (e, l) -> "[" ^ e.ename ^ " level " ^ l ^ "]" | Sself -> "[" ^ entry.ename ^ "]" | Snext -> "[" ^ entry.ename ^ "]" | Stoken tok -> L.tok_text tok | Stokens tokl -> String.concat " " (List.map (function TPattern tok -> L.tok_text tok) tokl) | Slist0 _ -> assert false | Slist1sep _ -> assert false | Slist1 _ -> assert false | Slist0sep _ -> assert false | Sopt _ -> assert false | Stree _ -> assert false type ('r, 'f) tok_list = | TokNil : ('f, 'f) tok_list | TokCns : 'a pattern * ('r, 'f) tok_list -> ('a -> 'r, 'f) tok_list type ('s, 'f) tok_tree = TokTree : 'a pattern * ('s, _, 'a -> 'r) ty_tree * ('r, 'f) tok_list -> ('s, 'f) tok_tree let rec get_token_list : type s tr a r f. 
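(* [get_token_list] collects the maximal chain of token-only nodes (each without an alternative branch) starting from [last_tok]; it is used below only to build readable error messages listing the expected tokens. *)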
s ty_entry -> a pattern -> (r, f) tok_list -> (s, tr, a -> r) ty_tree -> (s, f) tok_tree option = fun entry last_tok rev_tokl tree -> match tree with Node (_, {node = Stoken tok; son = son; brother = DeadEnd}) -> get_token_list entry tok (TokCns (last_tok, rev_tokl)) son | _ -> match rev_tokl with | TokNil -> None | _ -> Some (TokTree (last_tok, tree, rev_tokl)) let rec name_of_symbol_failed : type s tr a. s ty_entry -> (s, tr, a) ty_symbol -> _ = fun entry -> function | Slist0 s -> name_of_symbol_failed entry s | Slist0sep (s, _, _) -> name_of_symbol_failed entry s | Slist1 s -> name_of_symbol_failed entry s | Slist1sep (s, _, _) -> name_of_symbol_failed entry s | Sopt s -> name_of_symbol_failed entry s | Stree t -> name_of_tree_failed entry t | s -> name_of_symbol entry s and name_of_tree_failed : type s tr a. s ty_entry -> (s, tr, a) ty_tree -> _ = fun entry -> function Node (_, {node = s; brother = bro; son = son}) -> let tokl = match s with Stoken tok -> get_token_list entry tok TokNil son | _ -> None in begin match tokl with None -> let txt = name_of_symbol_failed entry s in let txt = match s, son with Sopt _, Node _ -> txt ^ " or " ^ name_of_tree_failed entry son | _ -> txt in let txt = match bro with DeadEnd -> txt | LocAct (_, _) -> txt | Node _ -> txt ^ " or " ^ name_of_tree_failed entry bro in txt | Some (TokTree (last_tok, _, rev_tokl)) -> let rec build_str : type a b. string -> (a, b) tok_list -> string = fun s -> function | TokNil -> s | TokCns (tok, t) -> build_str (L.tok_text tok ^ " " ^ s) t in build_str (L.tok_text last_tok) rev_tokl end | DeadEnd -> "???" | LocAct (_, _) -> "action" let tree_failed (type s tr a) (entry : s ty_entry) (prev_symb_result : a) (prev_symb : (s, tr, a) ty_symbol) tree = let txt = name_of_tree_failed entry tree in let txt = match prev_symb with Slist0 s -> let txt1 = name_of_symbol_failed entry s in txt1 ^ " or " ^ txt ^ " expected" | Slist1 s -> let txt1 = name_of_symbol_failed entry s in txt1 ^ " or " ^ txt ^ " expected" | Slist0sep (s, sep, _) -> begin match prev_symb_result with [] -> let txt1 = name_of_symbol_failed entry s in txt1 ^ " or " ^ txt ^ " expected" | _ -> let txt1 = name_of_symbol_failed entry sep in txt1 ^ " or " ^ txt ^ " expected" end | Slist1sep (s, sep, _) -> begin match prev_symb_result with [] -> let txt1 = name_of_symbol_failed entry s in txt1 ^ " or " ^ txt ^ " expected" | _ -> let txt1 = name_of_symbol_failed entry sep in txt1 ^ " or " ^ txt ^ " expected" end | Sopt _ -> txt ^ " expected" | Stree _ -> txt ^ " expected" | _ -> txt ^ " expected after " ^ name_of_symbol_failed entry prev_symb in txt ^ " (in [" ^ entry.ename ^ "])" let symb_failed entry prev_symb_result prev_symb symb = let tree = Node (MayRec3, {node = symb; brother = DeadEnd; son = DeadEnd}) in tree_failed entry prev_symb_result prev_symb tree let level_number entry lab = let rec lookup levn = function [] -> failwith ("unknown level " ^ lab) | lev :: levs -> if is_level_labelled lab lev then levn else lookup (succ levn) levs in match entry.edesc with Dlevels elev -> lookup 0 elev | Dparser _ -> raise Not_found let rec top_symb : type s tr a. s ty_entry -> (s, tr, a) ty_symbol -> (s, norec, a) ty_symbol = fun entry -> function Sself -> Snterm entry | Snext -> Snterm entry | Snterml (e, _) -> Snterm e | Slist1sep (s, sep, b) -> Slist1sep (top_symb entry s, sep, b) | _ -> raise Stream.Failure let entry_of_symb : type s tr a. 
s ty_entry -> (s, tr, a) ty_symbol -> a ty_entry = fun entry -> function Sself -> entry | Snext -> entry | Snterm e -> e | Snterml (e, _) -> e | _ -> raise Stream.Failure let top_tree : type s tr a. s ty_entry -> (s, tr, a) ty_tree -> (s, tr, a) ty_tree = fun entry -> function Node (MayRec3, {node = s; brother = bro; son = son}) -> Node (MayRec3, {node = top_symb entry s; brother = bro; son = son}) | Node (NoRec3, {node = s; brother = bro; son = son}) -> Node (NoRec3, {node = top_symb entry s; brother = bro; son = son}) | LocAct (_, _) -> raise Stream.Failure | DeadEnd -> raise Stream.Failure let skip_if_empty bp p strm = if LStream.count strm == bp then fun a -> p strm else raise Stream.Failure let continue entry bp a symb son p1 (strm__ : _ LStream.t) = let a = (entry_of_symb entry symb).econtinue 0 bp a strm__ in let act = try p1 strm__ with Stream.Failure -> raise (Stream.Error (tree_failed entry a symb son)) in fun _ -> act a (** Recover from a success on [symb] with result [a] followed by a failure on [son] in a rule of the form [a = symb; son] *) let do_recover parser_of_tree entry nlevn alevn bp a symb son (strm__ : _ LStream.t) = try (* Try to replay the son with the top occurrence of NEXT (by default at level nlevn) and trailing SELF (by default at alevn) replaced with self at top level; This allows for instance to recover from a failure on the second SELF of « SELF; "\/"; SELF » by doing as if it were « SELF; "\/"; same-entry-at-top-level » with application e.g. to accept "A \/ forall x, x = x" w/o requiring the expected parentheses as in "A \/ (forall x, x = x)". *) parser_of_tree entry nlevn alevn (top_tree entry son) strm__ with Stream.Failure -> try (* Discard the rule if what has been consumed before failing is the empty sequence (due to some OPT or LIST0); example: « OPT "!"; ident » fails to see an ident and the OPT was resolved into the empty sequence, with application e.g. to being able to safely write « LIST1 [ OPT "!"; id = ident -> id] ». *) skip_if_empty bp (fun (strm__ : _ LStream.t) -> raise Stream.Failure) strm__ with Stream.Failure -> (* In case of success on just SELF, NEXT or an explicit call to a subentry followed by a failure on the rest (son), retry parsing as if this entry had been called at its toplevel; example: « "{"; entry-at-some-level; "}" » fails on "}" and is retried with « "{"; same-entry-at-top-level; "}" », allowing e.g. to parse « {1 + 1} » while « {(1 + 1)} » would have been expected according to the level. *) continue entry bp a symb son (parser_of_tree entry nlevn alevn son) strm__ let recover parser_of_tree entry nlevn alevn bp a symb son strm = do_recover parser_of_tree entry nlevn alevn bp a symb son strm let item_skipped = ref false let call_and_push ps al strm = item_skipped := false; let a = ps strm in let al = if !item_skipped then al else a :: al in item_skipped := false; al let token_ematch tok = let tematch = L.tok_match tok in fun tok -> tematch tok (** nlevn: level for Snext alevn: level for recursive calls on the left-hand side of the rule (depending on associativity) *) let rec parser_of_tree : type s tr r. 
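(* Concretely, a final [SELF] (see the [Node {node = Sself; son = LocAct _}] cases below) is parsed with [entry.estart alevn]: the callers pass [alevn = n+1] for a left- or non-associative level [n] and [alevn = n] for a right-associative one, while a [NEXT] symbol is always parsed at [nlevn]. *)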
s ty_entry -> int -> int -> (s, tr, r) ty_tree -> r parser_t = fun entry nlevn alevn -> function DeadEnd -> (fun (strm__ : _ LStream.t) -> raise Stream.Failure) | LocAct (act, _) -> (fun (strm__ : _ LStream.t) -> act) | Node (_, {node = Sself; son = LocAct (act, _); brother = DeadEnd}) -> (* SELF on the right-hand side of the last rule *) (fun (strm__ : _ LStream.t) -> let a = entry.estart alevn strm__ in act a) | Node (_, {node = Sself; son = LocAct (act, _); brother = bro}) -> (* SELF on the right-hand side of a rule *) let p2 = parser_of_tree entry nlevn alevn bro in (fun (strm__ : _ LStream.t) -> match try Some (entry.estart alevn strm__) with Stream.Failure -> None with Some a -> act a | _ -> p2 strm__) | Node (_, {node = Stoken tok; son = son; brother = DeadEnd}) -> parser_of_token_list entry nlevn alevn tok son | Node (_, {node = Stoken tok; son = son; brother = bro}) -> let p2 = parser_of_tree entry nlevn alevn bro in let p1 = parser_of_token_list entry nlevn alevn tok son in (fun (strm__ : _ LStream.t) -> try p1 strm__ with Stream.Failure -> p2 strm__) | Node (_, {node = s; son = son; brother = DeadEnd}) -> let ps = parser_of_symbol entry nlevn s in let p1 = parser_of_tree entry nlevn alevn son in let p1 = parser_cont p1 entry nlevn alevn s son in (fun (strm__ : _ LStream.t) -> let bp = LStream.count strm__ in let a = ps strm__ in let act = try p1 bp a strm__ with Stream.Failure -> raise (Stream.Error (tree_failed entry a s son)) in act a) | Node (_, {node = s; son = son; brother = bro}) -> let ps = parser_of_symbol entry nlevn s in let p1 = parser_of_tree entry nlevn alevn son in let p1 = parser_cont p1 entry nlevn alevn s son in let p2 = parser_of_tree entry nlevn alevn bro in (fun (strm : _ LStream.t) -> let bp = LStream.count strm in match try Some (ps strm) with Stream.Failure -> None with Some a -> begin match (try Some (p1 bp a strm) with Stream.Failure -> None) with Some act -> act a | None -> raise (Stream.Error (tree_failed entry a s son)) end | None -> p2 strm) and parser_cont : type s tr tr' a r. (a -> r) parser_t -> s ty_entry -> int -> int -> (s, tr, a) ty_symbol -> (s, tr', a -> r) ty_tree -> int -> a -> (a -> r) parser_t = fun p1 entry nlevn alevn s son bp a (strm__ : _ LStream.t) -> try p1 strm__ with Stream.Failure -> recover parser_of_tree entry nlevn alevn bp a s son strm__ (** [parser_of_token_list] attempts to look-ahead an arbitrary-long finite sequence of tokens. E.g., in [ [ "foo"; "bar1"; "bar3"; ... -> action1 | "foo"; "bar2"; ... -> action2 | other-rules ] ] compiled as: [ [ "foo"; ["bar1"; "bar3"; ... -> action1 |"bar2"; ... -> action2] | other-rules ] ] this is able to look ahead "foo"; "bar1"; "bar3" and if not found "foo"; "bar1", then, if still not found, "foo"; "bar2" _without_ consuming the tokens until it is sure that a longest chain of tokens (before finding non-terminals or the end of the production) is found (and backtracking to [other-rules] if no such longest chain can be found). *) and parser_of_token_list : type s tr lt r. s ty_entry -> int -> int -> lt pattern -> (s, tr, lt -> r) ty_tree -> r parser_t = fun entry nlevn alevn tok tree -> let rec loop : type tr lt r. 
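(* In [loop n last_tok tree], tokens [0 .. n-1] of the stream have already been matched by peeking, without being consumed, and [last_tok] is the pattern that matched token [n-1]; when a non-token node is reached, the [n] look-ahead tokens are finally junked and the remaining tree is parsed normally. *)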
int -> lt pattern -> (s, tr, r) ty_tree -> lt -> r parser_t = fun n last_tok tree -> match tree with | Node (_, {node = Stoken tok; son = son; brother = bro}) -> let tematch = token_ematch tok in let p2 = loop n last_tok bro in let p1 = loop (n+1) tok son in fun last_a strm -> (match (try Some (tematch (LStream.peek_nth n strm)) with Stream.Failure -> None) with | Some a -> (match try Some (p1 a strm) with Stream.Failure -> None with | Some act -> act a | None -> p2 last_a strm) | None -> p2 last_a strm) | DeadEnd -> fun last_a strm -> raise Stream.Failure | _ -> let ps = parser_of_tree entry nlevn alevn tree in fun last_a strm -> for _i = 1 to n do LStream.junk strm done; match try Some (ps strm) with Stream.Failure -> (* Tolerance: retry w/o granting the level constraint (see recover) *) try Some (parser_of_tree entry nlevn alevn (top_tree entry tree) strm) with Stream.Failure -> None with | Some act -> act | None -> raise (Stream.Error (tree_failed entry last_a (Stoken last_tok) tree)) in let ps = loop 1 tok tree in let tematch = token_ematch tok in fun strm -> match LStream.peek strm with | Some tok -> let a = tematch tok in let act = ps a strm in act a | None -> raise Stream.Failure and parser_of_symbol : type s tr a. s ty_entry -> int -> (s, tr, a) ty_symbol -> a parser_t = fun entry nlevn -> function | Slist0 s -> let ps = call_and_push (parser_of_symbol entry nlevn s) in let rec loop al (strm__ : _ LStream.t) = match try Some (ps al strm__) with Stream.Failure -> None with Some al -> loop al strm__ | _ -> al in (fun (strm__ : _ LStream.t) -> let a = loop [] strm__ in List.rev a) | Slist0sep (symb, sep, false) -> let ps = call_and_push (parser_of_symbol entry nlevn symb) in let pt = parser_of_symbol entry nlevn sep in let rec kont al (strm__ : _ LStream.t) = match try Some (pt strm__) with Stream.Failure -> None with Some v -> let al = try ps al strm__ with Stream.Failure -> raise (Stream.Error (symb_failed entry v sep symb)) in kont al strm__ | _ -> al in (fun (strm__ : _ LStream.t) -> match try Some (ps [] strm__) with Stream.Failure -> None with Some al -> let a = kont al strm__ in List.rev a | _ -> []) | Slist0sep (symb, sep, true) -> let ps = call_and_push (parser_of_symbol entry nlevn symb) in let pt = parser_of_symbol entry nlevn sep in let rec kont al (strm__ : _ LStream.t) = match try Some (pt strm__) with Stream.Failure -> None with Some v -> begin match (try Some (ps al strm__) with Stream.Failure -> None) with Some al -> kont al strm__ | _ -> al end | _ -> al in (fun (strm__ : _ LStream.t) -> match try Some (ps [] strm__) with Stream.Failure -> None with Some al -> let a = kont al strm__ in List.rev a | _ -> []) | Slist1 s -> let ps = call_and_push (parser_of_symbol entry nlevn s) in let rec loop al (strm__ : _ LStream.t) = match try Some (ps al strm__) with Stream.Failure -> None with Some al -> loop al strm__ | _ -> al in (fun (strm__ : _ LStream.t) -> let al = ps [] strm__ in let a = loop al strm__ in List.rev a) | Slist1sep (symb, sep, false) -> let ps = call_and_push (parser_of_symbol entry nlevn symb) in let pt = parser_of_symbol entry nlevn sep in let rec kont al (strm__ : _ LStream.t) = match try Some (pt strm__) with Stream.Failure -> None with Some v -> let al = try ps al strm__ with Stream.Failure -> let a = try parse_top_symb entry symb strm__ with Stream.Failure -> raise (Stream.Error (symb_failed entry v sep symb)) in a :: al in kont al strm__ | _ -> al in (fun (strm__ : _ LStream.t) -> let al = ps [] strm__ in let a = kont al strm__ in List.rev a) | 
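(* The [true] flag (printed as [OPT_SEP]) tolerates a trailing separator: a separator that is not followed by a further element simply ends the list instead of raising a parse error. *)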
Slist1sep (symb, sep, true) -> let ps = call_and_push (parser_of_symbol entry nlevn symb) in let pt = parser_of_symbol entry nlevn sep in let rec kont al (strm__ : _ LStream.t) = match try Some (pt strm__) with Stream.Failure -> None with Some v -> begin match (try Some (ps al strm__) with Stream.Failure -> None) with Some al -> kont al strm__ | _ -> match try Some (parse_top_symb entry symb strm__) with Stream.Failure -> None with Some a -> kont (a :: al) strm__ | _ -> al end | _ -> al in (fun (strm__ : _ LStream.t) -> let al = ps [] strm__ in let a = kont al strm__ in List.rev a) | Sopt s -> let ps = parser_of_symbol entry nlevn s in (fun (strm__ : _ LStream.t) -> match try Some (ps strm__) with Stream.Failure -> None with Some a -> Some a | _ -> None) | Stree t -> let pt = parser_of_tree entry 1 0 t in (fun (strm__ : _ LStream.t) -> let bp = LStream.count strm__ in let a = pt strm__ in let ep = LStream.count strm__ in let loc = LStream.interval_loc bp ep strm__ in a loc) | Snterm e -> (fun (strm__ : _ LStream.t) -> e.estart 0 strm__) | Snterml (e, l) -> (fun (strm__ : _ LStream.t) -> e.estart (level_number e l) strm__) | Sself -> (fun (strm__ : _ LStream.t) -> entry.estart 0 strm__) | Snext -> (fun (strm__ : _ LStream.t) -> entry.estart nlevn strm__) | Stoken tok -> parser_of_token entry tok | Stokens tokl -> parser_of_tokens entry tokl and parser_of_token : type s a. s ty_entry -> a pattern -> a parser_t = fun entry tok -> let f = L.tok_match tok in fun strm -> match LStream.peek strm with Some tok -> let r = f tok in LStream.junk strm; r | None -> raise Stream.Failure and parser_of_tokens : type s. s ty_entry -> ty_pattern list -> unit parser_t = fun entry tokl -> let rec loop n = function | [] -> fun strm -> for _i = 1 to n do LStream.junk strm done; () | TPattern tok :: tokl -> let tematch = token_ematch tok in fun strm -> ignore (tematch (LStream.peek_nth n strm)); loop (n+1) tokl strm in loop 0 tokl and parse_top_symb : type s tr a. s ty_entry -> (s, tr, a) ty_symbol -> a parser_t = fun entry symb -> parser_of_symbol entry 0 (top_symb entry symb) (** [start_parser_of_levels entry clevn levels levn strm] goes top-down from level [clevn] to the last level, ignoring rules between [levn] and [clevn], as if starting from [max(clevn,levn)]. On each rule of the form [prefix] (where [prefix] is a rule not starting with [SELF]), it tries to consume the stream [strm]. The interesting case is [entry.estart] which is [start_parser_of_levels entry 0 entry.edesc], thus practically going from [levn] to the end. More schematically, assuming each level has the form level n: [ a = SELF; b = suffix_tree_n -> action_n(a,b) | a = prefix_tree_n -> action'_n(a) ] then the main loop does the following: estart n = if prefix_tree_n matches the stream as a then econtinue n (action'_n(a)) else start (n+1) econtinue n a = if suffix_tree_n matches the stream as b then econtinue n (action_n(a,b)) else if n=0 then a else econtinue (n-1) a *) let rec start_parser_of_levels entry clevn = function [] -> (fun levn (strm__ : _ LStream.t) -> raise Stream.Failure) | Level lev :: levs -> let p1 = start_parser_of_levels entry (succ clevn) levs in match lev.lprefix with DeadEnd -> p1 | tree -> let alevn = match lev.assoc with LeftA | NonA -> succ clevn | RightA -> clevn in let p2 = parser_of_tree entry (succ clevn) alevn tree in match levs with [] -> (fun levn strm -> (* this code should be there but is commented to preserve compatibility with previous versions... 
with this code, the grammar entry e: [[ "x"; a = e | "y" ]] should fail because it should be: e: [RIGHTA[ "x"; a = e | "y" ]]... if levn > clevn then match strm with parser [] else *) let (strm__ : _ LStream.t) = strm in let bp = LStream.count strm__ in let act = p2 strm__ in let ep = LStream.count strm__ in let a = act (LStream.interval_loc bp ep strm__) in entry.econtinue levn bp a strm) | _ -> fun levn strm -> if levn > clevn then (* Skip rules before [levn] *) p1 levn strm else let (strm__ : _ LStream.t) = strm in let bp = LStream.count strm__ in match try Some (p2 strm__) with Stream.Failure -> None with Some act -> let ep = LStream.count strm__ in let a = act (LStream.interval_loc bp ep strm__) in entry.econtinue levn bp a strm | _ -> p1 levn strm__ (** [continue_parser_of_levels entry clevn levels levn bp a strm] goes bottom-up from the last level to level [clevn], ignoring rules between [levn] and [clevn], as if stopping at [max(clevn,levn)]. It tries to consume the stream [strm] on the suffix of rules of the form [SELF; suffix] knowing that [a] is what consumed [SELF] at level [levn] (or [levn+1] depending on associativity). The interesting case is [entry.econtinue levn bp a] which is [try continue_parser_of_levels entry 0 entry.edesc levn bp a with Failure -> a], thus practically going from the end to [levn]. *) let rec continue_parser_of_levels entry clevn = function [] -> (fun levn bp a (strm__ : _ LStream.t) -> raise Stream.Failure) | Level lev :: levs -> let p1 = continue_parser_of_levels entry (succ clevn) levs in match lev.lsuffix with DeadEnd -> p1 | tree -> let alevn = match lev.assoc with LeftA | NonA -> succ clevn | RightA -> clevn in let p2 = parser_of_tree entry (succ clevn) alevn tree in fun levn bp a strm -> if levn > clevn then (* Skip rules before [levn] *) p1 levn bp a strm else let (strm__ : _ LStream.t) = strm in try p1 levn bp a strm__ with Stream.Failure -> let act = p2 strm__ in let ep = LStream.count strm__ in let a = act a (LStream.interval_loc bp ep strm__) in entry.econtinue levn bp a strm let continue_parser_of_entry entry = match entry.edesc with Dlevels elev -> let p = continue_parser_of_levels entry 0 elev in (fun levn bp a (strm__ : _ LStream.t) -> try p levn bp a strm__ with Stream.Failure -> a) | Dparser p -> fun levn bp a (strm__ : _ LStream.t) -> raise Stream.Failure let empty_entry ename levn strm = raise (Stream.Error ("entry [" ^ ename ^ "] is empty")) let start_parser_of_entry entry = match entry.edesc with Dlevels [] -> empty_entry entry.ename | Dlevels elev -> start_parser_of_levels entry 0 elev | Dparser p -> fun levn strm -> p strm (* Extend syntax *) let init_entry_functions entry = entry.estart <- (fun lev strm -> let f = start_parser_of_entry entry in entry.estart <- f; f lev strm); entry.econtinue <- (fun lev bp a strm -> let f = continue_parser_of_entry entry in entry.econtinue <- f; f lev bp a strm) let extend_entry entry statement = let elev = levels_of_rules entry statement in entry.edesc <- Dlevels elev; init_entry_functions entry (* Deleting a rule *) let delete_rule entry sl = match entry.edesc with Dlevels levs -> let levs = delete_rule_in_level_list entry sl levs in entry.edesc <- Dlevels levs; entry.estart <- (fun lev strm -> let f = start_parser_of_entry entry in entry.estart <- f; f lev strm); entry.econtinue <- (fun lev bp a strm -> let f = continue_parser_of_entry entry in entry.econtinue <- f; f lev bp a strm) | Dparser _ -> () (* Normal interface *) module Parsable = struct type t = { pa_tok_strm : L.te LStream.t ; 
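(* lexer state (e.g. recorded comments), saved and restored around each call to [parse_parsable] below *)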
lexer_state : L.State.t ref } let parse_parsable entry p = let efun = entry.estart 0 in let ts = p.pa_tok_strm in let get_parsing_loc () = (* Build the loc spanning from just after what is consumed (count) up to the further token known to have been read (max_peek). Being a parsing error, there needs to be a next token that caused the failure, except when the rule is empty (e.g. an empty custom entry); thus, we need to ensure that the token at location cnt has been peeked (which in turn ensures that the max peek is at least the current position) *) let _ = LStream.peek ts in let loc' = LStream.max_peek_loc ts in let loc = LStream.get_loc (LStream.count ts) ts in Loc.merge loc loc' in try efun ts with Stream.Failure -> let loc = get_parsing_loc () in Loc.raise ~loc (Stream.Error ("illegal begin of " ^ entry.ename)) | Stream.Error _ as exc -> let loc = get_parsing_loc () in Loc.raise ~loc exc | exc -> (* An error produced by the evaluation of the right-hand side *) (* of a rule, or a signal such as Sys.Break; we leave to the *) (* error the responsibility of locating itself *) let exc,info = Exninfo.capture exc in Exninfo.iraise (exc,info) let parse_parsable e p = L.State.set !(p.lexer_state); try let c = parse_parsable e p in p.lexer_state := L.State.get (); c with exn -> let exn,info = Exninfo.capture exn in L.State.drop (); Exninfo.iraise (exn,info) let make ?loc cs = let lexer_state = ref (L.State.init ()) in L.State.set !lexer_state; let ts = L.tok_func ?loc cs in lexer_state := L.State.get (); {pa_tok_strm = ts; lexer_state} let comments p = L.State.get_comments !(p.lexer_state) end module Entry = struct type 'a t = 'a ty_entry let make n = { ename = n; estart = empty_entry n; econtinue = (fun _ _ _ (strm__ : _ LStream.t) -> raise Stream.Failure); edesc = Dlevels []} let create = make let parse (e : 'a t) p : 'a = Parsable.parse_parsable e p let parse_token_stream (e : 'a t) ts : 'a = e.estart 0 ts let name e = e.ename type 'a parser_fun = { parser_fun : te LStream.t -> 'a } let of_parser n { parser_fun = (p : te LStream.t -> 'a) } : 'a t = { ename = n; estart = (fun _ -> p); econtinue = (fun _ _ _ (strm__ : _ LStream.t) -> raise Stream.Failure); edesc = Dparser p} let print ppf e = fprintf ppf "%a@." 
print_entry e let is_empty e = match e.edesc with | Dparser _ -> failwith "Arbitrary parser entry" | Dlevels elev -> List.is_empty elev end module rec Symbol : sig type ('self, 'trec, 'a) t = ('self, 'trec, 'a) ty_symbol val nterm : 'a Entry.t -> ('self, norec, 'a) t val nterml : 'a Entry.t -> string -> ('self, norec, 'a) t val list0 : ('self, 'trec, 'a) t -> ('self, 'trec, 'a list) t val list0sep : ('self, 'trec, 'a) t -> ('self, norec, unit) t -> bool -> ('self, 'trec, 'a list) t val list1 : ('self, 'trec, 'a) t -> ('self, 'trec, 'a list) t val list1sep : ('self, 'trec, 'a) t -> ('self, norec, unit) t -> bool -> ('self, 'trec, 'a list) t val opt : ('self, 'trec, 'a) t -> ('self, 'trec, 'a option) t val self : ('self, mayrec, 'self) t val next : ('self, mayrec, 'self) t val token : 'c pattern -> ('self, norec, 'c) t val tokens : ty_pattern list -> ('self, norec, unit) t val rules : 'a Rules.t list -> ('self, norec, 'a) t end = struct type ('self, 'trec, 'a) t = ('self, 'trec, 'a) ty_symbol let nterm e = Snterm e let nterml e l = Snterml (e, l) let list0 s = Slist0 s let list0sep s sep b = Slist0sep (s, sep, b) let list1 s = Slist1 s let list1sep s sep b = Slist1sep (s, sep, b) let opt s = Sopt s let self = Sself let next = Snext let token tok = Stoken tok let tokens tokl = Stokens tokl let rules (t : 'a Rules.t list) = srules t end and Rule : sig type ('self, 'trec, 'f, 'r) t = ('self, 'trec, 'f, 'r) ty_rule val stop : ('self, norec, 'r, 'r) t val next : ('self, _, 'a, 'r) t -> ('self, _, 'b) Symbol.t -> ('self, mayrec, 'b -> 'a, 'r) t val next_norec : ('self, norec, 'a, 'r) Rule.t -> ('self, norec, 'b) Symbol.t -> ('self, norec, 'b -> 'a, 'r) t end = struct type ('self, 'trec, 'f, 'r) t = ('self, 'trec, 'f, 'r) ty_rule let stop = TStop let next r s = TNext (MayRec2, r, s) let next_norec r s = TNext (NoRec2, r, s) end and Rules : sig type 'a t = 'a ty_rules val make : (_, norec, 'f, Loc.t -> 'a) Rule.t -> 'f -> 'a t end = struct type 'a t = 'a ty_rules let make p act = TRules (p, act) end module Production = struct type 'a t = 'a ty_production let make p act = TProd (p, act) end module Unsafe = struct let clear_entry e = e.estart <- (fun _ (strm__ : _ LStream.t) -> raise Stream.Failure); e.econtinue <- (fun _ _ _ (strm__ : _ LStream.t) -> raise Stream.Failure); match e.edesc with Dlevels _ -> e.edesc <- Dlevels [] | Dparser _ -> () end let safe_extend (e : 'a Entry.t) data = extend_entry e data let safe_delete_rule e (TProd (r,_act)) = let AnyS (symbols, _) = get_symbols r in delete_rule e symbols let level_of_nonterm sym = match sym with | Snterml (_,l) -> Some l | _ -> None exception SelfSymbol let rec generalize_symbol : type a tr s. (s, tr, a) Symbol.t -> (s, norec, a) ty_symbol = function | Stoken tok -> Stoken tok | Stokens tokl -> Stokens tokl | Slist1 e -> Slist1 (generalize_symbol e) | Slist1sep (e, sep, b) -> let e = generalize_symbol e in let sep = generalize_symbol sep in Slist1sep (e, sep, b) | Slist0 e -> Slist0 (generalize_symbol e) | Slist0sep (e, sep, b) -> let e = generalize_symbol e in let sep = generalize_symbol sep in Slist0sep (e, sep, b) | Sopt e -> Sopt (generalize_symbol e) | Sself -> raise SelfSymbol | Snext -> raise SelfSymbol | Snterm e -> Snterm e | Snterml (e, l) -> Snterml (e, l) | Stree r -> Stree (generalize_tree r) and generalize_tree : type a tr s . 
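(* [generalize_tree] mirrors [generalize_symbol]: any node marked as possibly recursive ([MayRec3]) makes it raise [SelfSymbol], which the wrapper defined just below turns into [None]. *)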
    (s, tr, a) ty_tree -> (s, norec, a) ty_tree = fun r -> match r with
    | Node (fi, n) ->
      let fi = match fi with
        | NoRec3 -> NoRec3
        | MayRec3 -> raise SelfSymbol
      in
      let n = match n with
        | { node; son; brother } ->
          let node = generalize_symbol node in
          let son = generalize_tree son in
          let brother = generalize_tree brother in
          { node; son; brother }
      in
      Node (fi, n)
    | LocAct _ as r -> r
    | DeadEnd as r -> r

  let generalize_symbol s =
    try Some (generalize_symbol s) with SelfSymbol -> None

end

coq-8.15.0/gramlib/grammar.mli

(* camlp5r *)
(* grammar.mli,v *)
(* Copyright (c) INRIA 2007-2017 *)

(** Extensible grammars.

    This module implements the Camlp5 extensible grammars system. Grammar
    entries can be extended using the [EXTEND] statement, added by loading
    the Camlp5 [pa_extend.cmo] file. *)

(** {6 Functorial interface} *)

(** Alternative for grammars use. Grammars are no longer OCaml values: there
    is no type for them. Modules generated preserve the rule "an entry cannot
    call an entry of another grammar" by normal OCaml typing. *)

(** The input signature for the functor [Grammar.GMake]: [te] is the type of
    the tokens. *)

type norec
type mayrec

module type S = sig

  type te
  type 'c pattern
  type ty_pattern = TPattern : 'a pattern -> ty_pattern

  module Parsable : sig
    type t
    val make : ?loc:Loc.t -> char Stream.t -> t
    val comments : t -> ((int * int) * string) list
  end

  module Entry : sig
    type 'a t
    val make : string -> 'a t
    val create : string -> 'a t (* compat *)
    val parse : 'a t -> Parsable.t -> 'a
    val name : 'a t -> string
    type 'a parser_fun = { parser_fun : te LStream.t -> 'a }
    val of_parser : string -> 'a parser_fun -> 'a t
    val parse_token_stream : 'a t -> te LStream.t -> 'a
    val print : Format.formatter -> 'a t -> unit
    val is_empty : 'a t -> bool
  end

  module rec Symbol : sig
    type ('self, 'trec, 'a) t
    val nterm : 'a Entry.t -> ('self, norec, 'a) t
    val nterml : 'a Entry.t -> string -> ('self, norec, 'a) t
    val list0 : ('self, 'trec, 'a) t -> ('self, 'trec, 'a list) t
    val list0sep : ('self, 'trec, 'a) t -> ('self, norec, unit) t -> bool ->
      ('self, 'trec, 'a list) t
    val list1 : ('self, 'trec, 'a) t -> ('self, 'trec, 'a list) t
    val list1sep : ('self, 'trec, 'a) t -> ('self, norec, unit) t -> bool ->
      ('self, 'trec, 'a list) t
    val opt : ('self, 'trec, 'a) t -> ('self, 'trec, 'a option) t
    val self : ('self, mayrec, 'self) t
    val next : ('self, mayrec, 'self) t
    val token : 'c pattern -> ('self, norec, 'c) t
    val tokens : ty_pattern list -> ('self, norec, unit) t
    val rules : 'a Rules.t list -> ('self, norec, 'a) t
  end

  and Rule : sig
    type ('self, 'trec, 'f, 'r) t
    val stop : ('self, norec, 'r, 'r) t
    val next : ('self, _, 'a, 'r) t -> ('self, _, 'b) Symbol.t ->
      ('self, mayrec, 'b -> 'a, 'r) t
    val next_norec : ('self, norec, 'a, 'r) Rule.t -> ('self, norec, 'b) Symbol.t ->
      ('self, norec, 'b -> 'a, 'r) t
  end

  and Rules : sig
    type 'a t
    val make : (_, norec, 'f, Loc.t -> 'a) Rule.t -> 'f -> 'a t
  end

  module Production : sig
    type 'a t
    val make : ('a, _, 'f, Loc.t -> 'a) Rule.t -> 'f -> 'a t
  end

  type 'a single_extend_statement =
    string option * Gramext.g_assoc option * 'a Production.t list

  type 'a extend_statement =
    | Reuse of string option * 'a Production.t list
    (** Extend an existing level by its optional given name.
        If None, picks the topmost level. *)
    | Fresh of Gramext.position * 'a single_extend_statement list
    (** Create a level at the given position. *)

  val generalize_symbol : ('a, 'tr, 'c) Symbol.t -> ('a, norec, 'c) Symbol.t option

  (* Used in custom entries, should tweak? *)
  val level_of_nonterm : ('a, norec, 'c) Symbol.t -> string option

end

(* Interface private to clients *)
module type ExtS = sig

  include S

  val safe_extend : 'a Entry.t -> 'a extend_statement -> unit
  val safe_delete_rule : 'a Entry.t -> 'a Production.t -> unit

  module Unsafe : sig
    val clear_entry : 'a Entry.t -> unit
  end

end

(** Signature type of the functor [Grammar.GMake]. The types and functions
    are almost the same as in the generic interface, but:
    - Grammars are not values. Functions holding a grammar as parameter do
      not have this parameter yet.
    - The type [parsable] is used in function [parse] instead of the char
      stream, avoiding the possible loss of tokens.
    - The type of tokens (expressions and patterns) can be any type (instead
      of (string * string)); the module parameter must specify a way to show
      them as (string * string) *)

module GMake (L : Plexing.S) : ExtS
  with type te = L.te and type 'c pattern = 'c L.pattern

coq-8.15.0/gramlib/plexing.ml

(* camlp5r *)
(* plexing.ml,v *)
(* Copyright (c) INRIA 2007-2017 *)

type 'te lexer_func = ?loc:Loc.t -> char Stream.t -> 'te LStream.t

module type S = sig

  type te
  type 'c pattern

  val tok_pattern_eq : 'a pattern -> 'b pattern -> ('a, 'b) Util.eq option
  val tok_pattern_strings : 'c pattern -> string * string option
  val tok_func : te lexer_func
  val tok_using : 'c pattern -> unit
  val tok_removing : 'c pattern -> unit
  val tok_match : 'c pattern -> te -> 'c
  val tok_text : 'c pattern -> string

  (* State for the comments, at some point we should make it functional *)
  module State : sig
    type t
    val init : unit -> t
    val set : t -> unit
    val get : unit -> t
    val drop : unit -> unit
    val get_comments : t -> ((int * int) * string) list
  end

end

coq-8.15.0/gramlib/plexing.mli

(* camlp5r *)
(* plexing.mli,v *)
(* Copyright (c) INRIA 2007-2017 *)

(** Lexing for Camlp5 grammars.

    This module defines the Camlp5 lexer type to be used in extensible
    grammars (see module [Grammar]). It also provides some useful functions
    to create lexers. *)

(** Lexer type *)

(** Returning a stream equipped with a location function *)
type 'te lexer_func = ?loc:Loc.t -> char Stream.t -> 'te LStream.t

module type S = sig

  type te
  type 'c pattern

  val tok_pattern_eq : 'a pattern -> 'b pattern -> ('a, 'b) Util.eq option
  val tok_pattern_strings : 'c pattern -> string * string option
  val tok_func : te lexer_func
  val tok_using : 'c pattern -> unit
  val tok_removing : 'c pattern -> unit
  val tok_match : 'c pattern -> te -> 'c
  val tok_text : 'c pattern -> string

  (* State for the comments, at some point we should make it functional *)
  module State : sig
    type t
    val init : unit -> t
    val set : t -> unit
    val get : unit -> t
    val drop : unit -> unit
    val get_comments : t -> ((int * int) * string) list
  end

end
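The two interfaces above are meant to be used together: a module satisfying
Plexing.S supplies the token type and the lexer, and Grammar.GMake turns it
into an ExtS module with typed entries, rules and productions. The sketch
below (not part of the Coq sources) illustrates one plausible way client code
could create an entry, extend it with a single production and parse a string.
The module Lexer, the keyword pattern kw and the choice of Gramext.First as
position are assumptions made only for this illustration.

(* A minimal usage sketch, assuming some module [Lexer : Plexing.S] and a
   keyword pattern [kw : string Lexer.pattern]; both are hypothetical. *)
module G = Grammar.GMake (Lexer)

(* An entry producing unit. *)
let demo : unit G.Entry.t = G.Entry.make "demo"

(* One production: recognise the token matched by [kw] and return (). *)
let () =
  let prod =
    G.Production.make
      G.Rule.(next stop (G.Symbol.token kw))
      (fun _kw _loc -> ())
  in
  (* Install it in a fresh level; Gramext.First is one possible position. *)
  G.safe_extend demo (G.Fresh (Gramext.First, [ (None, None, [prod]) ]))

(* Parsing wraps the characters in a Parsable.t, which also records comments. *)
let parse_demo (s : string) : unit =
  G.Entry.parse demo (G.Parsable.make (Stream.of_string s))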
coq-8.15.0/ide/
coq-8.15.0/ide/coqide/
coq-8.15.0/ide/coqide/FAQ

                               CoqIDE FAQ

Q0) What is CoqIDE?
R0: A powerful graphical interface for Coq. See http://coq.inria.fr for more
    information.

Q1) How to enable Emacs keybindings?
R1: Insert
        gtk-key-theme-name = "Emacs"
    in your gtkrc file. The location of this file is system-dependent. If you
    are running Gnome, you may use the graphical configuration tools.

Q2) How to enable antialiased fonts?
R2: Set the GDK_USE_XFT variable to 1. This is the default with Gtk >= 2.2.
    If some of your fonts are not available, set GDK_USE_XFT to 0.

Q4) How to use those Forall and Exists pretty symbols?
R4: Thanks to the Notation features in Coq, you just need to insert these
    lines in your Coq buffer:
======================================================================
Notation "∀ x : t, P" := (forall x:t, P) (at level 200, x ident).
Notation "∃ x : t, P" := (exists x:t, P) (at level 200, x ident).
======================================================================
    Copy/paste of these lines from this file will not work outside of CoqIDE.
    You need to load a file containing these lines, or to enter the "∀" using
    an input method (see Q5). To try it, just use "Require utf8" from inside
    CoqIDE. To enable these notations automatically, start coqide with
        coqide -l utf8
    In the ide subdirectory of the Coq library, you will find a sample utf8.v
    with some pretty simple notations.

Q5) How to define an input method for non-ASCII symbols?
R5: - First solution: type "2200" to enter a forall in the script window.
      2200 is the hexadecimal code for forall in the Unicode charts, and it
      is encoded as "∀" in UTF-8. 2203 is for exists. See
      http://www.unicode.org for more codes.
    - Second solution: use an input method editor, such as SCIM or iBus. The
      latter offers a module for LaTeX-like input.

Q6) How to customize the shortcuts for menus?
R6: Two solutions are offered:
    - Edit $XDG_CONFIG_HOME/coq/coqide.keys by hand, or
    - If your system allows it, from CoqIDE, you may select a menu entry and
      press the desired shortcut.

Q7) What encoding should I use? What is this \x{iiii} in my file?
R7: The encoding option is related to the way files are saved. Keep it as
    UTF-8 until it becomes important for you to exchange files with non
    UTF-8 aware applications. If you choose something other than UTF-8, then
    missing characters will be encoded by \x{....} or \x{........} where each
    dot is a hex digit. The number between braces is the hexadecimal Unicode
    index for the missing character.
coq-8.15.0/ide/coqide/MacOS/
coq-8.15.0/ide/coqide/MacOS/coqfile.icns
.=5{g}oJ?tG㈇Gzv/˾KR麷rw}{QiI7OWa{@(lBx,I`YV~Sm%"@T/A(+1.j̟ZDBh i |PSU>Sշ-Ǿ_/xOWǜҨqyp ŦN\S/z25YyW[1Qv&!Ɂ֫K1^z^U&}έ"槮dׅ/VnOiv|s]~Ōpr:=Q/$Pذ/"r ?CAE뚔mm'qTFI<g-?,\uUڡG_;~HsCn]* @ iEj|fs.l"sҐ+5dtWJ T…2a7zEm?MT^1yn:Qo:kS(s=?.?r0р  (B?sXRK[EȥF\\??XcH?ԇH]jm)yיgltܳYR%˦뮋r~O)̲z<' >WyϼviK/4}O(4 wB?/s>䟂?2*B> w)q`(&.9[;910$R7ǵ;us\U"E `M@K{@Yi\W/Oφ:.*,{h;vޞ}Lo?}gCCvS_e}yV/js?Ge75~.%lz@M{ id1ãW%T8pEÅh*%տ!"ZPB4 R+ïG?ǎ},U:XُVr,@6o6UPGyl+lJ@UqeͧRe O}{^!Qg( t~9\+a^oΧ m7h[T]h_>yȐ00Ct!uʤ< * U8CuA-7 \FْY?J#GT??D;~15ܥǁM`-KZ6΍us=l@GOq*lquZ8~}Jes;'|@׀Q ~|}Syp Y,#']r,g~='?ICyCQvma_?g?@_@4̽^Ka`ngmGL>9\wMQ2]|']ǚ #\ \t#!ibGu-FЃܳ`? SO)mlnYǽ@FgWxgog(xS ~d LpKN&`trg:? wPaD#* V吏9לCȬ>b)qڜش :&pB@b@ށ(u}A`4=(&ʄ!}0z"⾾>P xl5)Kc_jO:ՇmsN8[O\ ֏)C: } 􄥊ɻ7ymÁwgoF[ Lؔ#htVx{cm?66| ޫ.(|0fo:g¼q쵾,z*ha~?D?, Y9Gh4z?g>S!>-]GR58ίqA`׀=N> EXw}w:s، F'f*o.?\ PǮz"@2n~\7:8t45`/68޹ovue󚟏~/8MɁMtl4cǎeЇJǏ;w X#'ʀ7z3po/S5?_y]χ :tA˟^fD5.7Ҍ:89;siSп[{믿J2 @ڨ)pΝ=P'^|4z6U,@?8tMonv?hԳ3#qpʁ-ChSOe]v)7J|xٳg3}=4s|׮]M+(}wQ⫿Yv7fw6w:X8}/X?Ty*R#oxx8{gM(5ъcXG߻wow˧C}h>vEVUqC xVvVi?8h΁AC~6סÅpzlyٱaA98h΁(iuΉ8s &|i&LD328s`5}\בrr l .зD3G%7lpD@p 8و#@p 86l#@p 8l4B@p 8`Y@p`9 F@ @(28́P6"@p 8lBG@p 8h-@p`8 0= Fs}&WIENDB`icnV coq-8.15.0/ide/coqide/MacOS/coqide.icns000066400000000000000000011757501417001151100174670ustar00rootroot00000000000000icnsTOC `ic08pzic10Jic13pzic09ic07>xil32l8mkic11is32s8mkic14ic08pzPNG  IHDR\rf AiCCPICC ProfileH wTSϽ7" %z ;HQIP&vDF)VdTG"cE b PQDE݌k 5ޚYg}׺PtX4X\XffGD=HƳ.d,P&s"7C$ E6<~&S2)212 "įl+ɘ&Y4Pޚ%ᣌ\%g|eTI(L0_&l2E9r9hxgIbטifSb1+MxL 0oE%YmhYh~S=zU&ϞAYl/$ZUm@O ޜl^ ' lsk.+7oʿ9V;?#I3eE妧KD d9i,UQ h A1vjpԁzN6p\W p G@ K0ށiABZyCAP8C@&*CP=#t] 4}a ٰ;GDxJ>,_“@FXDBX$!k"EHqaYbVabJ0՘cVL6f3bձX'?v 6-V``[a;p~\2n5׌ &x*sb|! ߏƿ' Zk! $l$T4QOt"y\b)AI&NI$R$)TIj"]&=&!:dGrY@^O$ _%?P(&OJEBN9J@y@yCR nXZOD}J}/G3ɭk{%Oחw_.'_!JQ@SVF=IEbbbb5Q%O@%!BӥyҸM:e0G7ӓ e%e[(R0`3R46i^)*n*|"fLUo՝mO0j&jajj.ϧwϝ_4갺zj=U45nɚ4ǴhZ ZZ^0Tf%9->ݫ=cXgN].[7A\SwBOK/X/_Q>QG[ `Aaac#*Z;8cq>[&IIMST`ϴ kh&45ǢYYF֠9<|y+ =X_,,S-,Y)YXmĚk]c}džjcΦ浭-v};]N"&1=xtv(}'{'IߝY) Σ -rqr.d._xpUەZM׍vm=+KGǔ ^WWbj>:>>>v}/avO8 FV> 2 u/_$\BCv< 5 ]s.,4&yUx~xw-bEDCĻHGKwFGEGME{EEKX,YFZ ={$vrK .3\rϮ_Yq*©L_wד+]eD]cIIIOAu_䩔)3ѩiB%a+]3='/40CiU@ёL(sYfLH$%Y jgGeQn~5f5wugv5k֮\۹Nw]m mHFˍenQQ`hBBQ-[lllfjۗ"^bO%ܒY}WwvwXbY^Ю]WVa[q`id2JjGէ{׿m>PkAma꺿g_DHGGu;776ƱqoC{P38!9 ҝˁ^r۽Ug9];}}_~imp㭎}]/}.{^=}^?z8hc' O*?f`ϳgC/Oϩ+FFGGόzˌㅿ)ѫ~wgbk?Jި9mdwi獵ޫ?cǑOO?w| x&mf2:Y~ pHYs  @IDATx}\U;wiF`ChX]}>|Q}>*vEA4;!=n6lz3mnd72{Nrܹss~_;0t-pߑdz9: s@?% K^,-;u#MF}Mܸqz]evm0JxgL<Ooݺ53Ϥq2XS`8)@ %طmC߰aw…n>ыd뮻EMc&R@@84w'q>zOaQ艹VS]U VTVܖpe;xq{cKݞj[{<|F(8XS2o$_h` q>+W9):2^W?vv]mI2 o&Rɔ+IWSF"f]T{Cm/msx͆lۆboj|?PyT3c3pY8I_}V=Zuft\=cY)ɮnk=̺u"HF@S!@ Yf)8hTh&'sOJ>Ӧ D2$y:v]vC>.( Mc3HߟN2P`6qA` nl^ڝnD!QDmڀf h b  Yjg<l:Ҋx"wRۄUoy, #Τ?>~;AO G=-t2I 0+zC۟}q_6vD(Ed{vf h R;;7O^>R kݻKw=ئ`ڎ1c ')z>"%~<$=`bSWPSUI6nEf&gEo}t7}n3PH/hB݆@b Њ@<>N/tYF.X<pM t dG dZNa `pR$P-~ h7aF9N ApZb6XS"Wwߛu_lzvpSҰ71=_~_c, iNyО; ը̸*j+T۵k(Tw`S&//~ݧOM>`"RGǻGO!^Bh&)F54:@h1#Ab}f'90O>=kڱj+Pv8:?p|')RX$#gלK].UHi ЌD( j Y98ȧqV:ۍ4jWOV@ Ddw\pIcN@W%hB248NmWrA c^N306́{?m~b?4.[h26}o:6aT H@h&"S WW?yiIى`>, PAiTq0M1Lx͜7TRj\ȊClOwL РX}X2X z1N;W6(|ݗ  ǒu= \/~}N R9<nG岼F:!(AO(V_rh +U͞4@S @v"z D\E1=&ue`4 ͨ4ar|@k$TrCs= ڵ>o cb-a7p_?dF?|lAY/F?}5詸,}dʡGyOc,-օ@Ih$?PA3\ӹCnQnF[2,_p]?5Fs"qqqe!|X؜^@/ipLVGP@3"7O.JYSID`ᰔ#ER5AL.K_F%9D/X0v @,/lsT37o*!$W&%n:85ؾxQfT,P;Jd"x>ǨgFs1dE\|% ( @_phKt<,erղXS85|Aَzg9w@z25wU{ , }?%(( [ y7S^pC!QQ (ȵT>z |a nq2't,Ӛ8jaݪ6En bOunYq-$S$Eٖg R5I;M/ U {9ˣ|̋S4&Gؙzs;5c{=hPb18#}X:A"A8<>6(5Gu$Աf.۵N[dv*O؊:"5pTkl;pxOpL47#v-h3E"ZdE]Df=mM>J%369PDrm,¡cP,duGa;G?/;_Ux|>緼^3m( |0UQkxjƻ*L۾ɴMFPl,}*EIp=VG$޲p+NH_g 
zN`Hcw6,.O0/؇11GHp<;臔Ȕt4:zg%%Պ,?b6Ft&J:\G{'xKΛ=q1Y㧇Ʉ:)*4j!0Mw$_V=(sHSS * ;6h.qM'o:wb ;Nn06KX_GvƎ,\fIᩝlxM1r67Yƥ82AF "G )P J!:xK^sdkL=51X=tAR/9KW!Vy8 6)/(--$1K׌S*_rGkGw2|/ln0#i2P`>+`_i8. I; 0 bfVԃ]|~孙733EJ}2.#H`2 @ tkk+Ju3p9 _]z~pD,Yv9pHǻ4Xd̟LEDZ!74hy6fkg[tOAe[)A'ɂ H}y5L[70Az! |Ҙ"y7W7MT*mC/zZ yx%oasen;O3H&"z RMVܿtwӯ7G`/W"a+KB3M6ñd[ /32ob;tοeloC^" |KHN\)lds[D2 $=nvJf@8o`<lqd>[p|1&׍/Í(p,lqPc.dI7EdMz^X>D3ϛ}6sMW8ѷm]j=md){Sg!0!+Ǐ(o "R Lĩf8N/Ҵ0w\SC Op3 9FC3+jxGl0Aa3v"ץ?/\h|F%Sfʱ@ `^Qhop<Ӆ|BT P˿=Рa2b=V qJ/929qaRs}m\P"yN4[-pC_l?8cb_UzsDsy_K$ac ϶{51n~}sbzjEm\.Psc$".₞`K-.G ~H-~5h(~tda B1WKąD6p~SӲ+Oz`qME<ˆPvn6X6,fRV(PWp zg!Э( cX}>4sD^o.RsҟP C0)@)H@ƎX䑯II̲$ng=cua@ŞH8&3pX,~2.^ 3nwS/>j] p~o>3EPJ^1(D'͏xG˿Җ &V~S)Ps3]w(mgOD sR <ҌکDei0/桵a?f ?/_|}:84XD# dm~LwFc>\~=hs|~,'RڋӶ#wX}d*Q7k[0t~y0O;%VuPbTΤҰ P-&(H]7\O^wc?<5L2QzͶ >?}_=tg 629N˴_yi"ቃ* B>u/w( q'ceO4jb>OJGa$L Ё~㗞{*ʉ+HEKr髿|DsϜ~2嗌त+A0:E熆ƍս8Ǵ繾=d,*wZmSB?ƿs͇ۢ%! y\C4eG Q%LTP4vn[? %hr/#ĹQSݧVCN{xt8 8oaOFG4=DqUDsa>=>%V p=%z44 ޔs[O)L& |w<}Λ eWnW~Ϻ)j2Ո>DҟҞd:鯤0H@@^{-&]_=(cyؑJFyumzS^^nv"Maxgd'i2Aާ!R2 I'b `SMl\cᅠ7-G:⡀OKhJ`nf˴Jn4asԑ ,cZe˶g;|F4ɸN R3𘀡9?$>SRxժUc+&@b,X7={qg3wkgL2 #JB%Тd hdʹhpnFv1ƹ3p{cGGMn}MwyŬS[# }&=u14@5u2 .DC_³-o']I̧NIϛ y3;Jxk0OSwH&M;RJrҔd 0, N$$ sƶm|fJ,/SVs'm0u^K)JE8V)<k̜2A jFVh<ݸЦ;Xvʧ-_եl2ΒdH -0QDE6𶶶w/x)X4J]ni@ڴ'n JfL?1q9gЩ8mdr0oW͎7wM4qe‘+D,Ip.SAoT Ma,hB'&)J@@eL ,hJIuٺk疼ݏ?%҄8[RwQJHk.h UkH[x":7v`o*AAZfr 66n;{J_oЌȨo>6f |3tY^P} ?={v-?ҺgϼenK@FdP9(4 }$Kb%6D#?!{ i S,'``M3_x^Ç>wXz?\lSFR~ 68a <A3~T#%YfѢEhݭ5o|+5յh"r\miÖݛްV߲fbY$OEhYI3qǴ~e|1 <cjlL]dgA3-0fvg)mYMXpࡖ~:gKlXlQ,rHj1Uu+2U)a ;ae ޶/\uƝOGskaj>ԀXo;[?HE .b0vo_:sV&c{Z;6mٳw?D &@-4%jj_sKGg8,/)C3<\l`CR6l=d<{&GrM/{KUXj,:,0 2]]Q,n3o?=~ϢFH& ΌIIp",Cv)QҳLD=46L6CvsN Og[/#(LRe-̔0N'[ﮪu|vVwWgO&@'yO:4y#6+ap~AE%`#iiؕIӍ{G ;m(W6MWHI`!#`à$}y @ ,{q˒H;lrԕ{$& #r,i9C?~8e|'bv:bC+]Aj jjn7|(Mľ~%Ujm/|2w F3EWk'_Ip /3ؿ\0A؆{W~qcgTt Ώ.9ۏ5m'qߕ b3?QΞc.,- &>7}]^ÞjbP;(jM+)҃ iֻ vU0Y=`HBcK|SO\p4v3A\Q}!53eY13*LGaJFL%ťZ= P~jHmFjyz~oUUh؄8b3긒ʬo Qpk'~Co{ʌ! n:z8(;|Cd&fP_{^|80vH]՟k0VT|+7l`?+nyJa Ԑѕ_RЍqnlfuoxܓqj2dMN # i (g$k^{˷^ՕX<ÄAg'p ܹsy:`I;k4Ri;0zw8Kή4okPqK #XZ3+n<#3ȏr)5O~Wxie;& `J2Uxsr|L _1WCw[^ᄉ1S/iF2\z]VLB1ktΧ-Nv `pGS'y&|+MWl@g 746o<{ vbC'|߹RdrS'50W 9эtfNyYI ,FQ i;;W_ӗ`ovqeS 879]o/ͲG) nʍT2 6cƷI<.5sA ,`RNPfبIgcQ{ <+XDžO$J p?6/.G; \Uٷz9L TSUqF8c%| I <*L Ä3t\4X8F [q۟-+ yƛK dwbdqwm\jb(y傂>C휘dF"ם7kZ/CȴZrL>͟yj3OBFZ !A5E)gۺʲ)D2RK$RP [}ac KS4&@@|!hݴ#n㘾2>\<@+ SRy>=*M9a,?#ώҔPaS?CB%4B-h/P(ߝlPTLꢪ lvȅ[rp<߼cUk&+i,g`Mh[y \,힪.-wzD|=L @2[j\ h|;&$R0:;i *iz`0? ;h`q=?H6H_l{C>3QE&5%gX4 0Z \1,94`Ll8?͔QP }3cO'zBHY&NyF'-'B< jaZf`cVǞS $GPs]BcRh9/nu1f&ِ<:;#Q߉JYj+@2WádֻTeo<5Xo$+$U6^끆&O/&P EfmdV!? N@t[!(wP~Zftȧ@mMbh~mL?:3]\JDnO| Hʊr2+'EuuFpz}e0|hHOҐF 3H(R jJ9o]3eSS;3j*Έcb ,C:YH>2U 80ct? /:iڪ-}%A]~y6\5l놟+? Y(4J[| @2(T3sU暧3!#J+d$ P]b caAv\VuIQβRk= @LI'׸ pTO@S*+MYiQRFEY W|i̘ ٞ4zڤڳíƪ?'qC6:)te*ײwfg75*86mZ› )>PMbKʊ? 8ڏߴ>팪ʲYX0ɱ'aդDa fRb#;Wc(1W Cb*F@?΅υr"Q cEܱ\%4(69Le)T8/i%%~M ` iLME9c֔׳_Y(KGGG\ D{n@qg{_tƏ =ӦL4 OUVOx6r{_.̱B j jT|%~3m01ǭf ,6i3'Wb0\1 [cWVfΩ*Ê78ӘyC \ +)x; #p R'T۴ӱ3mBWSl?I02ٖ;}\r&^}'N)e4)WĶm{pOP4N@aqF(`̯,Hc Jn@8UM̨EȌ @L߹g^gX\KR %{'W_5g5r uO ]5:723J~7bdnBuٓ+6nb6'yrKWj<~ɬS$r[ `"g#qnƊSǽ s;*Y;dH @mjasWϛ0 p:oxuؘ1hGFAKtpww1F%`wl `UM4]!{.jaq AZ%Cuh<;0tGco`f ʌ[$ 8aM0n1aj1W\8;LwǓ\ם0]6Cs>ڃҨ%ŢDkk;MJT!SV)) ?BFmrq-̨3SP}Y_ݩ{㲒 >؟o൓5S?qni9p[N0I HW鐳=_() wG{8% fP\LPW_~f[w#L}}~ت-I'tH8-MCQp֕_~\AIhc@)cMO^3)) I& غac`qKkw| 0P[0 58<.xpf>~P . ˽mזDP!DR](4Q[܂۱+ zvJ-zq s@M ?4zo wDɘrq kPTOcj+/)'D=+)zRrg NxyVe;s2WfZ g0QOO02 /;? 
q}SJf#]w @)wwf1SC  _e:z۶eemw0׎'*Y?|%o:!Rpb@sgcsGXN@/j @'l~p6,\ЍܫW ΊF M{sȎp 8MƎM$fny|Qh|kbLT7~tO|tlݽpO#EΔbZJtJ}8kK;YVQDugzqT t'y,&뗱f\=Yk`Ge\۶m͚5nEթ7N=~8Ҝsd P߃S//kM8Eʋw2a  C+ FhƆ  iyof<|w2ᒲz;nZ./Ƒ)ȆO?VUN@sҕ"2m\ ҟK0o'*`h]zЯOF[K{_XM GDgWW(5H葼Ʋ @ŗXTI|gZA3V@Lgџug׼&IݩH>yƴ*hѯ=&cϜ93p];{&x2ĽJL87aD`h/ƅܶvVt*J߁ʛvuEn8CMlgȠv2byYXˌƣVD7Q$85m ZMpF~zmWZ782E`R&%\n'K5J*؆:ha`Y3W+}9mr4Ǯ\F%p Kojܼؐ}74cx LHP4Ak}Tgώo^/>e|9X'r/2Z%"D ߾[}r񋻠MA*xG9?2N C?TDzgۆšl9ϑz(pvIOo_gM˗Nv,L9ݬh,$! i0LS̒KRʋMG|ey߳3"b 8w2/5X53vK#Jy\T~a($y@͝;7{}?5o9PZiEѴx!p%V%IDAT׼/61fEim^Vŭ44FMJV)6u=xPpddJP'nV?H{?գG3\ UK?+qc񳖯,IA؃[A.hb`"{%T2r8O}!uȅ.fYj'1,?Lp0#*_u.YHi%~{P_CA̿آ#v#XfҟN?'y@ꪯ@U#.3їIY#$RRjpa@"5<qb0SJK"xږZCHs:5N +iv*žߌBx0/wJoBㅃ< ᥺no`yF&9^VxÉ)@q$ 3o~3+e=a#M)A7-eY1l4'BE}0zMHWwnoQsJcvT=y) GG\X"5ebm\0_ H8s7nnnn NbPC&D2x}罋VP;1?U?SwY\a3J5?YSk[te+.ptq5')-( A7.kO:z"-$( ft8.n^YZj-_yu7޹՟ TO%P>!gX 1DیFxj/yjbp #W/ܦ`@bLìSΜ=y2 #~22'i<40}?ޕ`޻P'jR৤'ùH@Л<@A ȸ8Ǿx^qh ]T=cGĠivniYi;iSPf@)MF+Ê (\pO`?l󸺺#/~tD082ghxX龯^{5Ȼ|6Ǎx8K a*I%P>"2R+ug̐ŬXts:쫮o $ZO)/4_g Z8ZgH҃ܽn ;Kƫۘ̒NU EBOW};&bJ X=;kjKVmQsOOK Q Q5JM OT<6JtUv*l<*bӦ`ڪ0tYȆ8Lz[z̃:۞m Ymr= >NOہR}n`O57=M`'n@9( f[OO)̀cea'wGU^~1-^?yA)Rx.H=xm~?dx8 Ґy]!!"TYgWp7^~J'7,[i^tne۟Sd ~;~2Y$? :3eGaƑFvVlXSKנS'ԍĹ|0USZ'-ݎ_¸J>sq7< K?sZP& ~v]mud$@ơL'SQ^b._iGDO gh~PSU|a8,~?BfBS_0i6b]׿^sΌ)SKC^1A2s} P0.];9gڱ `OO9e.%>Gi^XKR ߌ?c϶bFZ' h֗H~͟/'1H5 j0Jdp26*N8+/y~KjKO1ih,ypsF"h17YYV|o` D~?qhZ7WfO=6v?҂!.H H>W 7}Vmp 8J節Ȱ"_~s8]a4!I"&d65͑B0a+>yEϼX(s2S$<>ahů  1'UY^~ٕ[>_>՜׆$t;/+ Vk.?GȪ` ܷ%kރ͸[3bXovpAHP3/v?SF{h" ;#)F@)0WZFP FM1ݕ^$^mc4mH" %X3-5럄]nBwH|KSrx"ñ^lq%H{7M<ѽ:t=rK^k누|^;>ϋ/-MK z4~P}Ќhd*sޅ 0-ygN{}Lq b n(@GU%]X_=[vN:v)߻Ο7 #ѽYAϿKg]0t6F&J sn=ԲȺ}[P/*yRͧO &EK*R3!`Jciddb3 +8¯}⍗4`G`Ht #s7r`_ #(7ҘS[\O {9_ ],h:rb>|;Vp쥕g@0sqv;yrӮ}~z<{><7\T޾;oOn*>=MTޕ&Cv0 ZKQ @eZ~`G:ٸ'mn9F;R_xH}G;U~~D w|~SG@@5'jz3Sjï~wɳ]cO2hDduEY~/{9;oΔ?m oz# (7FGZXca'F:v=/߰wɊuqD/ *;%S[KtםRϏI}L{I) t8W zB>|58+lc݂c Ì$fYwox$b_y{v<8p\xT?|irs羦6h^y[u$(scwH{^&@F 3߼.#pLJ}Р'HC9B_'#G!O)wr̜tf8r'dzC믪(q?hՖ/|O`VU3929mԬt<`%dkkWHxKA*)PӁ1x@uF?A/Q{|^|I_icӇLfOsA9ƕFNs}}~KMk9(u~7\"gtgId/R_)AM H|gc F: ?Q2q[]xM^\ #R8-mݟOlm9d1wWp*-A@?\FV1"Lӡ \8ߞ:#70ax%%46 o1p@s^_sG4F{Puա&߻5wOU@#XndT.h8)A@XiH%_@M Н)yuj @f+``JaO ^} ӧ>{p$j{lؙ!W)CK3P 개d\L2pv~ZL!yx~>/g Ʉ.Ọ?S0r Փ}r,9LvKS=ּm?!9F$?G@M>_& ~#|!3 BSD :fq(5g.3;e%ƅ{1'8o~L| v>bùbS Vj?~N@@yx3}d4Iu8>}}h) )37~_l!GTi#Ioġ]j`jHkgxɪx@%6N&-Rw1a(PQ%( skV8ܺɏ5+_A )h?;{lt<7K.*CS@3o ,Pl3k~"Ķ[٭ .r\Xjq4$:%?1j8)C¥\PȘ0af 2Pe_U~҉df%?>PAkZs~hA'PpZ2ic 츃0EgJ\ΓF$ _iZ(КWXDW]uUť*ctu_l $㡀ȠGasr h `I<G'#8F?N<; 6F0g~'E ƃ~,щ!MDK<֡(506fժU]t]WDzXay0;P` 8򕖄0 1;SVPd@Uϟ~饗B~-`NM,5ss|ܽrMW|1" 9'QS/a/f2n M!| Yu㐸8#~PR rvzÆ )SD>o''-m8ҍC2(Vġ3'`??g[aʱ0J̈́0 `(:<72 .tϝ;7simK)vqG<@'y+.@2c1;3uRMgLh(t;c#MFrN?y흖78%L%0&ߝ8{tC7?/ϘpB e$#c4#mv2o2؅X#w*&`S_L7ἆ@"!,ȵbCPPGPNc>_Q G Vf `knW$HM_J7o?e'9W9BF/mt5XC/`)`X%  (!q1L-88Z@o8g@)T ~:fϞoo;yZLڎ[JLI;.- ۟y?{p?SndlK= 0B`~-H'0&(YGLwL Ӏ3lbvGbvS몃=f z&8KQ#t(G3}\{G~v$T(dm=3T}v z9љ|^|7bM$G62EJE"fk1('N@,N?9;_|}~dʏNDτ ]fxe/wǸ^ %>5:sb BA9t8p %0oݻꇖ-A sLW,H(fupDm D)+ijҟɩiWxוx׹QWT|?{xx甇Y1M2"MJ BDۜypg-Ofڂ%P0_A3x"=u6$@oZ0~r)XnT;dB[ 0c L#XIkkQn?((AJ fih"z `jhjpݳ=j{/#E@!W41W: v!yL@h3@Ѯph0luS޴m%%eF<>p [NEY-#,P~ $0*ՇyVHvWb9D٣a,ɽ̙0v$T%y ~Ѓf\C^;*Fy=j" &sX0aE݆!40z9,, 䤠hLy^"fTI{ޓ%>c4~0AQ@3aŁxwaH%v)GQ GEBێ>?E7>%G +v4 @UWOn$yoyF@RDl3c^[-( H%TFsa0nThXLG5{FsշI%U`@uZTNHaH@.j\ rc91ż*J[nΏXADD>(6`Um.rÑ\ =>5:!w.V}XH `jc{2†{mİ۝$3\{u¹9H0PcIc0;J(zdR@3aY_\U%F<CA`yu z~aOY4j.Z`oX, GN~fC\P(vnivףhp[%D2T"ef$++5#RFƇQ@qP#Nyhf@x(U4 U" CF,GȠ<Ân#" ADpC!P@k_ 겝^J@֫j$ `E3Q r@2\v ҠQ 9h0CpP2< m+)kC-P5aT A (M-fzL<ε M'` ~~C v au=QB.z݌Di]߮J:"AsXi.bƴx6s_B@سx(";YCs`d L Oh=і1NH}+<+=F `1? 
E.oy~ŵ&ψp߭߿!bHžU΄y䯌1_{5isf^ H@$  H@$  H@$  H@'haIENDB`ic10JPNG  IHDR+ AiCCPICC ProfileH wTSϽ7" %z ;HQIP&vDF)VdTG"cE b PQDE݌k 5ޚYg}׺PtX4X\XffGD=HƳ.d,P&s"7C$ E6<~&S2)212 "įl+ɘ&Y4Pޚ%ᣌ\%g|eTI(L0_&l2E9r9hxgIbטifSb1+MxL 0oE%YmhYh~S=zU&ϞAYl/$ZUm@O ޜl^ ' lsk.+7oʿ9V;?#I3eE妧KD d9i,UQ h A1vjpԁzN6p\W p G@ K0ށiABZyCAP8C@&*CP=#t] 4}a ٰ;GDxJ>,_“@FXDBX$!k"EHqaYbVabJ0՘cVL6f3bձX'?v 6-V``[a;p~\2n5׌ &x*sb|! ߏƿ' Zk! $l$T4QOt"y\b)AI&NI$R$)TIj"]&=&!:dGrY@^O$ _%?P(&OJEBN9J@y@yCR nXZOD}J}/G3ɭk{%Oחw_.'_!JQ@SVF=IEbbbb5Q%O@%!BӥyҸM:e0G7ӓ e%e[(R0`3R46i^)*n*|"fLUo՝mO0j&jajj.ϧwϝ_4갺zj=U45nɚ4ǴhZ ZZ^0Tf%9->ݫ=cXgN].[7A\SwBOK/X/_Q>QG[ `Aaac#*Z;8cq>[&IIMST`ϴ kh&45ǢYYF֠9<|y+ =X_,,S-,Y)YXmĚk]c}džjcΦ浭-v};]N"&1=xtv(}'{'IߝY) Σ -rqr.d._xpUەZM׍vm=+KGǔ ^WWbj>:>>>v}/avO8 FV> 2 u/_$\BCv< 5 ]s.,4&yUx~xw-bEDCĻHGKwFGEGME{EEKX,YFZ ={$vrK .3\rϮ_Yq*©L_wד+]eD]cIIIOAu_䩔)3ѩiB%a+]3='/40CiU@ёL(sYfLH$%Y jgGeQn~5f5wugv5k֮\۹Nw]m mHFˍenQQ`hBBQ-[lllfjۗ"^bO%ܒY}WwvwXbY^Ю]WVa[q`id2JjGէ{׿m>PkAma꺿g_DHGGu;776ƱqoC{P38!9 ҝˁ^r۽Ug9];}}_~imp㭎}]/}.{^=}^?z8hc' O*?f`ϳgC/Oϩ+FFGGόzˌㅿ)ѫ~wgbk?Jި9mdwi獵ޫ?cǑOO?w| x&mf2:Y~ pHYs%%IR$@IDATxeu?e [إHv1nbbOF?S j]hb1jlEA H/²?g=s˯+W Zwn`g_^lW[%B/TUr f*}ιiWM/mTyVv[TZ^j(P @CClJ@ [M:W>\*UNp}f>lW܎JnBe*/W\`~ہ]zϏ>INvTyȞ&7S7YƲJ0` @` !dL @vZ.+&/ r ''x5G|c6e~aknu_|Kw/$e=^C0 ar#?\>B)tmf9Y4~r\`joȅ[MetwZa|MD  ЗfaR#vi?yǗNJϬ,T](Fw$=`}z]?>W:媫TWيͶ]/P@L@v/?BŮ rʆF Gڜ{쟾{ʟ[v)W=IpQuBe&$m¥7[ګ:$ @=#@g@uKҋܨ)~~ {݀qU&}S_?_x+>ە2PTC(k`@   @';BȎ@+yNQCgwl^u'v;7_yWp|N@ۆd&c4km|=M^%u8@F@P3 l y'>Ϯ:r?[{Tzp;'fͯ _񞩙tO]:Q /e\mid$iƼk@@   @+t3@Ȏ˃샞[(^7@;΋9GOTr{~mE,;TqkVj(P @%@x@v|c&^7ʑٍS7;gξs3;9Yo ~ݝ \q%U))*oTO#m_${1 dL@@@|B{Γ?n,朹m `oK9-ɒlm@@ 2 h-!dJ:W}k77DVQgGX^|l`l6 PYzxOz6zHZb&PH@@Ύ2!qn ο:IRVuBڬP^lD  Ԩ@ g>ʦwSFsN[Fslu=ȉ8٦QCm}zw:VzZ&H~@HK@ZRԃ u/;'o*?A.zg;bN?Zӳs_|=O~&uUC`6ӥP@ HDD@P{8rBk&kšKN+slM`@ѽS__wg/!G@tyڴNM@:KRVz(/j"$ hP@m_qk+ޜ+_\bz7;`D<`5FUiRP9i0`a;sӹy7iY:ej.RU2NZ"PC@ Ă@{7LZpbvWi5`wov>q eweY  h `5}Mo;Qyu\V P^!)6IҮ!h@ P#@@6^=~Ÿ ?]yKN9!Y'{;)՞k}ջ_333s:".N6^Vq @@ @_zٯ~xtcNh:J`O}:053w9շ;6: AIVn.Z"PC@ @N[*-=2~cj?=͌2/_)A OUlW󟘙{6.` ykӼJHIQrt\UI@qF!/=\~P*#quR9v[]_۴jK{*< ͳ>A75uCRvnK>jWJ%ID @QFtól@}}g[?{y/)ϲc9';?Ҏ)smu]o7vNJ*තw~񳎯:qRs-WJ_dFZ&RkH` $@`7:K' >Blv{lAvNW VTmr]rzĂRo\力۟|չO+Q!)+9ROJ_VCYˠ@F,h8'qZ=-lQv^L6Ǫʗc,@vUf@W%׸?muC8j9Sͦ.jrE $#Y4 q_j֭8Γ ǿSsok7vhw:H8=)KW޾uN X]ˣ8Pξ%i^ڬuWA% Q"@`6k& g//>̣k)9M8KG.Oӧ&u6v{$9J$) G]5uW)vk^VrERGի6 @D(mm 4E@+>%bCdSuǒ1e?|X"MG.>PUP y U{w?:e2q.RjS=$]5@  m?WǞ?lH缚X?9nofIl]욚hDfH H*g}W}ܬ_|_zW]}ѭ=rf@X7\ >] 82dOcZf8iIn@wC6]_{ߝa׏u:QR~)WݗIR닅nG*A!,he8IccϸV7ײvW:欿l" iGʣs8l.i'n Y>׼u.CAqW)6L5_4YjsBM+#!@B0, a! _3^~?wL^^!.qkmۗ-C,v6>?^=M/ԓZr=U\*i_rb_򹖼/gRU|h! 0`X,R8m/;zӚu^kS7Ʃ֦q'N6n 4K~Ҽ:59z!>Α3s.xӍwd??lzߖ(gEZ]z9sm :WRVMnmi 0  Vd @|x?;q qeeD;"-:x`@;]p|'?m^7Nի @o 0`H7,˂ |^~x3tp% Њ%0kSl:\a+läJmӺOy Knm. 
rMtI ,ܮ H pɇظfMK'9׶qΘ_;\DI3E{'f~~V/qa[9x&.*խ~utsoj9Ań a#@`ض( |=iXpc*q8_vg㜯3\[Ku#7 ?t[]@,[ʯSuV偀ITgߗr"6++~guG  %'I 027_1%wr;N;Hb;[~yu5߬3퐃O@}ڠO~6WNm~~tG(t׼[K7%\̙w00P!@`hMB $|[V,}9+'%Ս*LTS/#k>1:V6ٶI@?iV_c]7Yv'5K()O?# H 0L d-@$K?|+ƿNY) Y&jp<GsqBl 6~F>{ӎ{ճG-tu%8`QG 8s5YZ  C)Y EB+19.N]zR.P^s?OsB'9D}e_*ʉq, -K@ُ̃l] @a+@ ;㈱1 :D;=Fgeqs_bZ'oGJ@>CGɸUG絿VT_T/uEUy.ҟiJW}1q+@B$@CInˊQ99ܶ(_dzDȷC )jxұ<8h7Wg_[)}ԇ^|)2t@ 0CqyK*svWQ?0pI;'owf5.kuuL!?dSn:TBH@hz1y@ 1W˕=5<ޮ5y_oiZg_q֗[?JJH !@N@o!"p5owg垓W)Qi|ք'y+j&/?֑g%mA'\..u)Tׯ8uC }e_x)S߲ѭw|(8uGt[wuߴvŨeI5( 0 Fb@QV,b4:9Asp֜`,y ྴB!_iY@ud+AqH}J%\@ hc1U@`ݓ -V]pb w?7eF9ir#uEڗ8~\s @`?> #ϯ;>q6B t]OxCB sğw˔c6u쵭[wJ[*vnPb17vmQg6ͳaʔA` 3S~-SWHiR'_^ )sAB@?$@C`>o>_섓vچ ճ[F 69W/GIiN-@ >%@O7 ӂ \Og;L~cI%ǷX, :$S/T!8 )rW=l X]۫R4n:_H@@~$@}O;8}m!f'r>Byuqc$!vXwCN|R TyIA?8 [C*Ŗ** @O  ô zƊpCzz?xO9nQ:jr5P:'&e$@@x$@}M7W4;ɤ!?8F < G>{r$?rޭZ_ǽBAuE˩dU5( o @N(KpTsMr:Bz2!?uVzjUGznuנomDW+QuծREjkC #AMf&rmD  ỏU3/yut[nۋ/yu 'Bs#A kܗҟ63~s;u! h)BrY~9αyKY;Rd苴/)kHЀ S#A kV~}1I(h t|։GczLUJ}IV1TZ@t$9!@}@@ %x|>LWgE;%/?6qqνJk-O#eN?W #A9p$ҫPk(P 0hd>'mXB7܋T]Xo#y?\::$@@ ?8 9.WI$ouCAiXH! q꜇-6okE$@@_ ЗIA`rV\J$?78Mvԃ:C@S|]vۼοNi^,oHЀ t##A+~wr ο7'lm]t@ {G~ɫRyT]QU늴I%\@!gF 3b |o1Fo BGmcc㆗c}ӭzs#2F G }'Cj{ο0Y͕~ <ÏsY](ӯmmVMVJ$9nX; pgT@+sLP]߹oBhl@Wrc@d.۲4}hޗZ5UGB$A:K>t|Y ;oO;gߤ(:J`AN:bZ7:"! O۟ڬ5M ` . % N*\3g{Y.{|% Џ~姏G\vթOjH]I*sCz@@3$F^rP8#+!g7~*St٤uΓ4ڗHoVuJ^mw@@ ?3 xB7洃6K Nw(``UЯmQ~&;ྣ/-MWn%\żs@  @ct -C!r[*_* ْ(g=ԗIG/՟cp':ꨋ/|м@P]$we v$@]#pg>9OԀ g齳IЯ !!So[{-IJNub#AzN@7h(s/|Eԯk?8J &pf;ߚξl^8 @}I@_n&$|.Z]N͋~!ذf5+'h.;֦J)2ZfR#&#Ie,G@ tɱ~/nRQg9E ;@!U\v['^m/~] @=% H:Ntϝv AOjfNr@" O@#ʹ<ImM@_x7.M$Y{͋ SJkּR&Pb M@ t\w+?ē;˗!0%iߋ9!@="@GH~h6Quy?g-mt]:VJͷ۾tL+o @';B`8 *OqN|f+q{b`8*@IWԮV'?ʮIRzb$vMVW(@h@fmτ % MXU^lƄ@8hr TG]'veTQcj}$ @kt 5A`rV;n: ʉ1y` uun۫.R]-.B A@h +ڭwMK@`,2uUƗڇo}XW=RU( @@ hDG4mh,R|d3.,Tiu-KkVyZ!@]%@ G,U>N޿:A>!0߸^N:!)}-B6y@@d!0*ѷsŅ4I&'=2KM~ޖCF@P3 [h% bF@Nu,&LNn:V.I-x t@bK@`3y`Ok@ONrC6XڗJڥLS]ˑ  Qt":~%w[k5(ݷr]*ꚷ6efjHI~٢unkz3V m) L|*l ڼi >s @͹v.RlVZ{.CeK溱!!@#@chOZ[Xyܙz. +@ [lul5Rh^/!@AX0BȗJml}> |1% q%MB:ZR뫌K @t9B`8  s3Y{ dA> 0sWbej7-We~>ɮH@@d!0+@j0A#! x`aW]ַebӼSe.e   'W3GJEB!x:Ui_ZO%/IZڪx @?^ϋ!>'`(Sݵϧ %P,xGtuڥL면M۩M__QG tCt [?}jfꦟ5!Q ~@o\2[)յJNmVjWu h !Ksmiշhm 0:;֚W0RݾQ{ZYÿ*mԃ QDb ,ڗe_- 7M͚ԜkcUQk]׮|[]#!@]%@ M`ap@'V܍ :1ozC`)smQymԮRT; 9#C4F0&gߌoooK=?m}{(۴ʤr dF@f(@7=&Ռוt}~lZ?< 2-! /r]Pp@& F`[cK\풷[*lt@@Z:s#j  {um\m"]t}e%>T  :N!0~jnrpAF`fxGr:~/~}  q !Pnj]8w,}B`LηrOFT~!@]#@kO`-ÿJV `%9~ofB:N@3F']nSiVY4KP@IDAT\ nőrI}R@9=LCs~PH&mƂk)B5 ʹ. @eZFGC@ DRc w?j6 @='@盀 @`TrõIY  =q֛ C 6aFhs<p pxpõ"V@hعk+\0A!'{=B@ Ї)A` YZT ; 0<~Qó9Y  I@is7 tm[CRf ^ + $P=X\~%iⴧ%E=@ ]>}Nm˭.ڬ]tlC2#@ 3t&y:3#1X< r $$cqRˤ?FV.ɯhm|}7b d@@h P;ɕm 8 < ^B ŗItW_unEQ; 9#C_;=ӗ힙[ϖ׿~i4r/ry||lܽ\*KPpuJU=M։:~\ @ )N:r]' @a||l<ϝx_}@H]!gع;KB2k]kS]$  s\z>M&#TQj-sBrsVk9Y~ >ydXɝ'-pT A0[4Q]lZ6JZm\< U`(HSrtcge\߅<ˏM U ίw|YK}n>71P<+]u߿jDydyC֮=f˚M_q\%z |m69)WJml^tIֶhY~:Zo  %a 0W~/fombaG'?匿Կ(g*c~l2WX菍/9Gova\/~O_s_7Uɝr䦵O=a˦Xk7m9a'/o\{քլ.eUeڏlޯeVjkC t`S@ @@r߻ٽfco:N<˧ :d K$,wO^3_<;/Goݩr7޵Qy9ۭUYzr)<7i]d=g{"ztηJͬ} Nټ۬lt@@K-'AhߞUJї^q+|=H}i h[5\:lOF8ǿ||NKO+VUdmIܵwK^}h('Trt [ֽ9'8qxyB|;~g.ʵV"1U]}n&rv)/2 إ~8tn|]37YҪx Ȏ%=A`d ̋ԗ =~ ]_mz2~D^_uU\q՚{{˯4qlHV˥«^7~ 㞐|;`_䮘?{K~nr| T?nߵ5_?=[d}#["'bm"}=ΦeFF٤LzV!@= _GM=@G JLʟ~{wh+3h*VHgƝ/v}G3r 7) q /{&+Mnؽ[=nZ~Ayn+HIvE  . 
% a&=*֖U_qS<_y?nXhTΕoMcNtopg9e Oǟt7Me{#{}| KG,O4/֌q"%iwok  m) Kv ҃^1W󣻧yѷKyQmU6EN.Orunr>>WpX.?6/?x']~k]s@9像nk }R\ux/kjCB:N AYgvjϽlqW$ #3P /Ib))QˍyrnsŕkZ14}SN>-&cF@j毿g}sozk<E FRڨފcj{Ӗ.uH  q $u0¶j躻ޡ%UxzY\QX\ȻKJK':%7v1ĊW.A?| 0q/l5UnKoj[}T>w~|{)DvW(/6M:  5 &qjԁn[?u5F Wd֑{Ɵ!M8$#2ǯ^5Y> kS} W]~.pJqڗFy]z`羥Eqեky+P]ޮTjGBzB@O3(FdmU͟eηhq0'Zw6yy{]..K҆-wo<].0W{hVLO+_pr`Av}bɶ|/W_MFd6R]qkԺԮReu"-S]y+Kǖr @t7A+ =\o_sa^ZoY\)^FG(~ٗ !.O/ʛM\F-=gO}[s(@wO p5Pm \7MEjid6hku͋$JR[z@@+bZ6ZVFZ_wݳѿ+I~}Yr9!Jq?7r6o\?n]<ٚS*tP)N3ЏIU< y] UfI}MVjWir#!@ݣQ! @@UJuKqMp>*6ۏAb=wM\=/΀@Ch?븘e> n_XCzɍZ3As=ehD,goIz3;Ƿ tf $=Vi!-x~s? Iq}SVv=WC)?>s?7~O~֏TO4  W fλw{+,}]&jK#8ҷeu @='@盀 @`x$-NYvNۥ48(\h:* 7 18|c9`!m8qǭX}@tɪ:70wǃfzΩaӂ>7ݳνSSRdu͇؊dYj8ZW&浾EBB@W03 D@z})zp\+{篺{w_SMX$*$'Yw.mZF7?x3k̫I?~^*~pr{5:K'A.6Y2զBy|iPz\Yh,I^SdU#!@HJ@VߕصL<=_> оeXX,;?./o:=?|arohJF_MϿ3XGk$AzRްi:ڿ+oꆴufk+j+e:|ͯC #@@ $  6WxyVC?/׾yvC:X}_pէxncs^ölشbr<1|rҌ]O|;oXb͋d!w5/R[]mVV2@ b30  =`{lAN~k7iajpLbz.9\r;>s?6W\^_\4RPg<&bb}A*^'v:0nߝKLA LM7$]oY]Ydk Xk6$ @+t3@K쁯=bz@?qCWrZ3I4}džr܃/8KroԈ#𴧜x)fk?JWվ?RS{ 9\e\{-U)IWKpGBZ&Pr74 PO @VYe?n'ovڽ iWg8rA%y_* +s9)7\T+#: ؎s/}B,<۸=j>i*OEԭMuk/ξk!Y)ciB6y@@d zRyCv0~۶>qEC_~K9g'$@ʖQټ]pשOH]_T}S{Ns6 >|۱g,AYQ^ZG{-WiۇlR.[@G }0? 0&ЃeY*WνlpJg$rEЯ K%KK2ɣ+Y?DLMѽ|wl[rFETV~qtm52jlKMQ#!@#@chHA?(YwP<q~۲^;}c@3i;NT;mzwNt]"zۇ.Q"mpT]]2 V h TzR6vz`۽o"iTK;4"e9y_y{&C]ʖ듮M^Mfe-L+m߫؞>u ֥LDZVղ(-Gɨv\k  S 8Yԃq^{̞ ߾ofTWO;kۺg.w{'<=o_|a0ct?}#wzz:vQCݶ+Wv́yׁM+fjeQRdF `gP&eIYtvSo653wS5y@ɲPfSQgszW,gȍqb,sֿgpk 4y|B6[^Vt_A?n~%QmloOS6 +5ݯL %YҤ/@UC/=rrVpᇷO3&BSx7-8N_ߛs\iݦQ$?4Ќf 28L`W+vٗ.jw_PRn!]C~y[mT  3z!0ZgYJ@ϫ]o_w#+pzv.iJ)ݠ_,)xnrMGs|Gmn'&7mX7w4~FD9rIAXl!{`}cڵojǿ/MHפҟmUbm +K}O+e>~>&v @8:r"ȁM@q=h<]37V;lPgTEw{ڍ)Š}#7Un|7/t+v~|܂~.R!Kmk֦ ,T2 @t9B`t @`6o.m_~˶G_ x׾+Muz;@&RqkZ=OcÏύ\n){Z7{6uEڗLEtZY5X >L4un#{v7MV8ݖ6׾dKźr2 , Ȓ&}Ah80vl="5o w>޼coV'5A]_߿f}nܝ/ =hJ> - ;2Kk73y @k&i۝{MW-Jinu{~o۶j\mI$-[@1= Q#8%65/uTWiE @U:7wl] Q{/п\;ܭ ::6GM ثRzm}שc>Q:yûԅ735|Hv#)/}}qdפe6Ն t`f@ 8zz.ezP lnx#O 48'Ph.]_,.pO_8\qZ>%pȆS:Cqdm^ @?٬֍W,,4:T~/X]#~6K-Zf Dke( N Hqp:k+m@W ܧ:ySuc]\e{x~G˗-5n_b׼֩]gۺ`=vݛ:wߦy~*C6-ֵ2u  ]ތ,YAtT^إWg^𥋮\ӝwc3~jťn<s2CYy?ID`bb\~P7HeWWg Xtj@w\=ӿU$~?H='%:Q), v@@fd ؃x)o/_9i>ظDnȓrŃeR`kW|[V?iu޷~pu7]}R-]!jkEj?"ekI`ACA2%@ St4C p$ۼ"6o[*^ɯ_]>2SP(I~|qǜ+MDΝ'0VrOm\L䋔eB,}Cfy}{˯5}ޘk-~/ i"͇to  f`! ϒu[k NWp[>y>0hyV:D Y:Dg=oc[ |}+A: qmҼH֙k+ "k~>BVR e"6~9y@@p5 街=}ʺ+n;>w=tPa1PZ>YYra'Vl_kV|UEj?0=;;=rg/]8'u?TiC/EJz茿|h\ir7Ȅf @Y IZ&8үۃkUU{_wwxϻݏM;{_\ ༻;_.չqɕ2=8I @WN~_2m\n@|ۖW^ӑ߾&@*y$@@ 7@9Ȗdjq]ӿTWt T_o߾_MS:[Or'KBP(ߒ?\>8)i Z1Y_{M_S{b;ulvjEs w?dsQv}_fG ICI*CuAF@P3 G pnE{W?J}7g 9p r:};{^_(=.Wp>dVo'_R٪u2`iǮ};.o}QeG('?e2mI -:S =3G@ ;z`-=&碫v~vKY7ݳ+|A<nAUm5ʿKߴwjq:fK?rgܣs՟\< ەe]zM߿cC>kM}Y];djծͫR]vL%m1WNF|!@$2;I!$$z$>t6۷[w8 AKGkkJS-{{i57~w1ߟoqRk)\]*~CM(ݖW@%@3@^lz@ XiWܽ};?(8{] +u" ' G+\wG~Dk@~uTwl۹]R_w;=r߇&ӮÖn:m󺟫*WJ;T.ծr  cz&zpRKER})΀WV Gm/_XvZ 8/6GWg=$3== $ "qM(.((@] XpɈ,q`"99 afرxBWu|>={o>[U-x?5)ٱ-Ɏl})H {iԹ!@ٲa`hph_k4r0?طି\G2^@8B6 @W& >=@]dS"ԭwυ? 
Mع ѿk\p_b͆)SheN5 plIC@/|`K+?wuUBJOk~aŘи&%G?Z8_􏲳J m+%H#QGD@D@L@U>^D @Sv1M: zӆ`Mf{zS=[&3ioҒ헼Yk6; ,kYf˵;7?/ͼ<_C,J1o-۶osMQ$" " 5M@+К>= qG}#swӖ y?x}]?L@޾&ɶ6uݔD]P#OPg|u%} k|ק?D^igޗ뗯YH`po}?ϾaQ%ʘP$" " 5I@5yZ4("p"y.?B~Qϑto9?|톁GmmLMZĔ,b1%S~mA/{y qr]Ηezu 3_*_|R2G]d]_r S%WPظ,D@D@DP jSD" XLcdxG{<V2臍z<=gNj9kRI!084kd|,}UcF+?W߿G_>Gkš(m|u($_ԙWa}YvJqIJJ" " "Ps570 HD@B ޕ Q2,5&/~}odqZJ-}yuHݽ"+qв{O_|ߣn xIkcPO2-=2;!1 1udJ&E@N bamԭ\@N+Ɨ?wܯ\[#Α>6Eԟ~q3?:G /.o_y|'~;۱fqs&)@M@M ND l0Tp`|T'jܲ%00+n9˥OoFk_:dᕗ$ucS(8 A/㷉|X|9&)" " 5K@5{j40|* = "#gl*8g ofIo $λ\e{񭗡u}+]\s 2d$pC?$}QrؙPMlmT( ]S J $w4>`)'su@hjձ@ ,"CG(P_>Q_dmGlP?l?>JN=q^g voB3N@Xoy;8sHNE@D@DhN("P8 l` n訃̚kg+9H+|6ltk㺮.h[| -ך َm}_DbvOJ<`K?tڴ}ji 6/b:k"L;Wr+ḳk 6wKz(ӷlP$" " uG@uw4`B`)kmԭnLpϕ'pΟړIJ&~RQ|rqK,/Qzo_#~P i,@?l34v:lq (" " $ jW" "`ԭj;:ڿ_%nC;섋 Qype7>r1vk R g^HUmpSҙ36!aS}u*DM@ hHՠ@-yg.[Y|AҜ4!k]xeNwC9Q3^/<xG/ ϳ q/9J:LBy0cj,$w6>>O_D hϟF/"P,БF=+hk;.PO&7ahnMs7 )q~3

u "ԧ_"P&ݗwnMs]&6aNU{kWt58=6 :|ksnQ,%ȶT "P>@cuگrM/9U ;lju%뮿hOw[B?\}3<5Z3+&~ ⊲e0@Xm=׵q/ؐ:\QvБr&J9e_uiS&LL%&ۙtG*hwLt:N\>ᮮ3ђt;FdK:sKt!Ht!?H;=v?1Hg%X948j`(oU֮[qu7n)g~u\65(ΘN}(闕uPuvbRD@@^Drhd矿}wlyjnG`I,]t>]w9 W_S9o/se 3m is$:H(cpHmqyt`߶~͗Yz `/;=g-ן`J۵f{+J&g`~&twGebkj;xǶky2O؟m Mlڲm&JWWW/  -zve+^_7={KZJֱ~Q!m#m~,C>_9dF:NF@,qj*!םr:64!3/< q pA ~xWyHSf <K@IDATon2ηeh8GE,qY 8@Y:l9S69-;VW μg"[ŗ,lLV)0=8_-o{_zvW#K j%W  ?6>m_O^v՟_xϘk4{|0c`;/r&4m4TE@ KN9$LULǟZg٥G,q XA p6nv[lI~y?;Q)%УW70 /r׿ۿ꫸lg>VG|R}c˲z.""C@98htX(/[;RݮF?5<=~_Uȸ 0hfuM|מ ?-eXlcҭ=+sd-'O6r/X 7z{-w*)72!q ui\:ט1.B!ؐF,: W&hjhӯɋ@EӦwgoq-sOqc X2eˠA>J2l}9L|37 _c(mq6Z>1"dKtu`%w"=$kZGo_vO>_:'uWi lzk5 OYhzW pᇷ~߼7)o3^ۓD', ZdMm+mc3zH$>`eܘYu!m60(Foy ݟ3䊲ukϢ;._QwgⓉD'݈?{:iOHS;۾iSZ}^nupyRb!yC:іM Cy_ў#yj# 2"P3P3B$,Hc܊ d%:-beQMיG]vq&Sۯr攘?tHG&%, HI0e'OJK*"F ;}|]?Υ%?ww=sx= LJ|(#…GQIڭ&)L@|5wh"]/M4eM iĢ?h˙g926?>_ߟ~~n7MPl$m3gH#k'm钓ߓJhIl]K6 Dk*sO;6?.<+ox}%7q/^ xmҏ:e>g_lhx=k# 2"P vu*"0ᷛ& 'O% SrH(bJڱB6?2Է#&C}1pdþO%YXɾ|iA 9A %+Nd2yo3\J6Dejg{g:f3}Ϫk/ƥ|2J6^d}_} 6};}2Z # {f}^AgRSHv1mʮ=W_[y/xY׶V@q0菒-$-/XZ7CR~Ϻ,MDN;4h9֧\Ľ΍xdo>:] \tMv"πCGd>c? t.qAgpoAf}Qm85l97;O(2ݟly :3j# 0wL&[f?"C nٰoKG??{{oݿy[iuTfYڠ#ٲP>dTg.B@r5OhB_.7Z}/?n;Xr}1"bRoL2i+1 /"eozn?7X:Au )q=R%- Aq@PeQm d¿;;:N8b W[=3.uuϣyڈeȏID(+}8՘@x [fSS'ܭ_{gSn| ! yV0|pdW.[E #GuF| `3ȷ=s! [6MD"omKSTViwlqGx:iu;V {)l6ﲙH;.9nvOOw{AHf_;_}c=SqLAe:|SO: Y[DN34m4IE&qsodhǟv|]?*.qկl67R$upyS3k>6vc;쇒<`ru}^w˽w}wG }æ΅lcF^xU S} bgzquCec]1z :aԣC2AgV~3m`ͻl&O;HIuQ3:{~7ױ2f2@ HL}?>/8[x/q__Q6[nu hc>jk:%d*zv"E6jJ"޲Ap 7kl - >x$nymhh𩁡ٰ֬'?X􄫏@6ǂypٜ]<% +rKdva[zA{ԭu'mxu+Q"7 xY>H,}:+, Y`Ab=0llFC)"P-Sz쾟~ߙ}'^qݯtQ::K~iׯdIH<%" uB 7qGMۚl7LlH'z-}apxo`Uo{ǜtS&dԁ |lQb:%1ˎѨlJuHYny_ΣT6?\;̵H_OhǶ|d}3~|6+gS)f'5vhlgRD #,CM5:CuJkneR_lWRJ" "0 ko]0acͼGJ.ˬut(lIkm^omWgd$@* CAqHJ2i<[k0|6HC1ӎ}6+gSkM6xzJ"ZS;0.9vgs.Knb%|Z=TNlؘg9% |4'Xx!+@ ̙:%9J/zör$7 )+h CV*1pAX}3uje!hS/VuvHgmvnhv83fΛl2nwv)'SUM==7 n|J>2#l69 2" 9SF% 'He M6 h VZ$Pw_q `ئ`ck:ĵ9haJC??<5mK%osm3CDʞmw^~Y:v'ua_μSA?̖C&V)" Yx*@ȷNμгm7gJnv"E@wWn~ᣗ?=My_i^A_G{lB={#VT@a)"P삳@Ihaaۣn:q1D;J|Ծ{J_6k+a$>Mf7O^_p leV,D`Yf1$Dr.DG?ӵ? oul=:fmOqSDq: yy^NI{MmR`Jy$Groؙ/O]y]{#W6v"E=}7-3y_"/<Б 냬hh9CFYh@P2bE :}6K' }:z ~#ٟm6H(2g3F63knw]S=a@Utvoy?q_o + `M)̗:_,IxM#&$ &<隲4  M;ڭ6ij5r9DuikڬzϷ[|ciLAO]OS]U<ά$"P ޞ~wwOyͱq`#꾴M #DŒQYR@YWͅNhp6T`2 P2ЇPO}䩏_w۽dpǖz?(kݗe6>#$g#ǻ7qx{Ͱ楊AsS}Io{iJn0Cڀ6-ﭔ=f@@hMSD(6luJ6V2Џ ]݁ koxTzHӪhyݢkLh%m^l}a+guQ3fo>j~ƥD`O=['7mQgr3=0V[+@S@SvMZD<?O;$!+S" `sS&9@=3Sr<̗CIi^H]={t@&M#>iD?ލ2$/-1T^CmXD@j vx3 >td6^Hgtuꐀ{Jry][?<۴ҖEhۖ8}ޚ՝t"LԹՉG|wvkMȨw `M)]}o< QG6$k" |01A:%?g ?(|fN9w6зRp0#{o^ﶌ6[ڊٮC$֖#G1bŧޚU;J"  0kч}\6:7D:ЁiԁYfKim3I$!gXgB:$~ʨ?wʜ@~6yOn݆ǜRpXyDi ?t?ܷLM%p9(#"PvS{'`0~ }i7'P&%tp0D Z]|E@D V<$[ acOɠ2'w3=/@"}W}a>Qz gm?]vR)" %0ӹ{o|un?MI5gQs(?f L#o%7߷ewg/$6W\[JuD|h# *hu_Hu`=|+g"EK)wT.!wtq? aý2uEJ"PSPSC{^lw)m c`۠7 }f˧:JD:?":.w߷de.w:#rp{]wjk[opv`̀B7lQY=sT X؛:=3 <|,}C镜dp?PK LP !1K/ګ-٪p՞Im?RT v:FCg> iX#`hΩf$MK &xA|l!n0dneT0ߖ׷y1_x30! p̂Fn_e c^k[RXT" &0kz|5Ā?}i~6:6K}x=Y@hhgTB>mC?kCϼ3?m~AYǷ/[9t/pz-'(Yj 8X|dd=?ȷJe,ޫfk?S\;6؏ұ ?$mo| )1-#$ G@ wJ5!^nm@AH<}'%XrdmHuдAW~+Nظ5ؿ~ꯚ'C}@@G{[rEv#Nw!`>k&a),/) B@ r"5  ? )M!>)i/T2glv_fpYW444"c}.orsc%C2্yn0gp SwLBɗ#VH:' :?@. 
k<2Y@;k`_+0!#>0xG%':JUb&"P%sfOf쾅A~1~F/q[yH&g5ٌSa3okl:rJ$ I;&韝x}CEJ+F-#]HVڿbsdXw͢S~$l-"}裮i?O2nyCq Oݗ(`W$ yZ5)@'Qa 1gJn}nX㡍c~eWK׏RHotC*uYj0 zR]KkME@E`7ollou)9u}2%E hϚ,"03x'T'$SQYF 9V^gW!IR% ǯKhL8= ^~|j/G?'3=l'Bo hӬI@sx 0 3triWyVz([`~^XXm??/%O]2@ݿd62w͞S&uuaGn8?}??$Ae U=?uJ@uz4lq@awdlu2 vYh7nݺ}mj@#I"܍J{X|?S$~OIDtvO>概>e(ٸZ654;[)MC@Ms5QhN1Ok&ȇt| MqҶA?f3c|u  IKV ,?_8)LB%!=~73 Ϸ /@je:# :;a@@ t[ M][lQmpQ~Y-wuO;PIzFz﹦g솿mT*"PofLm J3g?mFeyJ"00R#P&d4F>6_>QK߼.``pC=^Dڈ++buŻ\ˮ+ۓZr3 uloux2ܓt,>a>Dȳ z@vD͏Wמz~Wtxxx9XI,HRj'jfB'nL%ONՑmxs\ mo7=K$bЁ{=\6'2ؙ I6m4lE `) Bl;ʗ!:(Do8Iop:&ӂL(v,J $`Dν*rOn6u~~2kcp!r_OPVPgGc<4l!mxt&|Y?Qcm/1:pү-~8+M,Fˑ(DVjs{Ўՙ@UÛwdɮsЏWq/laW' ?@2? `At(඾~{6ퟺoueԹ0|v}ǡMicѦIrfTM3iMT@kk/͟ z6اΠ6: (CmPE@*CE@TYDWAW;V[BƖ8\[`+K,T} `QXx%7:N@%&!;w##^a0_d]Ȩ^'{YDbW%Pf@}=Љ?Xg-뿺_;+ЏL"ކŬR =w~$EE"  H{g]wnD P2#,mD@M/u\'f=χ>-f'f?~_YNۈjÕ| muձsRѕ;jYD |ûՍ Q~l#$@8ƦJ&N@+"8>\} +Vqgw``Ř>TT&Xb }3O $"vf:ݣRn Cf!֣NɠmQg)$uA@`Kq#rkxWrpZ P뢫 r_❴,]]]}f݈3Xdu)m_e6};̓_<z!hhQۢl*{7JTDl7m3cZOT(``K_!y;l+@ME$" "P#QRgGoLl~#ݗދƔOQQx 7I&Z'(" "6}h;ڠ:zJءX/$2p1(#5F@`1>rǞCQg1_"s3G>_x_^zZR,l/ϗrIY# XVUy%GNN%? Թ@My 7Ht?:>'A"CŮ$" "P x q:+g7c[n=u@ )zNܚ:9@§ߗcS !a@?! &{$mS$ <-~B ?xօK-lTt.v BWןpR*83++" cm} >!qvؔD.𨋉i" "Lb #vn M'qٓO߰a ,6 Mڦ+[xִW]xWID@b L3}wn?9 =*;ri'}zXQ{\^'9Ů$" "И[ )#Onwo6 1Mܬ:p)|u'ОH%3Ԁ@{w&vfeFC;l:%Bgo GRN@<`g6DOf/;~(W*@k6B#E(t7 ^6x[wvx }|mveJ"Pp+4.6<}_2菒̀u/9q0(1_;ʋ8wd%q4" MH`Mfo榎v?g)zK=J7'c98!@D<J`nا2 6 Y<`+Y@BsQjL%Yr"ϻeV.]D@hMZ?7s~6=Qz&m:=8e(W$ <-N b 2gK?JNehkmb)S2~SN\7=ޱI`ۍK$ <-@`?u 9OK=[mRcDveYn}*>c+։hh;lnnDh>m!g ^A"\J" " A16GC7J~ܣ\$RrZMBv62J%SW,"fNVo 0X}*.o˨  lFJGC:m"PuD@D@@)6O=Jë׮{'ok 8?߇ ָ:VtqP |*69l-&٧-.5I@(b a7y!0k?ڠ*>Ύ":B3T%,_lm1Xԃ/|E@޾[ܟ(|:6>ʋ@Uh*թLM C s|16&Y6U\,϶]Qk춓%۬ } 졔{BxdmT`zhr6?g5՟R=@.6lXY RD@E{mk+{qeh=cļg}@@͜ DD@*G )sm?g6= Xdlʅ*X=/ݼ-9i+:AD@ $wu {adPo%}iCS'1Z)d} ' ]ID@D ?h6J:[ } n!2J:e +_̻(V$MO`6A}?3y_G|6D9{o֮EZbWhEn'! Xק !@~_PWűiL" K`3hZ8P6?Џ˳/]`eHVDEWh"$3ط2) HNxsI=q1J;v*+qlZr[(" $toy_%,yu-J$E V AM%\Cx R?lO(_jP[-vrbPF'"@! [}Ϸ5lL6HPcj$" "d"`yGsXd=]{53UDCEdD|~5Sοk.wl4Ԁl/yP yRZ;}Bgb9\J" " MHMP@ύG㸧G =E tEXE)m:e'ƲqɥrKOUݝ;nYs=1$&(-OM`S(/F@(r 'wؐ| ```hYc,yS _\z'RuQ$ηmp;B<} Qdd>S0Z6Fd5hէMA>-~N<)_Em4dV,+ )XfO$(E@DD;lg5{߳:~+Q<-vs#@ E$" " -MPoIegLoЧ',I]q\Lk~𷺭e>]vwCj@D@bӹu83KS}؎JQU%DhΉF$" Dn`\9Ah>c_z"Ӥθ ƅ%n/g?UY" "PFsfNuB$7|YH]; 3Y6ȬP[(]*I@E@D__l){>#x4FҸ-66oAL0Y3N1]f1 yJ}r_6YH3?b_mT z#=`~yW0d hoo{V.t/YD@DBzz'uwuLWdO:)m;y$ʑ<"P5D@D@DTc6҉KmE</"ChuJߟvJ[[n}9hw;Ytjp7v[ 8޿l>SБLYt00HD@@:ݲ9fZYڒɜqC.,Cz)oPbl9m{SY" "P)orc=Bn2>ۇDʹ7"QT(?GW" " D {0͑Hn9Wz.{C!v(,6qe)N| 5(ffO؆R$ գ}cYV(LmLdu!" M@| y§M(.@C\xR歎r?ec[-,9e;WkAT9ӱ`k<Зm+@M@M FD@~Y3 6Ooηy?dnBh&"PsfNGpqP/D3W}0&?oˤ#W" "zSި b֗:%FǍ߄}?D@&齡'x/D_m1ؼt00HD@@hJ;<ճfd7lJ>|~/:v#-k"" U&0cd~ eQ/lVgy,7 E(( E@D@H |ӦD=rh%utk}>|3FNID@D`" ttwl4{Z3{>.X?sA/#ѿ"T6*WM@ [gFBj6jS}L(mL(nu&" A}?c!@GG[G[[ɖg vY"e{GC**Ձ'oAk:ڳy}I?c6cLJ]l{5#" "hIo)E_˛͊z q̅%%˙%ۢ_H<o'I# {n;ng6ֱ-jh@" "PNY36d\t2OI?_r%C}d^|DQejGxF8iˠ#ψ@ h€ռ4܏ok9}}YVm[~V+ic3 " "PK:{s>Yf%t%*mT:#0<_(ΚQߴ]eE-ڬo[Sf@ loO|묎QimԭAGK_Z " nD@D d> ЯtNmRe}XFVg9e\Y-6gIUgڟ |SV@d@Y0,aJE:%ge<[~mɬ6$E@D` ̜:O>f8gЏHD2@@Ү.m._f%|hədRRE@ji`io}IyF֏:%}! 94d*o/TN qYLkKQ_fAdR. 
)@L<_2 aS$ <-/tKbjz#4E'$R(?R26~mNXr/6wnN2ޞ'xĐ~|To,db[=gI00HD@@eZ̴λ/$ѿoc֗vlg|Ot&͏{1~:`=$elRIJ@UůE@Do%֎6GF|#o{ֽ:AD@jNnFuu}lYdu<$E"B"Q$v)4DR!.63@vQytǼ1'R^tZ'Y^fkaӗ>f}[.U# W" "Р@gv1h_@ڼխS_C[N_~}W0" "Pܗ1{գ:c=|\yT?@hjձ4&X>PhT>e-YVˬ?|3N˯2aRיӢҶ6궮oeY]duVr@H +aƴ)]RlP=e&Hș oC;mN:[S^*FWFzW" " G@_X99;](BgV6VRW'S>oP⩝1,CKC*ng$ZiGKE@(7: Z X2HlK@Ush,/aƚpg3{Ӥ]lsV}2yX[;e%"LF [T}y_MI)ӡ@}hzxp)SpeQ2Ev=_197}]~Q—:%SϳLmLfu"" Amfs̴21u}vbV򴅤oc%Ljd$E@ꆀ ǹ яڨ[ GT=k.U'  @D@@{m~fg2{rwWf |hD!yI.רf3/)" B_Nӿg,/K@Ush,[lMl`fˍCŧE*YF=ּysARD@ꉀ{D_ƶ>Cd # CD@D $R-} S;Qʼm6tw"ʈGUCZeq!l"P2mNE@D@|lY [eԹDꔡv~z@]H@%t6ꔨou=EhN$" uL _ǣh7[ݖ2+i uhKׇl?ۄ" "PO/:{֧P[T[_T6]@8k_×mx3͞95#b!lGM>~y!-}@$mDQ@u9 ]?æο59} |=d9%}!r|&uu;slƗlַ#omO럯=DRtRyuO) H:[ (,#ڴJ" uI`C`=1cfuOg=|*@Y0V pL24U>@u[:-6[~/qn!ayĕ*T6 XNK@{5s(/t(ټw).umAZ>!'v{AID@c6OZBc:P "`UD@D @eRA`mpKÕUJ(OI;L $"P֮[?`s3v~S59M[WI@EQh> |̌| r!()1PVGR@}pG1'2_ﯼT6*V @f't7x#lD%.$ni1CT2 D@D^ ^[ 3-9CRLn{)T'L`9GÙ QpclQd{v=QE@D^^t7T.T6*X@H&)N3eVס],ZcAvb=ZJ" uM`'K2z"C@98([hnUt׷ne-’frZ-] I%jroʂB'J@[@cp_nf`P&=fe|>R橨9p+VUm`E(|E@D@c7uc d7o>n0mZuHTwf ,EA(k4hX)&hD OXK&Q,DDD**"M:lgvù|>wr={/y΢=sֲM;6 Y%6,uh"mzg2 sѻVFfYb lq@(Z--K@^AM+ h+o(.ʛ HK1<}<}ZM{H55 EM^6 /t^w^A?ྂ XJ@W@WU" "Pƫbǔ2 bY;l5Ic{u-s)vzhTD@` .ˉ6jk" "eX,w AЫGmIgPu,v_*+1)+" L`+ڙGxYaU-' _@D@7m&/«WU2)iy3:ŕ@g\(cX~?m͇pg>rg.h9d jkeE@D ̝@:1!Bg>eE h .&!" A_CV.YnـUUǢ vu!z=*" %A`@WHc2%~^x-[D@:C` L:73555|q5itWЁ f}KKf!t.c m̥DD@D [*bLKoZm|HVSڜc>QUD@DT,X@hiXG:s^&};ޞ+_:D`W:[E@D@ʂ?ކ=Nr ^,]`^ Њ~)"PO*" "PJ>]hOXꙃ>1˱MPr"3j $" Omy_=r_@:яqS78Zjh!0wޢt`M'm}Za}Ps"S)n &" K`slfDg[m[ۭ?M,CMnN'? @D@&0{|~`]v9Ӯ=ڛh}D hk(nun%Ž}5.9kUPG /z~ݻmR$4{9lG'y f PYOl/A@q4 (| }.Gv[b70sG/~"@_f B݊xm:yQ1a3/+9! ` " "P<{~n;*Jc555;n6l._7ގABT.YIcLi'Jm䃺"!ǫ.))4wqmqA[Zd|ؗx_@)ŋa]·1s>E /TD@goH}l.ُ87mз hd6ҍrưG0ϘYVWJ*!Y" ,T7" "Pb3Jqm|_wn=Vǰ1pɹ_?^ڬ֫5@:SޙޯPX[Nq?!9"U[YT.hhV)U ۯ9s-;Ѳk*co=xݪ{檵*H%{fbݫ^+❾q"|kuV#gjt mt Vu*" M{Z) hYV*" _hnn<Yg=-z9-ۅ,ۆЯd: гۏJ/h" " Lyktu}`$gX^m+_N@YGE@D n&^Y^f/" "P.fϝ/\Wh>-Vm|9m'N5Y4CGRQ>QQ~Buȩ@@\ MDD@O{ د?@D@D@2#0@ }ѧg{ֲ-ϳ6<ӵm@N h 5>ڞf٧g@EEcSsӴg/[LۭTt䆀6rY@}[7s ~Xxɒ&s8'*fG{cvַWJ@h K Ս3̿[յTZ4w"0E}@( ׅLsQyKT[EhЪc(.SF {}tbqZ(w.@bcGBab?lgBc>Ei@D@!Cb%Z@xo>`qPm=][1XN@y>ʁfg~gE@D@Dޜ(aQh}?Q=?TǮx_:U@/& " %нzFO;p/y*żm[3gчe;ȩ@ h 5̖X˜f!" " #sGa8b+鳝.1Ephpf"" 9#?nM/!gj 2g&]j칬#m+,@@(6=*~[xi/ml}F:Z;xnmXǾmL_V6";~^YX3lD@D@D s /[:oz PYgoYo- mv)I"K%m%" y&?ۿŮT4tγ0gVl[6yYd<˶"7z ," '0!n^]#@Lè/ â0>sX<[!gKT޶/Y' #U" "Pm',jV" " "9'c?`OlO\'WM1g_\"7z ," #?uПU*Q5t Ʀ1Zw+}1oOcriC9maUD` h`/&&" !?١*NED@D@K`74Y@bct[yZh moq~FE0#LjDu!" Lͧn^Uͱ穹@ޛ?6 |Ȣm(I-oL46ˊ@@#" "?㟸GMMa77 ," " %0/@Q~#0"I@yY4)<᧝VYs㿸ߣ('B8G}Xgn]v" "Fnݻ[{*b-_(WߞS|[K\6oCmC߾o %@ 9" 0" ")<▝++kt/\R$Ӣ>B L|n ض3jLۆWNN@uPD@֝ԑ\OSgƦ/O q+}9!U)1ۇb['_rF@j $" (|k*Z5" " "P>f͙u fް¼mg}~s#gmaQ|ۚ-2"y% " H“V[G:&JLOqMaMf6mC/& 0>rUN6(RۭǀKn~SD@D@E鑧_0Gs>>Q"a[ӇU% 4@|z=2SD@D@ +?GVziQg}1X|ˊ@A@A\MBDX POx]RrpEB!SgXQͩQ#v6nϧз>cñ66W/L0ҏ<@kh%@?7J,i" " "PJcfyv">-:{^G| (1"7z ,"P(G RύWƆulPkќE@D@D ̚9/`nP)~.*}ۘ>4mD@ ?3;ٹ;4(cޘY -V36vOo?slâЦ C 6 jh." 
IҳlQU]s r>, _`S t?s> mkylI_OL$ |" M\cM{_&ܽ'ɉ@xޢEߝ%@TwϺP͇bPNE 6 hB" &@yL}~7bS||K㋀@x};5DtTa=E>y{y@Q@Q\&MRD W(|jnr5;'ccq)۳q}~{m6"P0P0B' ğU?ϋE@D@ʊϖ-3~)Qe6Z+ sCmk1|[&Q$!o" y%@oN4!_n;˦7NcbZ_u<=q4q_B9b m(~p54r'pc/P< ck}1+c>T P<""EE@Eu4YlH YlT" " "it[3zQhâmC }<׏[g::Y(H(ˢItaήo=~^_Ʃԯ@fF2# lH3cNmaL~%ۆE SD g]]M`OBE_𾫠Pmy[K1OO6~]<>Ø> mͥDE@օ+^۫owץ/#" " "}Ï/zЦe5㐵B߯u&cPl$ ,@6POyUحW}d3g_c~O뷷ml|1}XoS 6 bh*" !@?vq{ Ջ@6 կY3f>CY!!gxfA)ECA@\MAD { ;2^q{<^ID@D@D OyoUuMO+S|cH~b_3S>YgsW$ lOqݫ* mǞ{mFČȦOgo-el}#ocL-~=6|;mh" %7lS/eۭ}|%dO'nfZL( w"~sR>CA@\MCD`@Oz/Wܻ֭%" " "kns34wJ~֧ȧE!zkڃ9X(:SXEw4a=^Fy{UE" " "P\xu+[AϘml}?i}E%TZ"P 8@gص[M?--ۇ[)+" Kyڻ?|msf]ٟ,oܕY5xPnc7`/+hf"[I3>sZA:),D>lǶQ ,A_'B# B"@ZSv^,^qk=mcU@XxْgGއF;-_Y8w"c;&m5xOaȏrAB'0mO;皧7#E1} uZ \1} _B9b(@\ MDD'0?o\[._XD@ >5$4|+^B!4%V8蔋܌R[kߝ߹;gοmϾlDp[@~&}0f%eMPoŸcc.lCy( (ˤI@>/ CU<[uYV*"Pj{dkck+\4{}_1"P?Ϧw{пo֏\T=fȱX?2ڱ䝀>.K ]-whc|ߗb$p񲥯O|2PЧ Z3o߫GO?X!*Q#0Y: y?v3=PI;}د/%B` COWNpˑ/ke@90nETL6pÓ|v0"PHܯD};@>`qsc"EI@Ey4i(-SGXnUʴr%0ᒕ[@ۃB%S4{_{fc+%y%O<6b܈I{ -<|{mgb.9 }G W hN"P.oɭx9dSD<ԭiG k;(fƏowAz۶{GM@y5/O܌6)q9Ϩ癃@<9,%OE }./Kًׯ7|6AQ6?Y/',](05D gg}4sxGڳ9mu,EL~=c#Z#" m@IDAT*^cAl'<(Tw>yenv<++9%g~<q#?YG?|zZ?}X[PB" p*k" HୗmSU=3)k" " -cQ$XK4Ky+CӇm|W;/;_EJߏ7勉qg(PPb}dEh.&,K`a;4VUB3 l:pJ+r 6|Ac)p&Ke+V-`.j"]B-~FtNj l|6ȱ1e[>}B~+#I@y4k(:okۊxlx&," $ɀ _8#*@wݴ"?/]>Os\@<3jAZ1s"Gۘmy sOHqA@A_MNJClV34VU@z}{tO*Yk#V[x##rUD 444g jy i?gcyưslc٩":ƭY"% "p 92^b)" %{>zX!ΧA?F@φ {`"9#.Z|֟= Oc _p%meӤE8xA/n^3,E@D {⎛N,!!&@B`KW>GFrBއ_ӍhC[пPΞzlG߶ |I)-$@@_ MOg٠1/+֋ytNnY<|+6|+.v#Gm }he+gt9䥱Sw4s~l8_C߷-UDd$J%"-DD*5&]!LFs|٣{_} ),l5V5ώ\2zd}g(&PX8j9ĨW% t.i{<S3D@D 7N=#Fb#ʷ"NJ }>hW bNJ@V,Xl{qV3GP:7X>lbB#B$ B*1F;}Ր"^." Y#lA^(!\Go7w^>07|24^ Q2ce9o[r"PPM$0yݛcxa[ iV" 9&P]UUS ,!Co-?raO毸WZ=ˊ@6 63tϣyu!\[ 4+O@/ i"PƏiFE1aMRD@rDʊ Gŷ2v~Rz446666*"uϽ8~ꪺzܓuY{ϧC?rȱX(mtEP t_Vq4/Aow7G:BB9k)brU|1ѸIKim`[=:bهߧ<}9UD h/ '0}_h" "Ps!`LK=VSDk8)qg0iiVJoL{{ނELlj-h"%C@%s)檫7," MK{lzWE2J O!6!Cߞ5#-"!3)m'Q4}~F>-нe6GyR>CA@]0MW /ݹK,VqfKB"Э[M~q%29?!]~<ռDTD&M}*(6ON˜yn(珇[3)%J@%za,榖XO㈀@8}v6s@>Ei"LPOqC93 ;_Nl;϶;?8{9/([ԟMFRT|P|L3!0;t_wTLH&0xP?pS@5 s6(MO?߬_0OF p>aT>>s{uQgUD` ȨIm'EL߷oO|dyڂzH?E h//"јk+b-k|+" H`z6_/!AL?xjKDR ,X$xϦwQuֆ `X,2( @Xh72CD@@<{q{3WPI'`XGd(4G+WՏ Gg75P3ރhC?dS?ߞg{?4bZ39"PrPrT 'f$"" E`wڹҬ*ίa/|(`dnYȔu׳ﶵǽB}=v[o}[blvQB[w[g@_C@rN`+jܠ'|` (" %@';~vґMte E sF'6\dLc'7h'({u?igβXcl}\uW$ Zt-ߍЧkGQ" "P8h=uQ̄,PRD=?f2Pu"` m9g(?݇[l%@ɦ@h\0;~a֯W,EM )i" ]O mh(]uַ޾ms"PPQhED@J~}t~-+g=-?궿<w| cZ~Z0JO~n͚ql,+>Οu?pN\+m/HlXGnD"' "@n@? 
@.k,%_v&j)Vb +WJY׬w>__!{Z{Om'm1mق:H?Eh.#" " "P<*?ѻxf"ŵ9: nc/Nwd"ձS|+}!a6~?f?1yڂ6(~@ CBED@%{.15lg'5z {u>(Q;.lljnJYŋ~1$|?A3xc5M<,TD@DCV7iCXD@q_<>EΉʥRESctTW>446\zϯXUA ʄ/1(iQrLsÀsp~Zhz"PRPhni{O " " Ax} v_>9۾9 ,?f@fR[~`L9wSp((1xH@ ,\>wRcO " " A2@YN1}+c{cm*+^+DYxs/P'!!Y)AS[˼o'2Q VA[H?E ࡠ"" &piw;uD " "$w^}?\+DYXnyg u|ֱs@o]UJA= mk"PPH`u.9YmE@D@ V[ZsN6Þg^Rr ҫy,7ű)r<|Qn㐸G=>mLl1.so]X%<'o}] L~&lQ|[Ԭ?؊<~:U|&/0+Xf™~:KQLD>E?-y~93Guɢ'Q)a(ያ@.kWXUK_ CD x~۸ER}LDƞo}DBZ[>M8Q1WfV>E0GYO?}v>$@@]r-XO`.]E@Df 6?R@;Tʀ9CO5KPfO񜉵ž"g~;a3 DZF%F@rD _<Ƿ656ݛ5 67ЭVc JlUZ*ПJ.r瘈eROR>#((k@ o?O[Sp_' @8K;mO%m}=σb6H%˗uM,_YgӴQӉ}z3s 1 r(~@@]l-UrA`{sW>i (EoY?.w tk ~g\~E,ނ*%HnMϮ9,Xmg#nqHȧۗ1% %t?IȖ ^.WZϖ_CZED@D׳G[ss+ޭzXlcİ-@2uk\rO432cQla7(۳XlR[Nk_q䎈F p |B1Cc ϱ`97[% ({?<,TD@D3=@u^u׌/K wf`+" J=Nqo-tQ&p{=+ʶ*z:-)9B"9(q1uåyƲ" m[AD MWc'\S.14' r&pn(`o/C\u]BA D'?6ߴాpcLEǎ [7l B(OϽ4rKԩA"" !Б}8o\vӣƟL@mE@D [X6}kzΏQ~=jwRVխِa迶#" "PM/l5?跖~>uݪw*gżv^>W^Ȭ^LKq|V (7X٨u2/+eO&]%)!❢"1څ|c?V S _s$DjY( x@g6mͿ_wo9qw665̤t+Io͜6wl:Asmm!/c>ʮ\bU|.|^S[x_39lØ%bCm m&>'t.Oxߏ~q3g{^NED@ʆcv8Q'oǽlR~Xdqnjk-HѷcZR\PW_k2Ng~rut#gr꥿WX/9w?;wŪ0T Y"~]ڥg5Gp&0|㜫:3S+|[?q!GK>QGNk/ц9zs>ϟ;mO: ""D@@;xcƴ|/Oxͭ~iM@}l)+" %A7gn[HH\7~<-ac]2![Fo݈+ Yg1|,[go<"?d\b9o]U ,~ 2mDk aK<,>io_o\z͚v_5经;>XD@/mo:ҡÞsm['{h1瞁x|䘧O簵1F ~?tp:BF$+,䉀t}HgF.opڌ#k S&0kΧpy;<[~mm[y$麡>g7 +z9io-|աba= /Pk~Y+c1y=-7x~FVD@ 1ߪP9ݶao|-Wwmm͗~Ο/n~g//R#$YOqβm97Ώs-rqx@*mP$" C?Eg=?bֲs |g=Wl]k{J8!"$ %E@:AЋQwgd{/XB}g g^鎛N?<7xy o0k̹S˸ң{D'g/~72b<{Qo6}_žyƴ̳ƨ9 ȵfbsű !mEגE  nm0h-:χQ]^*sJnMuY>cX9k)oE;}ko99ru|>b ĴQ%5Xk>c|Q8PGGYG=a@_p}@[d>k 8+|aeW?ִ|.>拻ls%V?-\p19OA`W`<{1)\e[kcLX-gAߝu)EP  H4HGZoc{Azx\}FʼnsS>vot7\st"" &pϿyMG XgmE 6u/ˆ%ة/WWC $Kз~η"Z_gl183u· kuFH 䈀%OţJt 4>lԁwQ)zڋَ6yݾqAmz>UD@D 'f}}(^o| -Z+wο5<+~?pJ 4466S/3i@?Bbڿwc}Kb=eޗxvN;bkz\Up'Ȉ62¤F" Q2ho#Hg;s}^Ru'~ʱaGWy_uףό7уbŏEV4Yae?rV[?0ŵ*y"`%]s&LR6tbڿo)쑧Oʡykۃ@>Zɥ6)E)8@."5HG V1|{qFM:eϓ>V٧{3mGMVX@Їύ?}a5U\'oN{:zN,VY/W{{ȷeP./r};y(ȧ D ##ЁM|@[ @[ !ioϲ?.+~ީ{}YwPUS bZκb9 (X(X0ZmuWnw/*y }n75c#Ƹ71, {{_޿&'ǴswS^rɢw(@ ht@@`(Oc+C[?)]pg v6m;ߏws;sգ{_L^ED@:J~% DK-+ `b{rk<vTrLs?CGxAח~"?a-G{g-X vL;fv(K))8@ ht@,mP?()ioOz{=Y[6N;ȃ:gmX(/" !?+|Ķ: )ZHo.0?uϕcTy9 >by}cAن1Zkyr\o]DYI;m;;)" >b+έobD8'Ez[=c+w4f?7ӳ/AaݻrZZ[Ͼ_7}Q[%E)؂EA~=zwmUrD'_w^g;?oc@L+)C>s'd9bghM:)QBO@֙6NHG"?BxZ)虃e.dm>36Sqrh?Hx~q_H&>9!QDD1 8? ^jg/볧Sn;o//z emȷN1ֿGxXk}l>x>bͼ.b9֨ ")>,"@'757 |ԵwX1 x[Qߠw?֑Uq}9.9> "Z8 h)X wm]J3a Є}3o-#tOޠb>,amYId6R=DXMFe!)imbqGyk|zI{m3O8yЀ v}_ysO/UlkBmw7ӻ[/=g&6"N>;j˯Ǹ;8٘y^{`} pļ7B/~;ż'4vwSHamR "5Ju$"@6 1D6QӲ6갢mYr7Qw'|R=a*" O`+o`Bȱ֊#CL !FK1FbPogSb /[zMZ@ ַźo:[>=h-ci\B?A?DKhKSM4>sϜ܌Sbv14~Svvp_E=X$bɵ7m+b0B  -037 ly;ӟY`WB~]r -C9o돜ް>>rW觳q97Z \n)F6)E?:md:HG"6Ϙ~`G=Ll&m}nIC.1{Ax""P:pOoĬBZ+ STRKٹϟ.?g={tק,':p6Yk?> X@.3o\v9Ƿsk}7\ (hR$Sp(.! .NE@&gqBp3O1|(Ǻ ~ھߥzҶ[nk""P"1wӿ'PX$*(( xKس}6q'zfUuu|nǓ47$ %,E:^w1 C L--|˹r !5[' gD@2(!KzưڡmH^i'99k Q(^oN{w" $![ !d _7z͏:`v2. /{i̔Cni^O q舺qGD?~}ǹp|v5ٜk(z$dE hk42 tu6(QO?I+C.'WkjzA*%`N:G,]/c B֊$_T+Emf\uw hn+O/Emq8ӷ>,s!KmEzȷbo9~h>v޾NIY3Qh}ҩ" 9! 
` " d .`tV𣝍}=( Yk}瘏>W{nզ{a1*" A~MC/ S_bfLac-|lO9d xwj׬gƐE+Wխ_O1#3Ƶo{OЧO'9[Ӓko#CrK@D@"Q>?ٞyk!C1<Ў>-Ż<}X㜨cv7:Wڍ9*" Nۇ?L>cW֧آK3 ÃO4"[-o&"40NgQ h6NzhKA-G}Ik7ӫ~cڪ GxwFc-|{Pp!G!Fik?ul0K>p޼o1wŌue™=d)M ok}T:{Ǯ\.ӇeA۔"C䔀6r[dB &Ng >sQu!KRУ x_?+{mZwﻥWED7kܯu MMVзWR|~.;tIВKf*W^#uo[hc>bh6tݑõ}VgXQA׆ydI$(@^h /5@&lPسӇ>2B>3o6:+/>Zׯ@ ,[ri9/3SVL+){OYrWխ{1oiWADpӷ^kkk8yݝ8$m3cX C%?IȊ@h 5@ll (>a<}Xg.d}16yGswf>W_Ӷ ]?*" y"|w>:(`>sBܨv[ 'd5O|x%@6S?~NSj{UD@I3;74#A(X[Y|al|OqB{mT:O߹n؃ޝ #;1tu{m.{1:hGQ?*oϥ>xi9_kCkuKrD= '*I @G maL:xg{3Og;!ksi=vj.Y@*AV s~޵OCyM[w.cI֣t ~A{P u&_v߾1Mzb_N9ζdLܦ|G&XO2="lRH[p`@`0T@+%14DRJm[F{Y$c¯h[~q>7t |Hj t˭[ˮЗ%e T#њX ׾ywy`G;G_L䭗%OƓ/'\\㵪hfӶen@B`x 0熙A+ 0'Tnn7b2nVL[fyR(@IDAT{ᝎ8[uDo9_z}Tb=MbR53ޯ}3};t7Dwl?|}¥|gSQ˥'֊nd\u'VjS ־(5ʸN*Y2e7 ɿI !0 oD)ɽc%Km׽_ߝY!JA`d =_qm$I.Q:yR3GL< {U>ٳNmWρ|9ݾ(l簈ܡɟ&eqcَRט"l񺕲Tu ! }л*~$@ԝ[:m=J'/'+:E;޼צH`x;Ͻcg^;I|o8;?뗎}&^~8 }g]Qܛωl9s8ۖNmkYOo1InGx+~1TOؖTQK#o@@5P,!U m9}J]o]2s&K}!oo{sݏ|xQO:xO䋒0'@9)_8^wO{A?_vN9QuEۺTOjۖJS>'9;ֵշG-5,Ben>D`2B%P'e2&q|sd9gdL^1v^Qw?}QJ5z\WN/ru[   T9ULX+C] Rzpr:N7yă{=6_C|KobpIUb}Y25=55<_yXq< ګ?o|_?twz> +fnύlwJjm'hMlܘyQk_>XgQT#M(&6yޘ5 `#@I}F{'7_M-蟼}-| z  P?5Il1jr!s6Ly붭_ڷy}f->dIowغ.T _pw޽o}{8K>yQ?%특+&/;K_&@޺-_L|>ζe귝'sFѤQ~ToT$66}= \ܡ~QJO'[/e%{ /xv#8 F7|q_y \RDJa >1>dtj8]<|}'~?ٺVyP((g;TCIh)w}\']sa;]mIXX#M(6qY t#ɿn;Om']*<ۛ5$NWX?[ݶϟn p$( Չ⃏oht˶[څθK._Z5yTo$Zu֝Ll'莉tQڟקXvnzUh-=  Ԃ8,賞EmK%n@^ <?1> @o߱^z'7Dz  o{s~a3jA`oַ/S};G߼vZ1sR距J%Y2Me;yw].Td8t>lKض'YH(P P;l @` b#7u'NUo=}1/&dY=?91Ϳ ԙss/z{>}ʙ_:ɓ\֣l$>ndU}r9_~?[^'>;T bL:W>ݮ}sVu,B%Uu I zWV6!J%9їm=Jo>e#_=6op/ןϽN<o_^~>I2͓ݗ>|W˯Vߧ(Ev%]mdj:19NUouez+ף5.GwM z`A} F@NݡD^QJ/;d_qs[_{ީ@VSw|qvS#tӟȇ'F,>ڿ\qzw?~YWܼv%;flEk6g$z~:ɘ[2eɻ|n;'}b;v2[T% O cV@6|oS7Qu8ُ}Yy~9p !B`>|9NrRs''OOMR{n~pEe~\[,:I.Dی]o;T,oi_.p"zN1YOe۲hd`='SQew!Z`֧A Ǎ47ᲭG)=JXl;ٗm='п IGTr󒷝,"&Eja˾{O-OҶ6ͷlካ茳ϻnZvĿ7[JM1̞S%,N];yjsLz=ϭɾTx*u}$"hoV @7|S-xm;JߴGD_v?;'N~K{#(Q*Jo}O|Lr|-C&'ܨ|ٵkk~|܋/~kn/^Xs{¶uvsz*c:Q#Mm*տۤznl]uֽVlGY7XITvY]eHp@`d02B"VMvyOe[o?a +|;XzLZ{o{too}Λzp ~pƗyݮy ҿsd[Th-֣4_Gi֮eI|%$=c2ٮұX-(B1%U%5$  6F`@`Pa#7Q&>'zN66(2w/á;(g7޴uW+&G%S=9߼Ɖ_||퐻Cp? r?j vvnz7ݲkW\/?߾?Y imq֣CbhU9ήw]o;'}ng;J'E;gve,W.;= Wg='SQeIlig p# T7ݮR7zIƛ~&^{1{;_CA2d,@rs_+Nowɑu'\$.Ή`Syđxmز^{ocMs ( 3{L95=aa~z;v-ؽ{n;w۱k]w(+v;n[/u~Kڹ߾innN\wmIV߶}`v6Ow#۝ y}?ю2=Ƨc؎eQM]>TF_n& <6F 0(=lnRzC7}cb`oj}w;@2n۾s?|%fJ.뒩.[I\*I:1RzΎ6#Υ誥z|.縟9z'=)ՕTu-g:f#zNsu.r|sN讅_ΖOE}-+$ː``/@$&oG$A>'&N]g[2nLz>?x_{UQ 0Tw_N߫9Iɘ9y/&wQWRк[19U~i#}~W㡉V\?눶tjilKFuvSsdG:Mc|S(;f(k& @ ` @ 0(kqߨkuso]vzӟ& Ngݲew;r/ߴi w(b)ig;>dBJTr2MȔ.N%uc_鸞[ѴQ~Nyv?L.Φ}[M\_j:b_N77ǖ[\ٖ\r/&˧6v?"BRlD! 7te7woˤ~[S}{ ͚M_g/'󈉒/XR l'ztv6S,K[~ʧ(gԋ.+K(.-ZJNFLTo='O-igvq<9}QYOEd  N@ ` @ ^z7֣}:1aHu'[ gm='1jSO}f!o@: ֙oJ6tq*7!ۭPS|z(F>%il|;uc^Ro# Onu_t:$NN}Nd?q_Q}[Xm[F,źeݔ$M(@ @ 0H%B1o='Q`TWot+5+;kE=!P$Ac;#}~y2(S=MD2&ir\z~xsTjS_sj\K+sNzMe2]휌T;4nΗ(S]vC,]o='/e7   lM  @ݸb;'O%d!N0𧲱e损~޺מ{#N "ϜIW¥N$Z42=m5|>z^lTz(Ǣ9X2nފz؟cJscenwdIWQl!b @ ` @`ДtC}ïT _N $ OL=Q>ڻ''',Q 7\x~d'O9L2&Q:/K߱SO%N(T'=&Nwo?O_W'ƭ?ǿǯm)v ,Y%NR96NJ9K?{n+UR?,kzd~n_.kY蓮K )* ``h@`= i7϶ Q7G̀~+w/ ֔n5t(e[L: |;ɗl}7T?-~u5>ܿuɲ9*&q-t?FƘvz:svŹZW{QF>*꧴X6V֓@WI' pĉ77g ~߱iӆ{jp ւ_O~߸6)N>I*iOy'QF=&y,96e=?I?WtPKse.;s}d.y9}]wgֈkn 6>CӷD\'Q66?NMNn;/Nq2e;R;tR/v|2).Ͻ(s3D_'S3}uI3֣ߺelgd{GmEh˚m=vzZ'YH(P >`pt ?TɿuےND'0W+8~.P *_|l҉5\L(`Z:i-݉uےp}u/I,\e|IO(5lE1ϺdN5w]J:ٱ6"㸜nb]._j;nĿ6!  
@IrYte3_:#{[yDaS "n~o+>r57lĤʺdP$PIu'1X؇dzs*h)s.&~Ͻ4i_URm}4Cy{NXXuضru8\imK-_$SC  e&ntHZ},ZrY+׿zXn ;Iړ1!n[RIeLEmؗt WK*vC j>Ur2J[Uz.QOX2e|r2Wz=N(hKWz72Hwq[>81  _ l 0hMO!&sRx8шRk[{c ?]ƛo~_ѭnS$+'ńP'QJ|䷳c[WI|\ QϹ<>R_VmTlKX.Z96Ө׫,b^inضT֗|I룟w[h`@M/=Ƈ >hnot5xCmIMzI=qMS Љ$n~u^:j'=pqƵ^L.ڎRv;}E)==v^%/=ڽܜUm2鹏(u[6Bd8n \֝_XS o@3&im"K}Yg][z[_6†@Jg|g}N/ٝZw)3ԯptϭPusRumu+(S=g5DuE6+e}9>ndI-_onӁ@`!:L@ d6<Q"NF$ @~鍯L}^=7mDB %o{nۮEJNR2=bru'ejռb[-/?RTj-/)ztեյڥhtƏc @`X a=3 @蝩M:agcb(q;Ǵ;wOy>k۶SY+'￿R{"pێIu'eNm;OU*O?zǤ_vW>ۗ8'9}һNsqi?޺eO/7&:7Q@ '*p" ~(Bۖ#q'_oxÖ}~>M[O~kNIV$,J tNʝ;q%G]up{{\8?Ͻp?䴞/J.lo=ʨ?>\}SXk'suh^i1$M(@腄@#H5ws.:9|Ntne5io*~K?(F@ss|XTt}D)סuX{ۧzSucL+cq/깍2QWN<d;.f:eHѬ=ɣv$nL@ @%N mnbͭuk2-Mouw}sW\uݏ?ПkU |}K.%ן]-CI_s'M$cюmrz^vqQjNTө>F]tϭ]kt|tv2^^ ݼ^8vldK-*F 0 @` q7&7 '~BniלLYדv2^wQoeV'ٮu}>|Uy:&ګwO/{].;7lw 辙s]C:oqb@0~'CX+-M{*KoksYS?]{]۝Iw旎}|nߐm6F[0 T?X3|!@&F (C%!8 |K?;==u"2'7S#ʛɞ 鶭K:No%9At/#~z*D[7!]ϣTZUbߢܶ?J-v,z'\蓮v.ƱMIDԘn( dpD(Cɐmjkˮ|kfL\#D??sCWL>_Sxzݼ]RYo2uYv:wǸ޶ƲnW#^zc'FNWBmvſx!!ԙemVA]FQJ? ? [CKSb4(oG<w1$SҘp:At+/tۖ׸q~EU39.CYru/Nji+.ڝNTVc.z2%H\EO|@kF9\[R'gdH]uՏw\u ]Q|S_>'$גdK1aO4w}׮KzNezumQWϿ[qb[r}Iw},(uMɻM(``O>K Љ 6eA1Yxӻ? :͉z/>siX(}55;$sm/S<Ŷ=NVǮDvGC:X5zѶލ,|*'Hҟ ilg:q@7*m{|OӡvIu^+7XgXzs'}  @-nxî$c2ﻊ%Z p˭m;?sIX;KUk&U*^Ks|KvdؿZ~O:Ǵ2c|bnUI[y`A(#' +7QwbLbNm }]k&x Iu[t>SHi[kޫXi8~Z׋ɍmekmWJ:Ƣ7Hҟ @ltD5 lIE9}OGD3֧OҺԶ)lwJ;QԗlY&5yNeYqLs͡۸ضݸ1.*,vE'o@glF@ێIEKRw'2覯t;'=7^&_ 5%3@C&nU,|R;EX6O|ҒP-LQ$vi,mk4{Wnۋ̍S>G¿ @}%@_9 $@%J' Vbee:kg(R?W^v5-L^2LKp?no;'5=/\Oo_7Rm:nIc:MFLl"v ;6zgF @X" \`_.5Ev{?E>_QO2{[w2Tw-9]sUQh7xȍ3(_ˇ X3*Nd &:)yi$\:x~B`>˖ܾ6,u$G]>'>ۦ~%Nj<]-%U^|*8@eYIMM'9al&Z_{;nݶS\cW^wM7oە)>lڐ[wq9TRkis}M9'ARrshд֐k 2% )ѵ.L?w]rɺ|~z>JǸ^|.Qo,&MgFq3S@5#@N(ˁ 0b2H'|en~!^SWqϧؓ}F:'tBJ߾2lj|mmfqt@L g8@u&' S Ezsoqj D.]vq7ˬ/էv.u'S b8n}QR%t@z``=3& b@IT[o:ƸFji?5޶}o'Ȗ)5dJ6I\q!Ikoqǎ]Jci9K/iR.#Pq\ܣݢw[b1 @"H3 %IL*J7?xޛ5?pul^-W\y]8^_?G_;}"XZT,o(<@CvB :h %鶭7@/~x6MWR]|Q:y/;+!݇*źeß ;60@ACb(鶭/r˭lTRWh^1[m}>c_o]ĭKX.Z5"Q!@C Ψ D*&TNzC^y νt ]L:h5d~RM{t=W\ >@``N  P_wHc%/z۶o߾J׸h:ٖ~jǺ2=mc;J>4 Qʿde18 @M >@"ɖl;Ȼkb>xNyFԗ%_}ent]bh@6*p" :NiLbb6v7~}XZ~p?ɬWUv}*^1NXo=WK.wXk@Xol`|@pbeq2660@J?.l&%sɿ};];b]T]K!o@CN !?AL@ tHY AC<꾦a8ǺGuwX.Q @6*q$ pRe,_e+~7z@S92y)% }c:G|i 6 @` p'5ԧA/l CW]74Ltn}F=,sR;c*[`A֙|$#DV@q~n+m7wio[2գ/WS\~Xi @` 0'A;^A}¯s[(m~9'۹;vLΎuNc¯zcv:5?}tl˵l@XgS<>C$]/M\ yt2o8oXlX  @x'@%]235@p;oy[jgK-">|cYzޟ>:nw  E j/f @vJfOMN::nָεwdL[SQK8  @idjK=zbbQ]em]%KkQ6ѾL:Ttǩ)F%.zx Ԅ59,@8k.=хOӁRwMϩ휔χi[2&ߩ.Ug[2-kO @jl arF@IRL~g4Hov@lXhǸ{';V]۠C*E J.& @`591룷zxJU:%1&n'mKo Xڟ=,~$ @eqOw:*6V%?(X|RY@vl b~F$5PlV[2۱]}8۹/cC*I J6& @`4|o8b|bzro8vOmݍ6nc;J*Q\x T>yL@ lٲk:뼾7~ЉuQٝY}D䋥c!@'@O!  POi||\j|lGPRz7G#u]o[*B ( @`μXnݸaOp8TIEc.&[t#!@!@mN%  P7eb|볢] pRd۲0mJ1.%ۇ Ԋ:,@=lcKtq=\U>=/J۶neҸhC*M J>&@~;ĻO=~+m0SD]`G_&wD;oۏ Ԋ:,@ LNXdW D`vLO dI{jcۺeꏶbT="C|r Q!0^";QY(s&%eHommXAB*I J6& @~6M~+t@L.pюz_҇c}QJ@F#sY( $w2bG ,J l) eLɻu+qjsd@ 6*s( 8so_wMf7dV2ȗ+c_죝k| T:]L@?I3S(VVkZf{YncRݦCc : @eq=9ۃp%6iDW^rI}̧Fruk 0S!ԓ?xu=WȪD`|b|GG4bu7_O}ڷuH@@-P" T/{]3^{-𜯛mm5 'П"&@~;EulVNJRGHCe+%Ymڥq9\,>@@e PS!T>/q5W{%p[XI6Z{* Z`֧A>_bf7;f.1/+c @=6jY  !w4ẘ[o{( @ 6 ֎ O?OZ^ ;h˾U's ԝu?ìPSz7 t€ yyͰ%uep #@;:A87n>lӚtH'"P=gI$wK CR @-lfN#O~=H6ph};dx,ڥq9\,>@@e PS! ?}>zˋϖ]|6rvTmm5 'П"&@wIY|[0$pġv KWSs P l4@GϞtɉ+f.38pR'[vi|4+k 0 TY>iߩ ߞ\^k'B{&َ1-5T}qe@56j}zY Џiw8x߻4?';U˴2n@:l )aBK@|c f/8d wNڧ9=bEmv P;l @C@yos ~Իæ'{ yڎRzjO v/#B GMMguкfggq5/ mg[Gs|*]k@lʙfHϾq~C|tD'NcR2_IDAT.>t;R{Y>`$ @c~o&]`:La wN%}V)&Gu׻X@l̩f֞ϳG}XGGz!sX[Y2uBҘh+uQ>!@%@eO .8' 3 ɉ_{)%/J:c P+lt@#Pd_7"#ՅÎ;&kIv]}FBjE ZN@`07T7m̈R'{=5KL}NʻC2/R,EC T:YL08fkNJ1 a#0;;3=f ֻmS>il@@ PS  G͞06v`GeG<谦[LT{[F|EG@@ P! 
11q~pأ +sRn*'Q:G>\ J`ҧC 81܈Tw=?S1&^|顺ԷZ;E;-80 @IcMϟxNzlƫ/nܲ3 ȫ8}eR2,źcGBjM Z^@`01ͫ S3S~c[N`=J'k!Dzu `r C/Sg6}H6Qix6-ٹd]>'i1߶,zl@@%@hw-[6@`ظa[^yoL(vRem_;]}gk9-O Ћ! 0Ͽyc O֓]<䎯3V!M£d^R~QO}n6dAq* Ћ F{1q ,H/=w),W}Zm[}"˷)eHp@ !Fg _ô"? yD |KX/߱_.ێq:6""t@fz1@5bG0>6nƿްafr4mg_j˯";<@J@/|@Q.e9_-&1qW^dgEFvoوS9T @`0lg@ NC[퀆c?<;h{%^$9J&@)* @`H0$'i@$>&'WW&XZl>ѠC0``s g{0t}|yΓXt^+ɏl6e)(uUZE#:  Q @`{|toMS;M/ `u PilT1y@]"wa$0559?4S/&d_҇bOcrhGjci@D@@@/p@9o,?dy#B`>z &N[:ƶT"}q &ޒ)&@@M@/n@1x'&^]%$z/D=&sbG1ѷ.֣?J@P jJ _x;Mt,k A컱@ImI,1e>cH^XZl>ѠCz jJ`݇  @`(Ō@$Ų6ri, K&'^'za&n ^f@?4o@D@A ΘL!ݎ} y  mY/LZ; &4  Ԉk$N<ɻ4Om ET*>'9zR>-` ^BF عa%rђX :8zO^u'jg] |8ǖ?VC֓I!1 OAEYnCOȸIx@1Q'뒽l؇t'Q/'Ku*Fc @ 腌@5!01RrXz"E4׽Nv[IJ-Gj& @z@@ ,p|  "ϡpQvn}Ŷ9ݛsjt~ A@4xQ @&'X,%@`؇>rz/MoĤߺu@``O z}ăƞ&p{b1iw&^~'@bm٨+~ Ŏ !@ p  TW<2w^[fv]쉍Po}~I:뒎_)q-݇⤧E~  @` ŋ@&pG>230^S]\t{J}ض?'ctN;Oexcq@}$0  T?b@ p_!t^G.wL˵u%C7JKo' 0h|`5 PiGzJH?/ۇ⬧qN݇|cɾ^ ~I  @`E 8f'NiC/9h?o>;;vۧ2&ֵ.'}Y#BVBOm  5b*|Hn ˮImIӯߺ_>NsRc-+,@Xcz@@ (i%H7v,@BO 3@_St 0;tCIvz˞Ks[Kxh(5}@Ћ*Hl!:S@_ LLN uI%:[6-=FN͛n)DO z@@(Y簃/MPstO`rb<&z8aOeS]jc}hq%U,G,c: @o @1 (2#;CP"Zv}^G:Q/snj2@@'E  T@6S@ L+q.GL5ʧ[ 0 l 4@X+‘k'@N?܆bPsvB+;-9_ @XSz@@ LOXi3e@\B=JS_^Sg=J(@Q @qS?+Jcn]|u9&J-Tv,i}{Z 5$.  T@L ]?BF%ɷl;U}NjF~ XG @`5jѶ_P"^.~'JVKCBJ f0@kJO)N: 2R;ɏt]vcu$|b : @`-ŋ@$0>W*xژNOL:؜ش/ξT-I  Q @@S @̉y1~e-5(sQ @}'@3 >X}"K5 aCZYL;m[tToI\ Ћ*@ npa 7>fvM(MsvY46uQ @CE :L@@13˾ Nޣn_;f@wG쬰\@Xol`|@+%0WVv'0aF?Nv.}Iw}'[rj|ڧM$U /! \B0`JW|IbϺϕ菺bm[  6#@O*_dž }KG?\}ha1c$cD]1юzZC }JJD_ʘ+N/GZYNob߮1H@``]3( ( H5%Pq:%iE)۸fY PE~qܙ3 &0H_,-1$_uSnbb<: @`f(@8y 7Td7ZEϫg_*#Qq `@L Zj/pIZ7ۖ[xKFBCq ,,cň,YMrޮmZ';} @JqX˼ @`x3@ K`jtXgR|: @` f &Ju!05q@K%IZX쳌u' 5*! >0\ b͌D\vܮ>b : @+^: ? |p@N|2;' Gc@G`|lanFgd 7#D_ԇ "Hn ԉ_d-kM`jbW: >MSSCCwJ . ^20~Nn +F#@OO9`K`zz*wM2c[r nr/6 #ݳ"rLNL ɾezt@ qb@@@`|||~!tJ;kSnW$ @``j X[V#@ُsc) PWl̲.@&Yf+%091=z˕K;@Pq >0jgB`r n^́5@#N X> P]|gt+PN P/^ZV@FN@٦AWFw X͋G% <X5C:/+e?E}u8vu1 jl!@Xo#냞Q+@,[Q?v}+#~@@0\g &ݩ֥vF_mn  $@  jX7uƘ LMMW:M݉ex!@B uΠVOzP_Ei^\lE]CviP@,v/ A@O@O-=|[0$ݒ"C{j &a?8ÐF` " P!s`NS0_f/7g| .!  ~px36+ @L;鱾c\۵ l+B?y>ʌPU41On0|E  @/ V: |V tiiEY D .@%0?c'099'zI{cCօ낝A!zP_ovsMI}]_ *MJ/C+I @]O-Z]l9. @# ,x@>`H,'055Qb&穽=kW @@ű˦Az'nE&>>YRff٩N}]B#@w 0tfgnRLCD`vfLhG+\  %60)@ 1:@h" 0z6l9$Ic PK񅱖 dQJ`~bXד˺քt}^Yi^! e/}N!@`L7֎&=ՑT_XibE=ř֥v @ 6N!$U&aC'-+Mm[m&-~@@_t @v+&0355%$N: Q"0QZ! @f6FH/T1g@^!]|fg{7U_. t$@GD@N <1jhLOMŏ/ &LD`j'Z ٙaω5!2 @ᅱ7 u%{v~`];ܻ^&kC֔k  |`p ԧ>٩f Rlp avO`fN-51ㄣ/1&k Wl/Cvwz@ 81XǶ `2Bh%0}Њ -;N-#=OvKD=mӮ.ņ }GCI6C^B`vfZ9JCKy]gʟvm$ @`nlҺpd>B}Fv\}H@P``N = @ASSz$W]!`َj71S@XsSk#B@\wns5?00? 
mrcѨWN|uϞֺ5_O9t [aELAD`br&F3J- Cٍ/N{ "O3iF$F(aB<ؓ&y6Lb?WGSgW_8~rђ`8͍BKq{`뢵$i(ŗӴ>* rږmU0As =hת_E֯})0enCwuWZ埾=o]m|d w3 R >EyHʫ4 # ?|QOTڟ6{QY=" [&ϺYAD`vso&aٽkKg[szݍ T"ss 8q!$-$-S@_4'~'OsNIуv@Ȋ8ϼI+% `sis]nl[J5bX dŜkYG" S$ `0UlkfW~߿~/uwXhr4` m^qg7e:eE@l1p lŽ[{Zmk!ߍK`In`w aݝ:5xy$>b2L+77m᫮KKQT+kJ;`s{sA8Es>,K(."0%rRՔOgV;Z1կSk{Paw;Aݎ&BЅ1 L"F@׉FC6o2>|?^Unb?WJ?:Nge<ؾ@r%@q[ҕ=ܓΛyuسg ++Mn17-Ro:pEĀ~'K `6Y YAM Z.4EZj+_w/ٳœN$zx <}Z YDn)TMo y/;pWJ̔P-pu\qپoz5hOG(YS 6m8N)&`>oT"8餿D@Vo*=L$Dq|tu"P2rJ֡j `M'GO]Z\1kgϠh`o~5ejy}4`Ahl?9Iz}pho0Ej\+aE@{ PgS};" I@fU݅"W!qKGjn] n0 b [jyK>xSa!7}mqM$&BU@ @p{Ç׏eRt]܉[fqw# m+Zj̓,&}ݕzx ?a> l " [A@VP9rMqfOATÞ?z#=ȗ >;oGX(D?y_#+"0]0*[=ztn߾}g{CIg?ʊ [267ЦGpy_-./~ODDʀ[g>6<6,aD@6MjOe~o摇嗎I>Ʊk źw_uOOsgeE@B@Q@o_=tP 3_~/=g=ncS/Zz1~1\YmW]q| \,56>*sfwtUw (@ر/_㟯գ:Vd6ܑN6xՏ}?ۣZXo}b۪m8޲a]AD`4Q*_8E/^:n|ѮTt&^$@뵪Bb~7`߲ 1A#.B^ٮfQAfc}>_Q$ݹ@_H%sLbV -7^.6?g?_k'uڸ#+"q8CPu]ʷ/~wjnwڏDN(th6;߼$`RF lܧ?W(}6JP GF*/ׇ8tlB q0)j_gWz'~INN[՜8s0`MlF6JP ACsd'?&mjf+B/bA{4(:sf1s׏{')}A#7h$` W*D@H@sHB ][ol=?rGQicsXXM7Oǖ|%ij|h`"l*T${キ|Sǿ?{_2|VKӪoG FpӁy8 @;h4}3YVQ /QuMkVZwlv_4[b{5pEj`poIw@xG;| Gݡ:i @iV #<]uͭc_N+vI$In¿ +sfݹh];?6 PSr؟Θߐm-M})#$T0 [R{ꥧ~-}/kK!ژ׋(ajT]Zm_rs`Df(nL di0 #wp:lL:}?ލwt@{,@t H)@Z6sh(٬xG},I} "0 9PS\x_F ʟR׿t`=_v^;7,Gp.F l}nUչ.˟z={Ȥ?z?9tJ>䢽Y1c#Q  `UG`cwW9[SM(jTuNv6v\~]v)P;HqPED@&%oIɩ\. [M6#E?eJxQuI;?h^aM؏qOG#<:pD@& `h*6/+wRRy[U+L; TC :w>λo= եUfh JdѮ' Pt<~-HgwqPDkfdܡF n!Պ=BO'6σ|Y)7 "0)|Dl DGflegJ_C)Xqo Pay(ÎuP>L "PYAOmڵkOj~z qʿAhk0}#, 9E ?HgSi1:t 9r$ݍuq:[!([!]Fj5 9"ȇ4_OlQ4 V !`&9%ШqgS:BD3ՔsTnqXb3; @:E?k9:>\ "Pه6'giaRG{'`X<[ "  `T4zVVVwm`MEgj5|x1A'cC@:tª?7=P'CVD`Br&b!`{7ޒ_fRG9JHT(@.ī 1){$IE@̖eNGl1osGe}" c0&0e/0rC 9i|H a@1$FSQ-} DQ>^ˊL@@T8lz䨻l!A9^qai9j.EG`Їx0XH!7jՁ>?1ƙ" 04)a& GU2,=O" c0,e-0:y q_+=Oѵ@ (t'I3ںQ%3taG;(D@F 08BQeH=S/l@z:8qf`_AD`U@s99F/LNqϩ"PHr mQ $-QQmIjGi>Ƿ:urfgE2 zvלM3LME D 5m.[!=n7'Т^M3d0MVD` L/$$)}7W,LpMY`*TDD+.&z 07FjDeg2)̃4gAɸTah!\tU{GA>žE,0K=mM7Lу>g4oS*-:joZ8b{ o{@h)_'U $悄 )a0y{ՖZB@8qz5XKFۍcL<-ip@5!PXsڄT !6qzl1v{pF=A@xnvS{O{5qOo һTk7isY|\1P VGQՂf~D VGCu"P0r aܑ m.1ӎOsla&.zAJl6/ ivjqRI) 6;8@::#O_Tŕe[   i "  ~m@^ [n:Օj ۑ|I+3qZHx݉'X7烤;gsIm(ƑL*v آF\yeea̓ { 8m2 +-%G:m۶ Q :vHAD`}q #@B FۜyG:3odql??vHa7dpcc',?ac9CA&%GS8Wm!ySh;Lb,Zq3I☿qTϺW Xa@< 5Dq/:t Й1$!(ڴ`ɍ} CZg9E@0FbMQ , 8zqN᧵l C5-y7gt'ũW Ha@( eR(%@@^ҹY4 <e/$=BO _?:Q L IBAbO|~0F Xp?: `cVYE@0HA @h  :8;Ae/{!D}Gd: 6XD@# `<^]\ X >Ñ?G ^}.$~ `|;_ " c0,e-Cƾw q>(Q`~;0/X<>?u㒔UfYl5"BxG 9h~%oÂ=6/<, "0!9SB`@D *>8sx,ş֒F @~#s<-3/2ED`P VSl(a>/h[/ZC<+MD@0KB^t>ĆQ:%{Z/g7E} " c0&0e/[4@ ѧ̏}Ai<&#>p6Q𑞍iA@_\kѡsq}eJf1G𢞍Ȧr, "0~QDYE: iqXOS|S{4$c>6}kHRqo}{<>㰃(l " #З؈4ܧEC+>/,EzdE@F / )K Ϧe=|&l/"0}IYJK`H{IkF,3C@uD?}+%|aBIENDB`coq-8.15.0/ide/coqide/Make000066400000000000000000000001131417001151100151560ustar00rootroot00000000000000interface.mli xmlprotocol.mli xmlprotocol.ml ide_slave.ml coqidetop.mllib coq-8.15.0/ide/coqide/config_lexer.mli000066400000000000000000000014351417001151100175350ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string list Util.String.Map.t -> unit val load_file : string -> string list Util.String.Map.t coq-8.15.0/ide/coqide/config_lexer.mll000066400000000000000000000042011417001151100175320ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* s))::l) lexbuf } |ignore+ { List.rev l} and string = parse | '"' { Buffer.add_char 
coq-8.15.0/ide/coqide/configwin.ml
(*********************************************************************************)
(* Cameleon *)
(* *)
(* Copyright (C) 2005 Institut National de Recherche en Informatique et *)
(* en Automatique. All rights reserved. *)
(* *)
(* This program is free software; you can redistribute it and/or modify *)
(* it under the terms of the GNU Library General Public License as *)
(* published by the Free Software Foundation; either version 2 of the *)
(* License, or any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Library General Public License for more details. *)
(* *)
(* You should have received a copy of the GNU Library General Public *)
(* License along with this program; if not, write to the Free Software *)
(* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA *)
(* 02111-1307 USA *)
(* *)
(* Contact: Maxence.Guesdon@inria.fr *)
(* *)
(*********************************************************************************)

type parameter_kind = Configwin_types.parameter_kind

type configuration_structure =
  Configwin_types.configuration_structure =
    Section of string * GtkStock.id option * parameter_kind list
  | Section_list of string * GtkStock.id option * configuration_structure list

type return_button =
  Configwin_types.return_button =
    Return_apply
  | Return_ok
  | Return_cancel

let string = Configwin_ihm.string
(*
let strings = Configwin_ihm.strings
let list = Configwin_ihm.list
*)
let bool = Configwin_ihm.bool
let combo = Configwin_ihm.combo
let custom = Configwin_ihm.custom
let modifiers = Configwin_ihm.modifiers

let edit ?(apply=(fun () -> ())) title ?parent ?width ?height conf_struct_list =
  Configwin_ihm.edit ~with_apply: true ~apply title ?parent ?width ?height conf_struct_list
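(* Illustrative sketch, not part of the original sources: one way the wrapper
   above could be used once linked in as module Configwin.  The preference
   labels, default values and callbacks are invented for the example; the
   constructors and functions themselves are the ones defined above and
   documented in configwin.mli below. *)
let _example_preferences_dialog () =
  let name =
    Configwin.string ~help:"Name shown in the window title"
      ~f:(fun s -> print_endline ("new name: " ^ s))
      "Name" "CoqIDE user"
  in
  let auto_save =
    Configwin.bool ~help:"Periodically save the current buffer"
      ~f:(fun b -> Printf.printf "auto save: %b\n" b)
      "Auto save" true
  in
  (* One section, no stock icon, two parameters. *)
  let structure = [Configwin.Section ("General", None, [name; auto_save])] in
  match Configwin.edit ~apply:(fun () -> print_endline "applied") "Preferences" structure with
  | Configwin.Return_ok | Configwin.Return_apply -> print_endline "changes accepted"
  | Configwin.Return_cancel -> print_endline "changes discarded"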
coq-8.15.0/ide/coqide/configwin.mli
(*********************************************************************************)
(* Cameleon *)
(* *)
(* Copyright (C) 2005 Institut National de Recherche en Informatique et *)
(* en Automatique. All rights reserved. *)
(* *)
(* This program is free software; you can redistribute it and/or modify *)
(* it under the terms of the GNU Library General Public License as *)
(* published by the Free Software Foundation; either version 2 of the *)
(* License, or any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Library General Public License for more details. *)
(* *)
(* You should have received a copy of the GNU Library General Public *)
(* License along with this program; if not, write to the Free Software *)
(* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA *)
(* 02111-1307 USA *)
(* *)
(* Contact: Maxence.Guesdon@inria.fr *)
(* *)
(*********************************************************************************)

(** This module is the interface of the Configwin library. *)

(** {2 Types} *)

(** This type represents the different kinds of parameters. *)
type parameter_kind;;

(** This type represents the structure of the configuration window. *)
type configuration_structure =
  | Section of string * GtkStock.id option * parameter_kind list
    (** label of the section, icon, parameters *)
  | Section_list of string * GtkStock.id option * configuration_structure list
    (** label of the section, icon, list of the sub sections *)
;;

(** To indicate what button pushed the user when the window is closed. *)
type return_button =
    Return_apply
      (** The user clicked on Apply at least once before closing the window
          with Cancel or the window manager. *)
  | Return_ok
      (** The user closed the window with the ok button. *)
  | Return_cancel
      (** The user closed the window with the cancel button or the window
          manager but never clicked on the apply button.*)

(** {2 Functions to create parameters} *)

(** [string label value] creates a string parameter.
    @param editable indicate if the value is editable (default is [true]).
    @param expand indicate if the entry widget must expand or not (default is [true]).
    @param help an optional help message.
    @param f the function called to apply the value (default function does nothing). *)
val string : ?editable: bool -> ?expand: bool -> ?help: string ->
  ?f: (string -> unit) -> string -> string -> parameter_kind

(** [bool label value] creates a boolean parameter.
    @param editable indicate if the value is editable (default is [true]).
    @param help an optional help message.
    @param f the function called to apply the value (default function does nothing). *)
val bool : ?editable: bool -> ?help: string ->
  ?f: (bool -> unit) -> string -> bool -> parameter_kind

(*
(** [strings label value] creates a string list parameter.
    @param editable indicate if the value is editable (default is [true]).
    @param help an optional help message.
    @param f the function called to apply the value (default function does nothing).
    @param add the function returning a list of strings when the user wants
      to add strings (default returns an empty list).
    @param eq the comparison function, used not to have doubles in list.
      Default is [Pervasives.(=)]. If you want to allow doubles in the list,
      give a function always returning false. *)
val strings : ?editable: bool -> ?help: string ->
  ?f: (string list -> unit) ->
  ?eq: (string -> string -> bool) ->
  ?add: (unit -> string list) ->
  string -> string list -> parameter_kind

(** [list label f_strings value] creates a list parameter. [f_strings] is a
    function taking a value and returning a list of strings to display it.
    The list length should be the same for any value, and the same as the
    titles list length. The [value] is the initial list.
    @param editable indicate if the value is editable (default is [true]).
    @param help an optional help message.
    @param f the function called to apply the value (default function does nothing).
    @param eq the comparison function, used not to have doubles in list.
      Default is [Pervasives.(=)]. If you want to allow doubles in the list,
      give a function always returning false.
    @param edit an optional function to use to edit an element of the list.
      The function returns an element, no matter if element was changed or not.
      When this function is given, a "Edit" button appears next to the list.
    @param add the function returning a list of values when the user wants
      to add values (default returns an empty list).
    @param titles an optional list of titles for the list. If the [f_strings]
      function returns a list with more than one element, then you must give
      a list of titles.
    @param color an optional function returning the optional color for a given
      element. This color is used to display the element in the list.
      The default function returns no color for any element. *)
val list : ?editable: bool -> ?help: string ->
  ?f: ('a list -> unit) ->
  ?eq: ('a -> 'a -> bool) ->
  ?edit: ('a -> 'a) ->
  ?add: (unit -> 'a list) ->
  ?titles: string list ->
  ?color: ('a -> string option) ->
  string -> ('a -> string list) -> 'a list -> parameter_kind
*)

(** [combo label choices value] creates a combo parameter.
    @param editable indicate if the value is editable (default is [true]).
    @param expand indicate if the entry widget must expand or not (default is [true]).
    @param help an optional help message.
    @param f the function called to apply the value (default function does nothing).
    @param new_allowed indicate if a entry not in the list of choices is accepted
      (default is [false]).
    @param blank_allowed indicate if the empty selection [""] is accepted
      (default is [false]). *)
val combo : ?editable: bool -> ?expand: bool -> ?help: string ->
  ?f: (string -> unit) ->
  ?new_allowed: bool ->
  ?blank_allowed: bool ->
  string -> string list -> string -> parameter_kind

val modifiers : ?editable: bool -> ?expand: bool -> ?help: string ->
  ?allow:(Gdk.Tags.modifier list) ->
  ?f: (Gdk.Tags.modifier list -> unit) ->
  string -> Gdk.Tags.modifier list -> parameter_kind

(** [custom box f expand] creates a custom parameter, with the given [box],
    the [f] function is called when the user wants to apply his changes,
    and [expand] indicates if the box must expand in its father.
    @param label if a value is specified, a the box is packed into a frame. *)
val custom : ?label: string -> GPack.box -> (unit -> unit) -> bool -> parameter_kind

(** {2 Functions creating configuration windows and boxes} *)

(** This function takes a configuration structure and creates a window
    to configure the various parameters.
    @param apply this function is called when the apply button is clicked,
      after giving new values to parameters. *)
val edit :
  ?apply: (unit -> unit) ->
  string ->
  ?parent:GWindow.window ->
  ?width:int ->
  ?height:int ->
  configuration_structure list -> return_button
coq-8.15.0/ide/coqide/configwin_ihm.ml
(*********************************************************************************) (* Cameleon *) (* *) (* Copyright (C) 2005 Institut National de Recherche en Informatique et *) (* en Automatique. All rights reserved. *) (* *) (* This program is free software; you can redistribute it and/or modify *) (* it under the terms of the GNU Library General Public License as *) (* published by the Free Software Foundation; either version 2 of the *) (* License, or any later version. *) (* *) (* This program is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU Library General Public License for more details.
*) (* *) (* You should have received a copy of the GNU Library General Public *) (* License along with this program; if not, write to the Free Software *) (* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA *) (* 02111-1307 USA *) (* *) (* Contact: Maxence.Guesdon@inria.fr *) (* *) (*********************************************************************************) (** This module contains the gui functions of Configwin.*) open Configwin_types let set_help_tip wev = function | None -> () | Some help -> GtkBase.Widget.Tooltip.set_text wev#as_widget help let select_arch m m_osx = if Coq_config.arch = "Darwin" then m_osx else m (* How the modifiers are named in the preference box *) let modifiers_to_string m = let rec iter m s = match m with [] -> s | c :: m -> iter m (( match c with `CONTROL -> "" | `SHIFT -> "" | `LOCK -> "" | `META -> select_arch "" "" | `MOD1 -> "" | `MOD2 -> "" | `MOD3 -> "" | `MOD4 -> "" | `MOD5 -> "" | _ -> raise Not_found ) ^ s) in iter m "" class type widget = object method box : GObj.widget method apply : unit -> unit end let debug = false let dbg s = if debug then Minilib.log s else () (* (** This class builds a frame with a clist and two buttons : one to add items and one to remove the selected items. The class takes in parameter a function used to add items and a string list ref which is used to store the content of the clist. At last, a title for the frame is also in parameter, so that each instance of the class creates a frame. *) class ['a] list_selection_box (listref : 'a list ref) titles_opt help_opt f_edit_opt f_strings f_color (eq : 'a -> 'a -> bool) add_function title editable = let _ = dbg "list_selection_box" in let wev = GBin.event_box () in let wf = GBin.frame ~label: title ~packing: wev#add () in let hbox = GPack.hbox ~packing: wf#add () in (* the scroll window and the clist *) let wscroll = GBin.scrolled_window ~vpolicy: `AUTOMATIC ~hpolicy: `AUTOMATIC ~packing: (hbox#pack ~expand: true) () in let wlist = match titles_opt with None -> GList.clist ~selection_mode: `MULTIPLE ~titles_show: false ~packing: wscroll#add () | Some l -> GList.clist ~selection_mode: `MULTIPLE ~titles: l ~titles_show: true ~packing: wscroll#add () in let _ = set_help_tip wev help_opt in (* the vbox for the buttons *) let vbox_buttons = GPack.vbox () in let _ = if editable then let _ = hbox#pack ~expand: false vbox_buttons#coerce in () else () in let _ = dbg "list_selection_box: wb_add" in let wb_add = GButton.button ~label: Configwin_messages.mAdd ~packing: (vbox_buttons#pack ~expand:false ~padding:2) () in let wb_edit = GButton.button ~label: Configwin_messages.mEdit () in let _ = match f_edit_opt with None -> () | Some _ -> vbox_buttons#pack ~expand:false ~padding:2 wb_edit#coerce in let wb_up = GButton.button ~label: Configwin_messages.mUp ~packing: (vbox_buttons#pack ~expand:false ~padding:2) () in let wb_remove = GButton.button ~label: Configwin_messages.mRemove ~packing: (vbox_buttons#pack ~expand:false ~padding:2) () in let _ = dbg "list_selection_box: object(self)" in object (self) (** the list of selected rows *) val mutable list_select = [] (** This method returns the frame created. 
*) method box = wev method update l = (* set the new list in the provided listref *) listref := l; (* insert the elements in the clist *) wlist#freeze (); wlist#clear (); List.iter (fun ele -> ignore (wlist#append (f_strings ele)); match f_color ele with None -> () | Some c -> try wlist#set_row ~foreground: (`NAME c) (wlist#rows - 1) with _ -> () ) !listref; (match titles_opt with None -> wlist#columns_autosize () | Some _ -> GToolbox.autosize_clist wlist); wlist#thaw (); (* the list of selectd elements is now empty *) list_select <- [] (** Move up the selected rows. *) method up_selected = let rec iter n selrows l = match selrows with [] -> (l, []) | m :: qrows -> match l with [] -> ([],[]) | [_] -> (l,[]) | e1 :: e2 :: q when m = n + 1 -> let newl, newrows = iter (n+1) qrows (e1 :: q) in (e2 :: newl, n :: newrows) | e1 :: q -> let newl, newrows = iter (n+1) selrows q in (e1 :: newl, newrows) in let sorted_select = List.sort compare list_select in let new_list, new_rows = iter 0 sorted_select !listref in self#update new_list; List.iter (fun n -> wlist#select n 0) new_rows (** Make the user edit the first selected row. *) method edit_selected f_edit = let sorted_select = List.sort compare list_select in match sorted_select with [] -> () | n :: _ -> try let ele = List.nth !listref n in let ele2 = f_edit ele in let rec iter m = function [] -> [] | e :: q -> if n = m then ele2 :: q else e :: (iter (m+1) q) in self#update (iter 0 !listref); wlist#select n 0 with Not_found -> () initializer (* create the functions called when the buttons are clicked *) let f_add () = (* get the files to add with the function provided *) let l = add_function () in (* remove from the list the ones which are already in the listref, using the eq predicate *) let l2 = List.fold_left (fun acc -> fun ele -> if List.exists (eq ele) acc then acc else acc @ [ele]) !listref l in self#update l2 in let f_remove () = (* remove the selected items from the listref and the clist *) let rec iter n = function [] -> [] | h :: q -> if List.mem n list_select then iter (n+1) q else h :: (iter (n+1) q) in let new_list = iter 0 !listref in self#update new_list in let _ = dbg "list_selection_box: connecting wb_add" in (* connect the functions to the buttons *) ignore (wb_add#connect#clicked ~callback:f_add); let _ = dbg "list_selection_box: connecting wb_remove" in ignore (wb_remove#connect#clicked ~callback:f_remove); let _ = dbg "list_selection_box: connecting wb_up" in ignore (wb_up#connect#clicked ~callback:(fun () -> self#up_selected)); ( match f_edit_opt with None -> () | Some f -> let _ = dbg "list_selection_box: connecting wb_edit" in ignore (wb_edit#connect#clicked ~callback:(fun () -> self#edit_selected f)) ); (* connect the selection and deselection of items in the clist *) let f_select ~row ~column ~event = try list_select <- row :: list_select with Failure _ -> () in let f_unselect ~row ~column ~event = try let new_list_select = List.filter (fun n -> n <> row) list_select in list_select <- new_list_select with Failure _ -> () in (* connect the select and deselect events *) let _ = dbg "list_selection_box: connecting select_row" in ignore(wlist#connect#select_row ~callback:f_select); let _ = dbg "list_selection_box: connecting unselect_row" in ignore(wlist#connect#unselect_row ~callback:f_unselect); (* initialize the clist with the listref *) self#update !listref end;; *) (** This class is used to build a box for a string parameter.*) class string_param_box param = let _ = dbg "string_param_box" in let hbox = GPack.hbox () 
in let wev = GBin.event_box ~packing: (hbox#pack ~expand: false ~padding: 2) () in let _wl = GMisc.label ~text: param.string_label ~packing: wev#add () in let we = GEdit.entry ~editable: param.string_editable ~packing: (hbox#pack ~expand: param.string_expand ~padding: 2) () in let _ = set_help_tip wev param.string_help in let _ = we#set_text (param.string_to_string param.string_value) in object (self) (** This method returns the main box ready to be packed. *) method box = hbox#coerce (** This method applies the new value of the parameter. *) method apply = let new_value = param.string_of_string we#text in if new_value <> param.string_value then let _ = param.string_f_apply new_value in param.string_value <- new_value else () end ;; (** This class is used to build a box for a combo parameter.*) class combo_param_box param = let _ = dbg "combo_param_box" in let hbox = GPack.hbox () in let wev = GBin.event_box ~packing: (hbox#pack ~expand: false ~padding: 2) () in let _wl = GMisc.label ~text: param.combo_label ~packing: wev#add () in let _ = set_help_tip wev param.combo_help in let get_value = if not param.combo_new_allowed then let wc = GEdit.combo_box_text ~strings: param.combo_choices ?active:(let rec aux i = function |[] -> None |h::_ when h = param.combo_value -> Some i |_::t -> aux (succ i) t in aux 0 param.combo_choices) ~packing: (hbox#pack ~expand: param.combo_expand ~padding: 2) () in fun () -> match GEdit.text_combo_get_active wc with |None -> "" |Some s -> s else let (wc,_) = GEdit.combo_box_entry_text ~strings: param.combo_choices ~packing: (hbox#pack ~expand: param.combo_expand ~padding: 2) () in let _ = wc#entry#set_editable param.combo_editable in let _ = wc#entry#set_text param.combo_value in fun () -> wc#entry#text in object (self) (** This method returns the main box ready to be packed. *) method box = hbox#coerce (** This method applies the new value of the parameter. *) method apply = let new_value = get_value () in if new_value <> param.combo_value then let _ = param.combo_f_apply new_value in param.combo_value <- new_value else () end ;; (** Class used to pack a custom box. *) class custom_param_box param = let _ = dbg "custom_param_box" in let top = match param.custom_framed with None -> param.custom_box#coerce | Some l -> let wf = GBin.frame ~label: l () in wf#add param.custom_box#coerce; wf#coerce in object (self) method box = top method apply = param.custom_f_apply () end (** This class is used to build a box for a text parameter.*) class text_param_box param = let _ = dbg "text_param_box" in let wf = GBin.frame ~label: param.string_label ~height: 100 () in let wev = GBin.event_box ~packing: wf#add () in let wscroll = GBin.scrolled_window ~vpolicy: `AUTOMATIC ~hpolicy: `AUTOMATIC ~packing: wev#add () in let wview = GText.view ~editable: param.string_editable ~packing: wscroll#add () in let _ = set_help_tip wev param.string_help in let _ = dbg "text_param_box: buffer creation" in let buffer = GText.buffer () in let _ = wview#set_buffer buffer in let _ = buffer#insert (param.string_to_string param.string_value) in let _ = dbg "text_param_box: object(self)" in object (self) val wview = wview (** This method returns the main box ready to be packed. *) method box = wf#coerce (** This method applies the new value of the parameter. 
*) method apply = let v = param.string_of_string (buffer#get_text ()) in if v <> param.string_value then ( dbg "apply new value!"; let _ = param.string_f_apply v in param.string_value <- v ) else () end ;; (** This class is used to build a box for a boolean parameter.*) class bool_param_box param = let _ = dbg "bool_param_box" in let wchk = GButton.check_button ~label: param.bool_label () in let _ = set_help_tip wchk param.bool_help in let _ = wchk#set_active param.bool_value in let _ = wchk#misc#set_sensitive param.bool_editable in object (self) (** This method returns the check button ready to be packed. *) method box = wchk#coerce (** This method applies the new value of the parameter. *) method apply = let new_value = wchk#active in if new_value <> param.bool_value then let _ = param.bool_f_apply new_value in param.bool_value <- new_value else () end ;; class modifiers_param_box param = let hbox = GPack.hbox () in let wev = GBin.event_box ~packing: (hbox#pack ~expand:true ~fill:true ~padding: 2) () in let _wl = GMisc.label ~text: param.md_label ~packing: wev#add () in let value = ref param.md_value in let _ = List.map (fun modifier -> let but = GButton.toggle_button ~label:(modifiers_to_string [modifier]) ~active:(List.mem modifier param.md_value) ~packing:(hbox#pack ~expand:false) () in ignore (but#connect#toggled ~callback:(fun _ -> if but#active then value := modifier::!value else value := List.filter ((<>) modifier) !value))) param.md_allow in let _ = set_help_tip wev param.md_help in object (self) (** This method returns the main box ready to be packed. *) method box = hbox#coerce (** This method applies the new value of the parameter. *) method apply = let new_value = !value in if new_value <> param.md_value then let _ = param.md_f_apply new_value in param.md_value <- new_value else () end ;; (* (** This class is used to build a box for a parameter whose values are a list.*) class ['a] list_param_box (param : 'a list_param) = let _ = dbg "list_param_box" in let listref = ref param.list_value in let frame_selection = new list_selection_box listref param.list_titles param.list_help param.list_f_edit param.list_strings param.list_color param.list_eq param.list_f_add param.list_label param.list_editable tt in object (self) (** This method returns the main box ready to be packed. *) method box = frame_selection#box#coerce (** This method applies the new value of the parameter. 
*) method apply = param.list_f_apply !listref ; param.list_value <- !listref end ;; *) (** This class creates a configuration box from a configuration structure *) class configuration_box conf_struct = let main_box = GPack.hbox () in let columns = new GTree.column_list in let icon_col = columns#add GtkStock.conv in let label_col = columns#add Gobject.Data.string in let box_col = columns#add Gobject.Data.caml in let () = columns#lock () in let pane = GPack.paned `HORIZONTAL ~packing:main_box#add () in (* Tree view part *) let scroll = GBin.scrolled_window ~hpolicy:`NEVER ~packing:pane#pack1 () in let tree = GTree.tree_store columns in let view = GTree.view ~model:tree ~headers_visible:false ~packing:scroll#add_with_viewport () in let selection = view#selection in let _ = selection#set_mode `SINGLE in let menu_box = GPack.vbox ~packing:pane#pack2 () in let renderer = (GTree.cell_renderer_pixbuf [], ["stock-id", icon_col]) in let col = GTree.view_column ~renderer () in let _ = view#append_column col in let renderer = (GTree.cell_renderer_text [], ["text", label_col]) in let col = GTree.view_column ~renderer () in let _ = view#append_column col in let make_param (main_box : #GPack.box) = function | String_param p -> let box = new string_param_box p in let _ = main_box#pack ~expand: false ~padding: 2 box#box in box | Combo_param p -> let box = new combo_param_box p in let _ = main_box#pack ~expand: false ~padding: 2 box#box in box | Text_param p -> let box = new text_param_box p in let _ = main_box#pack ~expand: p.string_expand ~padding: 2 box#box in box | Bool_param p -> let box = new bool_param_box p in let _ = main_box#pack ~expand: false ~padding: 2 box#box in box | List_param f -> let box = f () in let _ = main_box#pack ~expand: true ~padding: 2 box#box in box | Custom_param p -> let box = new custom_param_box p in let _ = main_box#pack ~expand: p.custom_expand ~padding: 2 box#box in box | Modifiers_param p -> let box = new modifiers_param_box p in let _ = main_box#pack ~expand: false ~padding: 2 box#box in box in let set_icon iter = function | None -> () | Some icon -> tree#set ~row:iter ~column:icon_col icon in (* Populate the tree *) let rec make_tree iter conf_struct = (* box is not shown at first *) let box = GPack.vbox ~packing:(menu_box#pack ~expand:true) ~show:false () in let new_iter = match iter with | None -> tree#append () | Some parent -> tree#append ~parent () in match conf_struct with | Section (label, icon, param_list) -> let params = List.map (make_param box) param_list in let widget = object method box = box#coerce method apply () = List.iter (fun param -> param#apply) params end in let () = tree#set ~row:new_iter ~column:label_col label in let () = set_icon new_iter icon in let () = tree#set ~row:new_iter ~column:box_col widget in () | Section_list (label, icon, struct_list) -> let widget = object (* Section_list does not contain any effect widget, so we do not have to apply anything. 
*) method apply () = () method box = box#coerce end in let () = tree#set ~row:new_iter ~column:label_col label in let () = set_icon new_iter icon in let () = tree#set ~row:new_iter ~column:box_col widget in List.iter (make_tree (Some new_iter)) struct_list in let () = List.iter (make_tree None) conf_struct in (* Dealing with signals *) let current_prop : widget option ref = ref None in let select_iter iter = let () = match !current_prop with | None -> () | Some box -> box#box#misc#hide () in let box = tree#get ~row:iter ~column:box_col in let () = box#box#misc#show () in current_prop := Some box in let when_selected () = let rows = selection#get_selected_rows in match rows with | [] -> () | row :: _ -> let iter = tree#get_iter row in select_iter iter in (* Focus on a box when selected *) let _ = selection#connect#changed ~callback:when_selected in let _ = match tree#get_iter_first with | None -> () | Some iter -> select_iter iter in object method box = main_box method apply = let foreach _ iter = let widget = tree#get ~row:iter ~column:box_col in widget#apply(); false in tree#foreach foreach end (** This function takes a configuration structure list and creates a window to configure the various parameters. *) let edit ?(with_apply=true) ?(apply=(fun () -> ())) title ?parent ?width ?height conf_struct = let dialog = GWindow.dialog ~position:`CENTER ~modal: true ~title: title ~type_hint:`DIALOG ?parent ?height ?width () in let config_box = new configuration_box conf_struct in let _ = dialog#vbox#pack ~expand:true config_box#box#coerce in if with_apply then dialog#add_button Configwin_messages.mApply `APPLY; dialog#add_button Configwin_messages.mOk `OK; dialog#add_button Configwin_messages.mCancel `CANCEL; let destroy () = dialog#destroy (); in let rec iter rep = try match dialog#run () with | `APPLY -> config_box#apply; iter Return_apply | `OK -> config_box#apply; destroy (); Return_ok | _ -> destroy (); rep with Failure s -> GToolbox.message_box ~title:"Error" s; iter rep | e -> GToolbox.message_box ~title:"Error" (Printexc.to_string e); iter rep in iter Return_cancel (* let edit_string l s = match GToolbox.input_string ~title: l ~text: s Configwin_messages.mValue with None -> s | Some s2 -> s2 *) (** Create a string param. *) let string ?(editable=true) ?(expand=true) ?help ?(f=(fun _ -> ())) label v = String_param { string_label = label ; string_help = help ; string_value = v ; string_editable = editable ; string_f_apply = f ; string_expand = expand ; string_to_string = (fun x -> x) ; string_of_string = (fun x -> x) ; } (** Create a bool param. *) let bool ?(editable=true) ?help ?(f=(fun _ -> ())) label v = Bool_param { bool_label = label ; bool_help = help ; bool_value = v ; bool_editable = editable ; bool_f_apply = f ; } (* (** Create a list param. *) let list ?(editable=true) ?help ?(f=(fun (_:'a list) -> ())) ?(eq=Pervasives.(=)) ?(edit:('a -> 'a) option) ?(add=(fun () -> ([] : 'a list))) ?titles ?(color=(fun (_:'a) -> (None : string option))) label (f_strings : 'a -> string list) v = List_param (fun () -> new list_param_box { list_label = label ; list_help = help ; list_value = v ; list_editable = editable ; list_titles = titles; list_eq = eq ; list_strings = f_strings ; list_color = color ; list_f_edit = edit ; list_f_add = add ; list_f_apply = f ; } ) (** Create a strings param. 
*) let strings ?(editable=true) ?help ?(f=(fun _ -> ())) ?(eq=Pervasives.(=)) ?(add=(fun () -> [])) label v = list ~editable ?help ~f ~eq ~edit: (edit_string label) ~add label (fun s -> [s]) v *) (** Create a combo param. *) let combo ?(editable=true) ?(expand=true) ?help ?(f=(fun _ -> ())) ?(new_allowed=false) ?(blank_allowed=false) label choices v = Combo_param { combo_label = label ; combo_help = help ; combo_value = v ; combo_editable = editable ; combo_choices = choices ; combo_new_allowed = new_allowed ; combo_blank_allowed = blank_allowed ; combo_f_apply = f ; combo_expand = expand ; } let modifiers ?(editable=true) ?(expand=true) ?help ?(allow=[`CONTROL;`SHIFT;`LOCK;`META;`MOD1;`MOD2;`MOD3;`MOD4;`MOD5]) ?(f=(fun _ -> ())) label v = Modifiers_param { md_label = label ; md_help = help ; md_value = v ; md_editable = editable ; md_f_apply = f ; md_expand = expand ; md_allow = allow ; } (** Create a custom param.*) let custom ?label box f expand = Custom_param { custom_box = box ; custom_f_apply = f ; custom_expand = expand ; custom_framed = label ; } (* Copying lablgtk question_box + forbidding hiding *) let question_box ~title ~buttons ?(default=1) ?icon ?parent message = let button_nb = ref 0 in let window = GWindow.dialog ~position:`CENTER ~modal:true ?parent ~type_hint:`DIALOG ~title () in let hbox = GPack.hbox ~border_width:10 ~packing:window#vbox#add () in let bbox = window#action_area in begin match icon with None -> () | Some i -> hbox#pack i#coerce ~padding:4 end; ignore (GMisc.label ~text: message ~packing: hbox#add ()); (* the function called to create each button by iterating *) let rec iter_buttons n = function [] -> () | button_label :: q -> let b = GButton.button ~label: button_label ~packing:(bbox#pack ~expand:true ~padding:4) () in ignore (b#connect#clicked ~callback: (fun () -> button_nb := n; window#destroy ())); (* If it's the first button then give it the focus *) if n = default then b#grab_default () else (); iter_buttons (n+1) q in iter_buttons 1 buttons; ignore (window#connect#destroy ~callback: GMain.Main.quit); window#set_position `CENTER; window#show (); GMain.Main.main (); !button_nb let message_box ~title ?icon ?parent ?(ok="Ok") message = ignore (question_box ?icon ?parent ~title message ~buttons:[ ok ]) coq-8.15.0/ide/coqide/configwin_ihm.mli000066400000000000000000000073071417001151100177150ustar00rootroot00000000000000(*********************************************************************************) (* Cameleon *) (* *) (* Copyright (C) 2005 Institut National de Recherche en Informatique et *) (* en Automatique. All rights reserved. *) (* *) (* This program is free software; you can redistribute it and/or modify *) (* it under the terms of the GNU Library General Public License as *) (* published by the Free Software Foundation; either version 2 of the *) (* License, or any later version. *) (* *) (* This program is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU Library General Public License for more details. 
*) (* *) (* You should have received a copy of the GNU Library General Public *) (* License along with this program; if not, write to the Free Software *) (* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA *) (* 02111-1307 USA *) (* *) (* Contact: Maxence.Guesdon@inria.fr *) (* *) (*********************************************************************************) open Configwin_types val string : ?editable: bool -> ?expand: bool -> ?help: string -> ?f: (string -> unit) -> string -> string -> parameter_kind val bool : ?editable: bool -> ?help: string -> ?f: (bool -> unit) -> string -> bool -> parameter_kind (* val strings : ?editable: bool -> ?help: string -> ?f: (string list -> unit) -> ?eq: (string -> string -> bool) -> ?add: (unit -> string list) -> string -> string list -> parameter_kind val list : ?editable: bool -> ?help: string -> ?f: ('a list -> unit) -> ?eq: ('a -> 'a -> bool) -> ?edit: ('a -> 'a) -> ?add: (unit -> 'a list) -> ?titles: string list -> ?color: ('a -> string option) -> string -> ('a -> string list) -> 'a list -> parameter_kind *) val combo : ?editable: bool -> ?expand: bool -> ?help: string -> ?f: (string -> unit) -> ?new_allowed: bool -> ?blank_allowed: bool -> string -> string list -> string -> parameter_kind val modifiers : ?editable: bool -> ?expand: bool -> ?help: string -> ?allow:(Gdk.Tags.modifier list) -> ?f: (Gdk.Tags.modifier list -> unit) -> string -> Gdk.Tags.modifier list -> parameter_kind val custom : ?label: string -> GPack.box -> (unit -> unit) -> bool -> parameter_kind val edit : ?with_apply:bool -> ?apply:(unit -> unit) -> string -> ?parent:GWindow.window -> ?width:int -> ?height:int -> configuration_structure list -> return_button val question_box : title:string -> buttons:string list -> ?default:int -> ?icon:#GObj.widget -> ?parent:GWindow.window -> string -> int val message_box : title:string -> ?icon:#GObj.widget -> ?parent:GWindow.window -> ?ok:string -> string -> unit coq-8.15.0/ide/coqide/configwin_messages.ml000066400000000000000000000050171417001151100205720ustar00rootroot00000000000000(*********************************************************************************) (* Cameleon *) (* *) (* Copyright (C) 2005 Institut National de Recherche en Informatique et *) (* en Automatique. All rights reserved. *) (* *) (* This program is free software; you can redistribute it and/or modify *) (* it under the terms of the GNU Library General Public License as *) (* published by the Free Software Foundation; either version 2 of the *) (* License, or any later version. *) (* *) (* This program is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU Library General Public License for more details. 
*) (* *) (* You should have received a copy of the GNU Library General Public *) (* License along with this program; if not, write to the Free Software *) (* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA *) (* 02111-1307 USA *) (* *) (* Contact: Maxence.Guesdon@inria.fr *) (* *) (*********************************************************************************) (** Module containing the messages of Configwin.*) let software = "Configwin";; let version = "1.2";; let html_config = "Configwin bindings configurator for html parameters" let home = Option.default "" (Glib.get_home_dir ()) let mCapture = "Capture";; let mType_key = "Type key" ;; let mAdd = "Add";; let mRemove = "Remove";; let mUp = "Up";; let mEdit = "Edit";; let mOk = "Ok";; let mCancel = "Cancel";; let mApply = "Apply";; let mValue = "Value" let mKey = "Key" let shortcuts = "Shortcuts" let html_end = "End with" let html_begin = "Begin with" coq-8.15.0/ide/coqide/configwin_types.ml000066400000000000000000000145601417001151100201320ustar00rootroot00000000000000(*********************************************************************************) (* Cameleon *) (* *) (* Copyright (C) 2005 Institut National de Recherche en Informatique et *) (* en Automatique. All rights reserved. *) (* *) (* This program is free software; you can redistribute it and/or modify *) (* it under the terms of the GNU Library General Public License as *) (* published by the Free Software Foundation; either version 2 of the *) (* License, or any later version. *) (* *) (* This program is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU Library General Public License for more details. *) (* *) (* You should have received a copy of the GNU Library General Public *) (* License along with this program; if not, write to the Free Software *) (* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA *) (* 02111-1307 USA *) (* *) (* Contact: Maxence.Guesdon@inria.fr *) (* *) (*********************************************************************************) (** This module contains the types used in Configwin. *) (** This type represents a string or filename parameter, or any other type, depending on the given conversion functions. *) type 'a string_param = { string_label : string; (** the label of the parameter *) mutable string_value : 'a; (** the current value of the parameter *) string_editable : bool ; (** indicates if the value can be changed *) string_f_apply : ('a -> unit) ; (** the function to call to apply the new value of the parameter *) string_help : string option ; (** optional help string *) string_expand : bool ; (** expand or not *) string_to_string : 'a -> string ; string_of_string : string -> 'a ; } ;; (** This type represents a boolean parameter. *) type bool_param = { bool_label : string; (** the label of the parameter *) mutable bool_value : bool; (** the current value of the parameter *) bool_editable : bool ; (** indicates if the value can be changed *) bool_f_apply : (bool -> unit) ; (** the function to call to apply the new value of the parameter *) bool_help : string option ; (** optional help string *) } ;; (** This type represents a parameter whose value is a list of ['a]. 
*) type 'a list_param = { list_label : string; (** the label of the parameter *) mutable list_value : 'a list; (** the current value of the parameter *) list_titles : string list option; (** the titles of columns, if they must be displayed *) list_f_edit : ('a -> 'a) option; (** optional edition function *) list_eq : ('a -> 'a -> bool) ; (** the comparison function used to get list without doubles *) list_strings : ('a -> string list); (** the function to get a string list from a ['a]. *) list_color : ('a -> string option) ; (** a function to get the optional color of an element *) list_editable : bool ; (** indicates if the value can be changed *) list_f_add : unit -> 'a list ; (** the function to call to add list *) list_f_apply : ('a list -> unit) ; (** the function to call to apply the new value of the parameter *) list_help : string option ; (** optional help string *) } ;; type combo_param = { combo_label : string ; mutable combo_value : string ; combo_choices : string list ; combo_editable : bool ; combo_blank_allowed : bool ; combo_new_allowed : bool ; combo_f_apply : (string -> unit); combo_help : string option ; (** optional help string *) combo_expand : bool ; (** expand the entry widget or not *) } ;; type custom_param = { custom_box : GPack.box ; custom_f_apply : (unit -> unit) ; custom_expand : bool ; custom_framed : string option ; (** optional label for an optional frame *) } ;; type modifiers_param = { md_label : string ; (** the label of the parameter *) mutable md_value : Gdk.Tags.modifier list ; (** The value, as a list of modifiers and a key code *) md_editable : bool ; (** indicates if the value can be changed *) md_f_apply : Gdk.Tags.modifier list -> unit ; (** the function to call to apply the new value of the parameter *) md_help : string option ; (** optional help string *) md_expand : bool ; (** expand or not *) md_allow : Gdk.Tags.modifier list } (** This type represents the different kinds of parameters. *) type parameter_kind = String_param of string string_param | List_param of (unit -> ) | Bool_param of bool_param | Text_param of string string_param | Combo_param of combo_param | Custom_param of custom_param | Modifiers_param of modifiers_param ;; (** This type represents the structure of the configuration window. *) type configuration_structure = | Section of string * GtkStock.id option * parameter_kind list (** label of the section, icon, parameters *) | Section_list of string * GtkStock.id option * configuration_structure list (** label of the section, list of the sub sections *) ;; (** To indicate what button was pushed by the user when the window is closed. *) type return_button = Return_apply (** The user clicked on Apply at least once before closing the window with Cancel or the window manager. *) | Return_ok (** The user closed the window with the ok button. *) | Return_cancel (** The user closed the window with the cancel button or the window manager but never clicked on the apply button.*) coq-8.15.0/ide/coqide/coq-ssreflect.lang000066400000000000000000000230461417001151100200050ustar00rootroot00000000000000 *.v \(\* \*\) # **) Require Bool. Require Import ssreflect ssrfun. (** A theory of boolean predicates and operators. A large part of this file is concerned with boolean reflection. Definitions and notations: is_true b == the coercion of b : bool to Prop (:= b = true). This is just input and displayed as `b''. 
reflect P b == the reflection inductive predicate, asserting that the logical proposition P : Prop holds iff the formula b : bool is equal to true. Lemmas asserting reflect P b are often referred to as "views". iffP, appP, sameP, rwP :: lemmas for direct manipulation of reflection views: iffP is used to prove reflection from logical equivalence, appP to compose views, and sameP and rwP to perform boolean and setoid rewriting. elimT :: coercion reflect >-> Funclass, which allows the direct application of `reflect' views to boolean assertions. decidable P <-> P is effectively decidable (:= {P} + {~ P}). contra, contraL, ... :: contraposition lemmas. altP my_viewP :: natural alternative for reflection; given lemma myviewP: reflect my_Prop my_formula, have #[#myP | not_myP#]# := altP my_viewP. generates two subgoals, in which my_formula has been replaced by true and false, resp., with new assumptions myP : my_Prop and not_myP: ~~ my_formula. Caveat: my_formula must be an APPLICATION, not a variable, constant, let-in, etc. (due to the poor behaviour of dependent index matching). boolP my_formula :: boolean disjunction, equivalent to altP (idP my_formula) but circumventing the dependent index capture issue; destructing boolP my_formula generates two subgoals with assumptions my_formula and ~~ my_formula. As with altP, my_formula must be an application. \unless C, P <-> we can assume property P when a something that holds under condition C (such as C itself). := forall G : Prop, (C -> G) -> (P -> G) -> G. This is just C \/ P or rather its impredicative encoding, whose usage better fits the above description: given a lemma UCP whose conclusion is \unless C, P we can assume P by writing: wlog hP: / P by apply/UCP; (prove C -> goal). or even apply: UCP id _ => hP if the goal is C. classically P <-> we can assume P when proving is_true b. := forall b : bool, (P -> b) -> b. This is equivalent to ~ (~ P) when P : Prop. implies P Q == wrapper variant type that coerces to P -> Q and can be used as a P -> Q view unambiguously. Useful to avoid spurious insertion of <-> views when Q is a conjunction of foralls, as in Lemma all_and2 below; conversely, avoids confusion in apply views for impredicative properties, such as \unless C, P. Also supports contrapositives. a && b == the boolean conjunction of a and b. a || b == the boolean disjunction of a and b. a ==> b == the boolean implication of b by a. ~~ a == the boolean negation of a. a (+) b == the boolean exclusive or (or sum) of a and b. #[# /\ P1 , P2 & P3 #]# == multiway logical conjunction, up to 5 terms. #[# \/ P1 , P2 | P3 #]# == multiway logical disjunction, up to 4 terms. #[#&& a, b, c & d#]# == iterated, right associative boolean conjunction with arbitrary arity. #[#|| a, b, c | d#]# == iterated, right associative boolean disjunction with arbitrary arity. #[#==> a, b, c => d#]# == iterated, right associative boolean implication with arbitrary arity. and3P, ... == specific reflection lemmas for iterated connectives. andTb, orbAC, ... == systematic names for boolean connective properties (see suffix conventions below). prop_congr == a tactic to move a boolean equality from its coerced form in Prop to the equality in bool. bool_congr == resolution tactic for blindly weeding out like terms from boolean equalities (can fail). This file provides a theory of boolean predicates and relations: pred T == the type of bool predicates (:= T -> bool). simpl_pred T == the type of simplifying bool predicates, based on the simpl_fun type from ssrfun.v. 
mem_pred T == a specialized form of simpl_pred for "collective" predicates (see below). rel T == the type of bool relations. := T -> pred T or T -> T -> bool. simpl_rel T == type of simplifying relations. := T -> simpl_pred T predType == the generic predicate interface, supported for for lists and sets. pred_sort == the predType >-> Type projection; pred_sort is itself a Coercion target class. Declaring a coercion to pred_sort is an alternative way of equipping a type with a predType structure, which interoperates better with coercion subtyping. This is used, e.g., for finite sets, so that finite groups inherit the membership operation by coercing to sets. {pred T} == a type convertible to pred T, but whose head constant is pred_sort. This type should be used for parameters that can be used as collective predicates (see below), as this will allow passing in directly collections that implement predType by coercion as described above, e.g., finite sets. := pred_sort (predPredType T) If P is a predicate the proposition "x satisfies P" can be written applicatively as (P x), or using an explicit connective as (x \in P); in the latter case we say that P is a "collective" predicate. We use A, B rather than P, Q for collective predicates: x \in A == x satisfies the (collective) predicate A. x \notin A == x doesn't satisfy the (collective) predicate A. The pred T type can be used as a generic predicate type for either kind, but the two kinds of predicates should not be confused. When a "generic" pred T value of one type needs to be passed as the other the following conversions should be used explicitly: SimplPred P == a (simplifying) applicative equivalent of P. mem A == an applicative equivalent of collective predicate A: mem A x simplifies to x \in A, as mem A has in fact type mem_pred T. --> In user notation collective predicates _only_ occur as arguments to mem: A only appears as (mem A). This is hidden by notation, e.g., x \in A := in_mem x (mem A) here, enum A := enum_mem (mem A) in fintype. This makes it possible to unify the various ways in which A can be interpreted as a predicate, for both pattern matching and display. Alternatively one can use the syntax for explicit simplifying predicates and relations (in the following x is bound in E): #[#pred x | E#]# == simplifying (see ssrfun) predicate x => E. #[#pred x : T | E#]# == predicate x => E, with a cast on the argument. #[#pred : T | P#]# == constant predicate P on type T. #[#pred x | E1 & E2#]# == #[#pred x | E1 && E2#]#; an x : T cast is allowed. #[#pred x in A#]# == #[#pred x | x in A#]#. #[#pred x in A | E#]# == #[#pred x | x in A & E#]#. #[#pred x in A | E1 & E2#]# == #[#pred x in A | E1 && E2#]#. #[#predU A & B#]# == union of two collective predicates A and B. #[#predI A & B#]# == intersection of collective predicates A and B. #[#predD A & B#]# == difference of collective predicates A and B. #[#predC A#]# == complement of the collective predicate A. #[#preim f of A#]# == preimage under f of the collective predicate A. predU P Q, ..., preim f P == union, etc of applicative predicates. pred0 == the empty predicate. predT == the total (always true) predicate. if T : predArgType, then T coerces to predT. {: T} == T cast to predArgType (e.g., {: bool * nat}). In the following, x and y are bound in E: #[#rel x y | E#]# == simplifying relation x, y => E. #[#rel x y : T | E#]# == simplifying relation with arguments cast. #[#rel x y in A & B | E#]# == #[#rel x y | #[#&& x \in A, y \in B & E#]# #]#. 
#[#rel x y in A & B#]# == #[#rel x y | (x \in A) && (y \in B) #]#. #[#rel x y in A | E#]# == #[#rel x y in A & A | E#]#. #[#rel x y in A#]# == #[#rel x y in A & A#]#. relU R S == union of relations R and S. relpre f R == preimage of relation R under f. xpredU, ..., xrelpre == lambda terms implementing predU, ..., etc. Explicit values of type pred T (i.e., lamdba terms) should always be used applicatively, while values of collection types implementing the predType interface, such as sequences or sets should always be used as collective predicates. Defined constants and functions of type pred T or simpl_pred T as well as the explicit simpl_pred T values described below, can generally be used either way. Note however that x \in A will not auto-simplify when A is an explicit simpl_pred T value; the generic simplification rule inE must be used (when A : pred T, the unfold_in rule can be used). Constants of type pred T with an explicit simpl_pred value do not auto-simplify when used applicatively, but can still be expanded with inE. This behavior can be controlled as follows: Let A : collective_pred T := #[#pred x | ... #]#. The collective_pred T type is just an alias for pred T, but this cast stops rewrite inE from expanding the definition of A, thus treating A into an abstract collection (unfold_in or in_collective can be used to expand manually). Let A : applicative_pred T := #[#pred x | ... #]#. This cast causes inE to turn x \in A into the applicative A x form; A will then have to be unfolded explicitly with the /A rule. This will also apply to any definition that reduces to A (e.g., Let B := A). Canonical A_app_pred := ApplicativePred A. This declaration, given after definition of A, similarly causes inE to turn x \in A into A x, but in addition allows the app_predE rule to turn A x back into x \in A; it can be used for any definition of type pred T, which makes it especially useful for ambivalent predicates as the relational transitive closure connect, that are used in both applicative and collective styles. Purely for aesthetics, we provide a subtype of collective predicates: qualifier q T == a pred T pretty-printing wrapper. An A : qualifier q T coerces to pred_sort and thus behaves as a collective predicate, but x \in A and x \notin A are displayed as: x \is A and x \isn't A when q = 0, x \is a A and x \isn't a A when q = 1, x \is an A and x \isn't an A when q = 2, respectively. #[#qualify x | P#]# := Qualifier 0 (fun x => P), constructor for the above. #[#qualify x : T | P#]#, #[#qualify a x | P#]#, #[#qualify an X | P#]#, etc. variants of the above with type constraints and different values of q. We provide an internal interface to support attaching properties (such as being multiplicative) to predicates: pred_key p == phantom type that will serve as a support for properties to be attached to p : {pred _}; instances should be created with Fact/Qed so as to be opaque. KeyedPred k_p == an instance of the interface structure that attaches (k_p : pred_key P) to P; the structure projection is a coercion to pred_sort. KeyedQualifier k_q == an instance of the interface structure that attaches (k_q : pred_key q) to (q : qualifier n T). DefaultPredKey p == a default value for pred_key p; the vernacular command Import DefaultKeying attaches this key to all predicates that are not explicitly keyed. Keys can be used to attach properties to predicates, qualifiers and generic nouns in a way that allows them to be used transparently. 
The key projection of a predicate property structure such as unsignedPred should be a pred_key, not a pred, and corresponding lemmas will have the form Lemma rpredN R S (oppS : @opprPred R S) (kS : keyed_pred oppS) : {mono -%%R: x / x \in kS}. Because x \in kS will be displayed as x \in S (or x \is S, etc), the canonical instance of opprPred will not normally be exposed (it will also be erased by /= simplification). In addition each predicate structure should have a DefaultPredKey Canonical instance that simply issues the property as a proof obligation (which can be caught by the Prop-irrelevant feature of the ssreflect plugin). Some properties of predicates and relations: A =i B <-> A and B are extensionally equivalent. {subset A <= B} <-> A is a (collective) subpredicate of B. subpred P Q <-> P is an (applicative) subpredicate or Q. subrel R S <-> R is a subrelation of S. In the following R is in rel T: reflexive R <-> R is reflexive. irreflexive R <-> R is irreflexive. symmetric R <-> R (in rel T) is symmetric (equation). pre_symmetric R <-> R is symmetric (implication). antisymmetric R <-> R is antisymmetric. total R <-> R is total. transitive R <-> R is transitive. left_transitive R <-> R is a congruence on its left hand side. right_transitive R <-> R is a congruence on its right hand side. equivalence_rel R <-> R is an equivalence relation. Localization of (Prop) predicates; if P1 is convertible to forall x, Qx, P2 to forall x y, Qxy and P3 to forall x y z, Qxyz : {for y, P1} <-> Qx{y / x}. {in A, P1} <-> forall x, x \in A -> Qx. {in A1 & A2, P2} <-> forall x y, x \in A1 -> y \in A2 -> Qxy. {in A &, P2} <-> forall x y, x \in A -> y \in A -> Qxy. {in A1 & A2 & A3, Q3} <-> forall x y z, x \in A1 -> y \in A2 -> z \in A3 -> Qxyz. {in A1 & A2 &, Q3} := {in A1 & A2 & A2, Q3}. {in A1 && A3, Q3} := {in A1 & A1 & A3, Q3}. {in A &&, Q3} := {in A & A & A, Q3}. {in A, bijective f} <-> f has a right inverse in A. {on C, P1} <-> forall x, (f x) \in C -> Qx when P1 is also convertible to Pf f, e.g., {on C, involutive f}. {on C &, P2} == forall x y, f x \in C -> f y \in C -> Qxy when P2 is also convertible to Pf f, e.g., {on C &, injective f}. {on C, P1' & g} == forall x, (f x) \in cd -> Qx when P1' is convertible to Pf f and P1' g is convertible to forall x, Qx, e.g., {on C, cancel f & g}. {on C, bijective f} == f has a right inverse on C. This file extends the lemma name suffix conventions of ssrfun as follows: A -- associativity, as in andbA : associative andb. AC -- right commutativity. ACA -- self-interchange (inner commutativity), e.g., orbACA : (a || b) || (c || d) = (a || c) || (b || d). b -- a boolean argument, as in andbb : idempotent andb. C -- commutativity, as in andbC : commutative andb, or predicate complement, as in predC. CA -- left commutativity. D -- predicate difference, as in predD. E -- elimination, as in negbFE : ~~ b = false -> b. F or f -- boolean false, as in andbF : b && false = false. I -- left/right injectivity, as in addbI : right_injective addb, or predicate intersection, as in predI. l -- a left-hand operation, as andb_orl : left_distributive andb orb. N or n -- boolean negation, as in andbN : a && (~~ a) = false. P -- a characteristic property, often a reflection lemma, as in andP : reflect (a /\ b) (a && b). r -- a right-hand operation, as orb_andr : right_distributive orb andb. T or t -- boolean truth, as in andbT: right_id true andb. U -- predicate union, as in predU. W -- weakening, as in in1W : (forall x, P) -> {in D, forall x, P}. **) Set Implicit Arguments. 
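(** A short, purely illustrative sketch of the view idioms described above,
    using [andP : reflect (a /\ b) (a && b)] and [boolP], both provided further
    below in this file; the hypothesis names are arbitrary:
      move/andP=> #[#pa pb#]#        destructs a boolean hypothesis a && b into
                                 pa : a and pb : b;
      apply/andP; split          proves a boolean goal a && b from its two
                                 Prop components;
      have #[#pa | npa#]# := boolP a  performs boolean case analysis, giving
                                 pa : a in one branch and npa : ~~ a in the
                                 other.                                      **)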
Unset Strict Implicit. Unset Printing Implicit Defensive. Notation reflect := Bool.reflect. Notation ReflectT := Bool.ReflectT. Notation ReflectF := Bool.ReflectF. Reserved Notation "~~ b" (at level 35, right associativity). Reserved Notation "b ==> c" (at level 55, right associativity). Reserved Notation "b1 (+) b2" (at level 50, left associativity). Reserved Notation "x \in A" (at level 70, no associativity, format "'[hv' x '/ ' \in A ']'"). Reserved Notation "x \notin A" (at level 70, no associativity, format "'[hv' x '/ ' \notin A ']'"). Reserved Notation "x \is A" (at level 70, no associativity, format "'[hv' x '/ ' \is A ']'"). Reserved Notation "x \isn't A" (at level 70, no associativity, format "'[hv' x '/ ' \isn't A ']'"). Reserved Notation "x \is 'a' A" (at level 70, no associativity, format "'[hv' x '/ ' \is 'a' A ']'"). Reserved Notation "x \isn't 'a' A" (at level 70, no associativity, format "'[hv' x '/ ' \isn't 'a' A ']'"). Reserved Notation "x \is 'an' A" (at level 70, no associativity, format "'[hv' x '/ ' \is 'an' A ']'"). Reserved Notation "x \isn't 'an' A" (at level 70, no associativity, format "'[hv' x '/ ' \isn't 'an' A ']'"). Reserved Notation "p1 =i p2" (at level 70, no associativity, format "'[hv' p1 '/ ' =i p2 ']'"). Reserved Notation "{ 'subset' A <= B }" (at level 0, A, B at level 69, format "'[hv' { 'subset' A '/ ' <= B } ']'"). Reserved Notation "{ : T }" (at level 0, format "{ : T }"). Reserved Notation "{ 'pred' T }" (at level 0, format "{ 'pred' T }"). Reserved Notation "[ 'predType' 'of' T ]" (at level 0, format "[ 'predType' 'of' T ]"). Reserved Notation "[ 'pred' : T | E ]" (at level 0, format "'[hv' [ 'pred' : T | '/ ' E ] ']'"). Reserved Notation "[ 'pred' x | E ]" (at level 0, x name, format "'[hv' [ 'pred' x | '/ ' E ] ']'"). Reserved Notation "[ 'pred' x : T | E ]" (at level 0, x name, format "'[hv' [ 'pred' x : T | '/ ' E ] ']'"). Reserved Notation "[ 'pred' x | E1 & E2 ]" (at level 0, x name, format "'[hv' [ 'pred' x | '/ ' E1 & '/ ' E2 ] ']'"). Reserved Notation "[ 'pred' x : T | E1 & E2 ]" (at level 0, x name, format "'[hv' [ 'pred' x : T | '/ ' E1 & E2 ] ']'"). Reserved Notation "[ 'pred' x 'in' A ]" (at level 0, x name, format "'[hv' [ 'pred' x 'in' A ] ']'"). Reserved Notation "[ 'pred' x 'in' A | E ]" (at level 0, x name, format "'[hv' [ 'pred' x 'in' A | '/ ' E ] ']'"). Reserved Notation "[ 'pred' x 'in' A | E1 & E2 ]" (at level 0, x name, format "'[hv' [ 'pred' x 'in' A | '/ ' E1 & '/ ' E2 ] ']'"). Reserved Notation "[ 'qualify' x | P ]" (at level 0, x at level 99, format "'[hv' [ 'qualify' x | '/ ' P ] ']'"). Reserved Notation "[ 'qualify' x : T | P ]" (at level 0, x at level 99, format "'[hv' [ 'qualify' x : T | '/ ' P ] ']'"). Reserved Notation "[ 'qualify' 'a' x | P ]" (at level 0, x at level 99, format "'[hv' [ 'qualify' 'a' x | '/ ' P ] ']'"). Reserved Notation "[ 'qualify' 'a' x : T | P ]" (at level 0, x at level 99, format "'[hv' [ 'qualify' 'a' x : T | '/ ' P ] ']'"). Reserved Notation "[ 'qualify' 'an' x | P ]" (at level 0, x at level 99, format "'[hv' [ 'qualify' 'an' x | '/ ' P ] ']'"). Reserved Notation "[ 'qualify' 'an' x : T | P ]" (at level 0, x at level 99, format "'[hv' [ 'qualify' 'an' x : T | '/ ' P ] ']'"). Reserved Notation "[ 'rel' x y | E ]" (at level 0, x name, y name, format "'[hv' [ 'rel' x y | '/ ' E ] ']'"). Reserved Notation "[ 'rel' x y : T | E ]" (at level 0, x name, y name, format "'[hv' [ 'rel' x y : T | '/ ' E ] ']'"). 
Reserved Notation "[ 'rel' x y 'in' A & B | E ]" (at level 0, x name, y name, format "'[hv' [ 'rel' x y 'in' A & B | '/ ' E ] ']'"). Reserved Notation "[ 'rel' x y 'in' A & B ]" (at level 0, x name, y name, format "'[hv' [ 'rel' x y 'in' A & B ] ']'"). Reserved Notation "[ 'rel' x y 'in' A | E ]" (at level 0, x name, y name, format "'[hv' [ 'rel' x y 'in' A | '/ ' E ] ']'"). Reserved Notation "[ 'rel' x y 'in' A ]" (at level 0, x name, y name, format "'[hv' [ 'rel' x y 'in' A ] ']'"). Reserved Notation "[ 'mem' A ]" (at level 0, format "[ 'mem' A ]"). Reserved Notation "[ 'predI' A & B ]" (at level 0, format "[ 'predI' A & B ]"). Reserved Notation "[ 'predU' A & B ]" (at level 0, format "[ 'predU' A & B ]"). Reserved Notation "[ 'predD' A & B ]" (at level 0, format "[ 'predD' A & B ]"). Reserved Notation "[ 'predC' A ]" (at level 0, format "[ 'predC' A ]"). Reserved Notation "[ 'preim' f 'of' A ]" (at level 0, format "[ 'preim' f 'of' A ]"). Reserved Notation "\unless C , P" (at level 200, C at level 100, format "'[hv' \unless C , '/ ' P ']'"). Reserved Notation "{ 'for' x , P }" (at level 0, format "'[hv' { 'for' x , '/ ' P } ']'"). Reserved Notation "{ 'in' d , P }" (at level 0, format "'[hv' { 'in' d , '/ ' P } ']'"). Reserved Notation "{ 'in' d1 & d2 , P }" (at level 0, format "'[hv' { 'in' d1 & d2 , '/ ' P } ']'"). Reserved Notation "{ 'in' d & , P }" (at level 0, format "'[hv' { 'in' d & , '/ ' P } ']'"). Reserved Notation "{ 'in' d1 & d2 & d3 , P }" (at level 0, format "'[hv' { 'in' d1 & d2 & d3 , '/ ' P } ']'"). Reserved Notation "{ 'in' d1 & & d3 , P }" (at level 0, format "'[hv' { 'in' d1 & & d3 , '/ ' P } ']'"). Reserved Notation "{ 'in' d1 & d2 & , P }" (at level 0, format "'[hv' { 'in' d1 & d2 & , '/ ' P } ']'"). Reserved Notation "{ 'in' d & & , P }" (at level 0, format "'[hv' { 'in' d & & , '/ ' P } ']'"). Reserved Notation "{ 'on' cd , P }" (at level 0, format "'[hv' { 'on' cd , '/ ' P } ']'"). Reserved Notation "{ 'on' cd & , P }" (at level 0, format "'[hv' { 'on' cd & , '/ ' P } ']'"). Reserved Notation "{ 'on' cd , P & g }" (at level 0, g at level 8, format "'[hv' { 'on' cd , '/ ' P & g } ']'"). Reserved Notation "{ 'in' d , 'bijective' f }" (at level 0, f at level 8, format "'[hv' { 'in' d , '/ ' 'bijective' f } ']'"). Reserved Notation "{ 'on' cd , 'bijective' f }" (at level 0, f at level 8, format "'[hv' { 'on' cd , '/ ' 'bijective' f } ']'"). (** We introduce a number of n-ary "list-style" notations that share a common format, namely #[#op arg1, arg2, ... last_separator last_arg#]# This usually denotes a right-associative applications of op, e.g., #[#&& a, b, c & d#]# denotes a && (b && (c && d)) The last_separator must be a non-operator token. Here we use &, | or =>; our default is &, but we try to match the intended meaning of op. The separator is a workaround for limitations of the parsing engine; the same limitations mean the separator cannot be omitted even when last_arg can. The Notation declarations are complicated by the separate treatment for some fixed arities (binary for bool operators, and all arities for Prop operators). We also use the square brackets in comprehension-style notations #[#type var separator expr#]# where "type" is the type of the comprehension (e.g., pred) and "separator" is | or => . It is important that in other notations a leading square bracket #[# is always followed by an operator symbol or a fixed identifier. **) Reserved Notation "[ /\ P1 & P2 ]" (at level 0). 
Reserved Notation "[ /\ P1 , P2 & P3 ]" (at level 0, format "'[hv' [ /\ '[' P1 , '/' P2 ']' '/ ' & P3 ] ']'"). Reserved Notation "[ /\ P1 , P2 , P3 & P4 ]" (at level 0, format "'[hv' [ /\ '[' P1 , '/' P2 , '/' P3 ']' '/ ' & P4 ] ']'"). Reserved Notation "[ /\ P1 , P2 , P3 , P4 & P5 ]" (at level 0, format "'[hv' [ /\ '[' P1 , '/' P2 , '/' P3 , '/' P4 ']' '/ ' & P5 ] ']'"). Reserved Notation "[ \/ P1 | P2 ]" (at level 0). Reserved Notation "[ \/ P1 , P2 | P3 ]" (at level 0, format "'[hv' [ \/ '[' P1 , '/' P2 ']' '/ ' | P3 ] ']'"). Reserved Notation "[ \/ P1 , P2 , P3 | P4 ]" (at level 0, format "'[hv' [ \/ '[' P1 , '/' P2 , '/' P3 ']' '/ ' | P4 ] ']'"). Reserved Notation "[ && b1 & c ]" (at level 0). Reserved Notation "[ && b1 , b2 , .. , bn & c ]" (at level 0, format "'[hv' [ && '[' b1 , '/' b2 , '/' .. , '/' bn ']' '/ ' & c ] ']'"). Reserved Notation "[ || b1 | c ]" (at level 0). Reserved Notation "[ || b1 , b2 , .. , bn | c ]" (at level 0, format "'[hv' [ || '[' b1 , '/' b2 , '/' .. , '/' bn ']' '/ ' | c ] ']'"). Reserved Notation "[ ==> b1 => c ]" (at level 0). Reserved Notation "[ ==> b1 , b2 , .. , bn => c ]" (at level 0, format "'[hv' [ ==> '[' b1 , '/' b2 , '/' .. , '/' bn ']' '/' => c ] ']'"). (** Shorter delimiter **) Delimit Scope bool_scope with B. Open Scope bool_scope. (** An alternative to xorb that behaves somewhat better wrt simplification. **) Definition addb b := if b then negb else id. (** Notation for && and || is declared in Init.Datatypes. **) Notation "~~ b" := (negb b) : bool_scope. Notation "b ==> c" := (implb b c) : bool_scope. Notation "b1 (+) b2" := (addb b1 b2) : bool_scope. (** Constant is_true b := b = true is defined in Init.Datatypes. **) Coercion is_true : bool >-> Sortclass. (* Prop *) Lemma prop_congr : forall b b' : bool, b = b' -> b = b' :> Prop. Proof. by move=> b b' ->. Qed. Ltac prop_congr := apply: prop_congr. (** Lemmas for trivial. **) Lemma is_true_true : true. Proof. by []. Qed. Lemma not_false_is_true : ~ false. Proof. by []. Qed. Lemma is_true_locked_true : locked true. Proof. by unlock. Qed. #[global] Hint Resolve is_true_true not_false_is_true is_true_locked_true : core. (** Shorter names. **) Definition isT := is_true_true. Definition notF := not_false_is_true. (** Negation lemmas. **) (** We generally take NEGATION as the standard form of a false condition: negative boolean hypotheses should be of the form ~~ b, rather than ~ b or b = false, as much as possible. **) Lemma negbT b : b = false -> ~~ b. Proof. by case: b. Qed. Lemma negbTE b : ~~ b -> b = false. Proof. by case: b. Qed. Lemma negbF b : (b : bool) -> ~~ b = false. Proof. by case: b. Qed. Lemma negbFE b : ~~ b = false -> b. Proof. by case: b. Qed. Lemma negbK : involutive negb. Proof. by case. Qed. Lemma negbNE b : ~~ ~~ b -> b. Proof. by case: b. Qed. Lemma negb_inj : injective negb. Proof. exact: can_inj negbK. Qed. Lemma negbLR b c : b = ~~ c -> ~~ b = c. Proof. exact: canLR negbK. Qed. Lemma negbRL b c : ~~ b = c -> b = ~~ c. Proof. exact: canRL negbK. Qed. Lemma contra (c b : bool) : (c -> b) -> ~~ b -> ~~ c. Proof. by case: b => //; case: c. Qed. Definition contraNN := contra. Lemma contraL (c b : bool) : (c -> ~~ b) -> b -> ~~ c. Proof. by case: b => //; case: c. Qed. Definition contraTN := contraL. Lemma contraR (c b : bool) : (~~ c -> b) -> ~~ b -> c. Proof. by case: b => //; case: c. Qed. Definition contraNT := contraR. Lemma contraLR (c b : bool) : (~~ c -> ~~ b) -> b -> c. Proof. by case: b => //; case: c. Qed. Definition contraTT := contraLR. 
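(** A minimal, self-contained illustration of the negation and contraposition
    lemmas above (an added sketch; the names [negbK_example] and
    [contra_example] are ad hoc and not used elsewhere in this file). **)
Example negbK_example (a : bool) : ~~ ~~ (~~ a) = ~~ a.
Proof. by rewrite negbK. Qed.
Example contra_example (a b : bool) : (a -> a && b) -> ~~ (a && b) -> ~~ a.
Proof. exact: contra. Qed.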
Lemma contraT b : (~~ b -> false) -> b. Proof. by case: b => // ->. Qed. Lemma wlog_neg b : (~~ b -> b) -> b. Proof. by case: b => // ->. Qed. Lemma contraFT (c b : bool) : (~~ c -> b) -> b = false -> c. Proof. by move/contraR=> notb_c /negbT. Qed. Lemma contraFN (c b : bool) : (c -> b) -> b = false -> ~~ c. Proof. by move/contra=> notb_notc /negbT. Qed. Lemma contraTF (c b : bool) : (c -> ~~ b) -> b -> c = false. Proof. by move/contraL=> b_notc /b_notc/negbTE. Qed. Lemma contraNF (c b : bool) : (c -> b) -> ~~ b -> c = false. Proof. by move/contra=> notb_notc /notb_notc/negbTE. Qed. Lemma contraFF (c b : bool) : (c -> b) -> b = false -> c = false. Proof. by move/contraFN=> bF_notc /bF_notc/negbTE. Qed. (* additional contra lemmas involving [P,Q : Prop] *) Lemma contra_not (P Q : Prop) : (Q -> P) -> (~ P -> ~ Q). Proof. by auto. Qed. Lemma contraPnot (P Q : Prop) : (Q -> ~ P) -> (P -> ~ Q). Proof. by auto. Qed. Lemma contraTnot (b : bool) (P : Prop) : (P -> ~~ b) -> (b -> ~ P). Proof. by case: b; auto. Qed. Lemma contraNnot (P : Prop) (b : bool) : (P -> b) -> (~~ b -> ~ P). Proof. rewrite -{1}[b]negbK; exact: contraTnot. Qed. Lemma contraPT (P : Prop) (b : bool) : (~~ b -> ~ P) -> P -> b. Proof. by case: b => //= /(_ isT) nP /nP. Qed. Lemma contra_notT (P : Prop) (b : bool) : (~~ b -> P) -> ~ P -> b. Proof. by case: b => //= /(_ isT) HP /(_ HP). Qed. Lemma contra_notN (P : Prop) (b : bool) : (b -> P) -> ~ P -> ~~ b. Proof. rewrite -{1}[b]negbK; exact: contra_notT. Qed. Lemma contraPN (P : Prop) (b : bool) : (b -> ~ P) -> (P -> ~~ b). Proof. by case: b => //=; move/(_ isT) => HP /HP. Qed. Lemma contraFnot (P : Prop) (b : bool) : (P -> b) -> b = false -> ~ P. Proof. by case: b => //; auto. Qed. Lemma contraPF (P : Prop) (b : bool) : (b -> ~ P) -> P -> b = false. Proof. by case: b => // /(_ isT). Qed. Lemma contra_notF (P : Prop) (b : bool) : (b -> P) -> ~ P -> b = false. Proof. by case: b => // /(_ isT). Qed. (** Coercion of sum-style datatypes into bool, which makes it possible to use ssr's boolean if rather than Coq's "generic" if. **) Coercion isSome T (u : option T) := if u is Some _ then true else false. Coercion is_inl A B (u : A + B) := if u is inl _ then true else false. Coercion is_left A B (u : {A} + {B}) := if u is left _ then true else false. Coercion is_inleft A B (u : A + {B}) := if u is inleft _ then true else false. Prenex Implicits isSome is_inl is_left is_inleft. Definition decidable P := {P} + {~ P}. (** Lemmas for ifs with large conditions, which allow reasoning about the condition without repeating it inside the proof (the latter IS preferable when the condition is short). Usage : if the goal contains (if cond then ...) = ... case: ifP => Hcond. generates two subgoal, with the assumption Hcond : cond = true/false Rewrite if_same eliminates redundant ifs Rewrite (fun_if f) moves a function f inside an if Rewrite if_arg moves an argument inside a function-valued if **) Section BoolIf. Variables (A B : Type) (x : A) (f : A -> B) (b : bool) (vT vF : A). Variant if_spec (not_b : Prop) : bool -> A -> Set := | IfSpecTrue of b : if_spec not_b true vT | IfSpecFalse of not_b : if_spec not_b false vF. Lemma ifP : if_spec (b = false) b (if b then vT else vF). Proof. by case def_b: b; constructor. Qed. Lemma ifPn : if_spec (~~ b) b (if b then vT else vF). Proof. by case def_b: b; constructor; rewrite ?def_b. Qed. Lemma ifT : b -> (if b then vT else vF) = vT. Proof. by move->. Qed. Lemma ifF : b = false -> (if b then vT else vF) = vF. Proof. by move->. Qed. 
Lemma ifN : ~~ b -> (if b then vT else vF) = vF. Proof. by move/negbTE->. Qed. Lemma if_same : (if b then vT else vT) = vT. Proof. by case b. Qed. Lemma if_neg : (if ~~ b then vT else vF) = if b then vF else vT. Proof. by case b. Qed. Lemma fun_if : f (if b then vT else vF) = if b then f vT else f vF. Proof. by case b. Qed. Lemma if_arg (fT fF : A -> B) : (if b then fT else fF) x = if b then fT x else fF x. Proof. by case b. Qed. (** Turning a boolean "if" form into an application. **) Definition if_expr := if b then vT else vF. Lemma ifE : (if b then vT else vF) = if_expr. Proof. by []. Qed. End BoolIf. (** Core (internal) reflection lemmas, used for the three kinds of views. **) Section ReflectCore. Variables (P Q : Prop) (b c : bool). Hypothesis Hb : reflect P b. Lemma introNTF : (if c then ~ P else P) -> ~~ b = c. Proof. by case c; case Hb. Qed. Lemma introTF : (if c then P else ~ P) -> b = c. Proof. by case c; case Hb. Qed. Lemma elimNTF : ~~ b = c -> if c then ~ P else P. Proof. by move <-; case Hb. Qed. Lemma elimTF : b = c -> if c then P else ~ P. Proof. by move <-; case Hb. Qed. Lemma equivPif : (Q -> P) -> (P -> Q) -> if b then Q else ~ Q. Proof. by case Hb; auto. Qed. Lemma xorPif : Q \/ P -> ~ (Q /\ P) -> if b then ~ Q else Q. Proof. by case Hb => [? _ H ? | ? H _]; case: H. Qed. End ReflectCore. (** Internal negated reflection lemmas **) Section ReflectNegCore. Variables (P Q : Prop) (b c : bool). Hypothesis Hb : reflect P (~~ b). Lemma introTFn : (if c then ~ P else P) -> b = c. Proof. by move/(introNTF Hb) <-; case b. Qed. Lemma elimTFn : b = c -> if c then ~ P else P. Proof. by move <-; apply: (elimNTF Hb); case b. Qed. Lemma equivPifn : (Q -> P) -> (P -> Q) -> if b then ~ Q else Q. Proof. by rewrite -if_neg; apply: equivPif. Qed. Lemma xorPifn : Q \/ P -> ~ (Q /\ P) -> if b then Q else ~ Q. Proof. by rewrite -if_neg; apply: xorPif. Qed. End ReflectNegCore. (** User-oriented reflection lemmas **) Section Reflect. Variables (P Q : Prop) (b b' c : bool). Hypotheses (Pb : reflect P b) (Pb' : reflect P (~~ b')). Lemma introT : P -> b. Proof. exact: introTF true _. Qed. Lemma introF : ~ P -> b = false. Proof. exact: introTF false _. Qed. Lemma introN : ~ P -> ~~ b. Proof. exact: introNTF true _. Qed. Lemma introNf : P -> ~~ b = false. Proof. exact: introNTF false _. Qed. Lemma introTn : ~ P -> b'. Proof. exact: introTFn true _. Qed. Lemma introFn : P -> b' = false. Proof. exact: introTFn false _. Qed. Lemma elimT : b -> P. Proof. exact: elimTF true _. Qed. Lemma elimF : b = false -> ~ P. Proof. exact: elimTF false _. Qed. Lemma elimN : ~~ b -> ~P. Proof. exact: elimNTF true _. Qed. Lemma elimNf : ~~ b = false -> P. Proof. exact: elimNTF false _. Qed. Lemma elimTn : b' -> ~ P. Proof. exact: elimTFn true _. Qed. Lemma elimFn : b' = false -> P. Proof. exact: elimTFn false _. Qed. Lemma introP : (b -> Q) -> (~~ b -> ~ Q) -> reflect Q b. Proof. by case b; constructor; auto. Qed. Lemma iffP : (P -> Q) -> (Q -> P) -> reflect Q b. Proof. by case: Pb; constructor; auto. Qed. Lemma equivP : (P <-> Q) -> reflect Q b. Proof. by case; apply: iffP. Qed. Lemma sumboolP (decQ : decidable Q) : reflect Q decQ. Proof. by case: decQ; constructor. Qed. Lemma appP : reflect Q b -> P -> Q. Proof. by move=> Qb; move/introT; case: Qb. Qed. Lemma sameP : reflect P c -> b = c. Proof. by case; [apply: introT | apply: introF]. Qed. Lemma decPcases : if b then P else ~ P. Proof. by case Pb. Qed. Definition decP : decidable P. by case: b decPcases; [left | right]. Defined. Lemma rwP : P <-> b. Proof. 
by split; [apply: introT | apply: elimT]. Qed. Lemma rwP2 : reflect Q b -> (P <-> Q). Proof. by move=> Qb; split=> ?; [apply: appP | apply: elimT; case: Qb]. Qed. (** Predicate family to reflect excluded middle in bool. **) Variant alt_spec : bool -> Type := | AltTrue of P : alt_spec true | AltFalse of ~~ b : alt_spec false. Lemma altP : alt_spec b. Proof. by case def_b: b / Pb; constructor; rewrite ?def_b. Qed. End Reflect. Hint View for move/ elimTF|3 elimNTF|3 elimTFn|3 introT|2 introTn|2 introN|2. Hint View for apply/ introTF|3 introNTF|3 introTFn|3 elimT|2 elimTn|2 elimN|2. Hint View for apply// equivPif|3 xorPif|3 equivPifn|3 xorPifn|3. (** Allow the direct application of a reflection lemma to a boolean assertion. **) Coercion elimT : reflect >-> Funclass. #[universes(template)] Variant implies P Q := Implies of P -> Q. Lemma impliesP P Q : implies P Q -> P -> Q. Proof. by case. Qed. Lemma impliesPn (P Q : Prop) : implies P Q -> ~ Q -> ~ P. Proof. by case=> iP ? /iP. Qed. Coercion impliesP : implies >-> Funclass. Hint View for move/ impliesPn|2 impliesP|2. Hint View for apply/ impliesPn|2 impliesP|2. (** Impredicative or, which can emulate a classical not-implies. **) Definition unless condition property : Prop := forall goal : Prop, (condition -> goal) -> (property -> goal) -> goal. Notation "\unless C , P" := (unless C P) : type_scope. Lemma unlessL C P : implies C (\unless C, P). Proof. by split=> hC G /(_ hC). Qed. Lemma unlessR C P : implies P (\unless C, P). Proof. by split=> hP G _ /(_ hP). Qed. Lemma unless_sym C P : implies (\unless C, P) (\unless P, C). Proof. by split; apply; [apply/unlessR | apply/unlessL]. Qed. Lemma unlessP (C P : Prop) : (\unless C, P) <-> C \/ P. Proof. by split=> [|[/unlessL | /unlessR]]; apply; [left | right]. Qed. Lemma bind_unless C P {Q} : implies (\unless C, P) (\unless (\unless C, Q), P). Proof. by split; apply=> [hC|hP]; [apply/unlessL/unlessL | apply/unlessR]. Qed. Lemma unless_contra b C : implies (~~ b -> C) (\unless C, b). Proof. by split; case: b => [_ | hC]; [apply/unlessR | apply/unlessL/hC]. Qed. (** Classical reasoning becomes directly accessible for any bool subgoal. Note that we cannot use "unless" here for lack of universe polymorphism. **) Definition classically P : Prop := forall b : bool, (P -> b) -> b. Lemma classicP (P : Prop) : classically P <-> ~ ~ P. Proof. split=> [cP nP | nnP [] // nP]; last by case nnP; move/nP. by have: P -> false; [move/nP | move/cP]. Qed. Lemma classicW P : P -> classically P. Proof. by move=> hP _ ->. Qed. Lemma classic_bind P Q : (P -> classically Q) -> classically P -> classically Q. Proof. by move=> iPQ cP b /iPQ-/cP. Qed. Lemma classic_EM P : classically (decidable P). Proof. by case=> // undecP; apply/undecP; right=> notP; apply/notF/undecP; left. Qed. Lemma classic_pick T P : classically ({x : T | P x} + (forall x, ~ P x)). Proof. case=> // undecP; apply/undecP; right=> x Px. by apply/notF/undecP; left; exists x. Qed. Lemma classic_imply P Q : (P -> classically Q) -> classically (P -> Q). Proof. move=> iPQ []// notPQ; apply/notPQ=> /iPQ-cQ. by case: notF; apply: cQ => hQ; apply: notPQ. Qed. (** List notations for wider connectives; the Prop connectives have a fixed width so as to avoid iterated destruction (we go up to width 5 for /\, and width 4 for or). The bool connectives have arbitrary widths, but denote expressions that associate to the RIGHT. This is consistent with the right associativity of list expressions and thus more convenient in most proofs. 
**) Inductive and3 (P1 P2 P3 : Prop) : Prop := And3 of P1 & P2 & P3. Inductive and4 (P1 P2 P3 P4 : Prop) : Prop := And4 of P1 & P2 & P3 & P4. Inductive and5 (P1 P2 P3 P4 P5 : Prop) : Prop := And5 of P1 & P2 & P3 & P4 & P5. Inductive or3 (P1 P2 P3 : Prop) : Prop := Or31 of P1 | Or32 of P2 | Or33 of P3. Inductive or4 (P1 P2 P3 P4 : Prop) : Prop := Or41 of P1 | Or42 of P2 | Or43 of P3 | Or44 of P4. Notation "[ /\ P1 & P2 ]" := (and P1 P2) (only parsing) : type_scope. Notation "[ /\ P1 , P2 & P3 ]" := (and3 P1 P2 P3) : type_scope. Notation "[ /\ P1 , P2 , P3 & P4 ]" := (and4 P1 P2 P3 P4) : type_scope. Notation "[ /\ P1 , P2 , P3 , P4 & P5 ]" := (and5 P1 P2 P3 P4 P5) : type_scope. Notation "[ \/ P1 | P2 ]" := (or P1 P2) (only parsing) : type_scope. Notation "[ \/ P1 , P2 | P3 ]" := (or3 P1 P2 P3) : type_scope. Notation "[ \/ P1 , P2 , P3 | P4 ]" := (or4 P1 P2 P3 P4) : type_scope. Notation "[ && b1 & c ]" := (b1 && c) (only parsing) : bool_scope. Notation "[ && b1 , b2 , .. , bn & c ]" := (b1 && (b2 && .. (bn && c) .. )) : bool_scope. Notation "[ || b1 | c ]" := (b1 || c) (only parsing) : bool_scope. Notation "[ || b1 , b2 , .. , bn | c ]" := (b1 || (b2 || .. (bn || c) .. )) : bool_scope. Notation "[ ==> b1 , b2 , .. , bn => c ]" := (b1 ==> (b2 ==> .. (bn ==> c) .. )) : bool_scope. Notation "[ ==> b1 => c ]" := (b1 ==> c) (only parsing) : bool_scope. Section AllAnd. Variables (T : Type) (P1 P2 P3 P4 P5 : T -> Prop). Local Notation a P := (forall x, P x). Lemma all_and2 : implies (forall x, [/\ P1 x & P2 x]) [/\ a P1 & a P2]. Proof. by split=> haveP; split=> x; case: (haveP x). Qed. Lemma all_and3 : implies (forall x, [/\ P1 x, P2 x & P3 x]) [/\ a P1, a P2 & a P3]. Proof. by split=> haveP; split=> x; case: (haveP x). Qed. Lemma all_and4 : implies (forall x, [/\ P1 x, P2 x, P3 x & P4 x]) [/\ a P1, a P2, a P3 & a P4]. Proof. by split=> haveP; split=> x; case: (haveP x). Qed. Lemma all_and5 : implies (forall x, [/\ P1 x, P2 x, P3 x, P4 x & P5 x]) [/\ a P1, a P2, a P3, a P4 & a P5]. Proof. by split=> haveP; split=> x; case: (haveP x). Qed. End AllAnd. Arguments all_and2 {T P1 P2}. Arguments all_and3 {T P1 P2 P3}. Arguments all_and4 {T P1 P2 P3 P4}. Arguments all_and5 {T P1 P2 P3 P4 P5}. Lemma pair_andP P Q : P /\ Q <-> P * Q. Proof. by split; case. Qed. Section ReflectConnectives. Variable b1 b2 b3 b4 b5 : bool. Lemma idP : reflect b1 b1. Proof. by case b1; constructor. Qed. Lemma boolP : alt_spec b1 b1 b1. Proof. exact: (altP idP). Qed. Lemma idPn : reflect (~~ b1) (~~ b1). Proof. by case b1; constructor. Qed. Lemma negP : reflect (~ b1) (~~ b1). Proof. by case b1; constructor; auto. Qed. Lemma negPn : reflect b1 (~~ ~~ b1). Proof. by case b1; constructor. Qed. Lemma negPf : reflect (b1 = false) (~~ b1). Proof. by case b1; constructor. Qed. Lemma andP : reflect (b1 /\ b2) (b1 && b2). Proof. by case b1; case b2; constructor=> //; case. Qed. Lemma and3P : reflect [/\ b1, b2 & b3] [&& b1, b2 & b3]. Proof. by case b1; case b2; case b3; constructor; try by case. Qed. Lemma and4P : reflect [/\ b1, b2, b3 & b4] [&& b1, b2, b3 & b4]. Proof. by case b1; case b2; case b3; case b4; constructor; try by case. Qed. Lemma and5P : reflect [/\ b1, b2, b3, b4 & b5] [&& b1, b2, b3, b4 & b5]. Proof. by case b1; case b2; case b3; case b4; case b5; constructor; try by case. Qed. Lemma orP : reflect (b1 \/ b2) (b1 || b2). Proof. by case b1; case b2; constructor; auto; case. Qed. Lemma or3P : reflect [\/ b1, b2 | b3] [|| b1, b2 | b3]. Proof. case b1; first by constructor; constructor 1. 
case b2; first by constructor; constructor 2. case b3; first by constructor; constructor 3. by constructor; case. Qed. Lemma or4P : reflect [\/ b1, b2, b3 | b4] [|| b1, b2, b3 | b4]. Proof. case b1; first by constructor; constructor 1. case b2; first by constructor; constructor 2. case b3; first by constructor; constructor 3. case b4; first by constructor; constructor 4. by constructor; case. Qed. Lemma nandP : reflect (~~ b1 \/ ~~ b2) (~~ (b1 && b2)). Proof. by case b1; case b2; constructor; auto; case; auto. Qed. Lemma norP : reflect (~~ b1 /\ ~~ b2) (~~ (b1 || b2)). Proof. by case b1; case b2; constructor; auto; case; auto. Qed. Lemma implyP : reflect (b1 -> b2) (b1 ==> b2). Proof. by case b1; case b2; constructor; auto. Qed. End ReflectConnectives. Arguments idP {b1}. Arguments idPn {b1}. Arguments negP {b1}. Arguments negPn {b1}. Arguments negPf {b1}. Arguments andP {b1 b2}. Arguments and3P {b1 b2 b3}. Arguments and4P {b1 b2 b3 b4}. Arguments and5P {b1 b2 b3 b4 b5}. Arguments orP {b1 b2}. Arguments or3P {b1 b2 b3}. Arguments or4P {b1 b2 b3 b4}. Arguments nandP {b1 b2}. Arguments norP {b1 b2}. Arguments implyP {b1 b2}. Prenex Implicits idP idPn negP negPn negPf. Prenex Implicits andP and3P and4P and5P orP or3P or4P nandP norP implyP. Section ReflectCombinators. Variables (P Q : Prop) (p q : bool). Hypothesis rP : reflect P p. Hypothesis rQ : reflect Q q. Lemma negPP : reflect (~ P) (~~ p). Proof. by apply:(iffP negP); apply: contra_not => /rP. Qed. Lemma andPP : reflect (P /\ Q) (p && q). Proof. by apply: (iffP andP) => -[/rP ? /rQ ?]. Qed. Lemma orPP : reflect (P \/ Q) (p || q). Proof. by apply: (iffP orP) => -[/rP ?|/rQ ?]; tauto. Qed. Lemma implyPP : reflect (P -> Q) (p ==> q). Proof. by apply: (iffP implyP) => pq /rP /pq /rQ. Qed. End ReflectCombinators. Arguments negPP {P p}. Arguments andPP {P Q p q}. Arguments orPP {P Q p q}. Arguments implyPP {P Q p q}. Prenex Implicits negPP andPP orPP implyPP. (** Shorter, more systematic names for the boolean connectives laws. **) Lemma andTb : left_id true andb. Proof. by []. Qed. Lemma andFb : left_zero false andb. Proof. by []. Qed. Lemma andbT : right_id true andb. Proof. by case. Qed. Lemma andbF : right_zero false andb. Proof. by case. Qed. Lemma andbb : idempotent andb. Proof. by case. Qed. Lemma andbC : commutative andb. Proof. by do 2!case. Qed. Lemma andbA : associative andb. Proof. by do 3!case. Qed. Lemma andbCA : left_commutative andb. Proof. by do 3!case. Qed. Lemma andbAC : right_commutative andb. Proof. by do 3!case. Qed. Lemma andbACA : interchange andb andb. Proof. by do 4!case. Qed. Lemma orTb : forall b, true || b. Proof. by []. Qed. Lemma orFb : left_id false orb. Proof. by []. Qed. Lemma orbT : forall b, b || true. Proof. by case. Qed. Lemma orbF : right_id false orb. Proof. by case. Qed. Lemma orbb : idempotent orb. Proof. by case. Qed. Lemma orbC : commutative orb. Proof. by do 2!case. Qed. Lemma orbA : associative orb. Proof. by do 3!case. Qed. Lemma orbCA : left_commutative orb. Proof. by do 3!case. Qed. Lemma orbAC : right_commutative orb. Proof. by do 3!case. Qed. Lemma orbACA : interchange orb orb. Proof. by do 4!case. Qed. Lemma andbN b : b && ~~ b = false. Proof. by case: b. Qed. Lemma andNb b : ~~ b && b = false. Proof. by case: b. Qed. Lemma orbN b : b || ~~ b = true. Proof. by case: b. Qed. Lemma orNb b : ~~ b || b = true. Proof. by case: b. Qed. Lemma andb_orl : left_distributive andb orb. Proof. by do 3!case. Qed. Lemma andb_orr : right_distributive andb orb. Proof. by do 3!case. Qed. 
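(* Added usage sketch (illustration only, not part of the original library): moving between boolean connectives and their Prop counterparts with the reflection views above (andP, norP); the example names are arbitrary. *)
Example andP_usage_example (a b : bool) : a && b -> b && a.
Proof. by move=> /andP[ha hb]; apply/andP; split. Qed.
Example norP_usage_example (a b : bool) : ~~ (a || b) -> ~~ a /\ ~~ b.
Proof. by move/norP. Qed.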
Lemma orb_andl : left_distributive orb andb. Proof. by do 3!case. Qed. Lemma orb_andr : right_distributive orb andb. Proof. by do 3!case. Qed. Lemma andb_idl (a b : bool) : (b -> a) -> a && b = b. Proof. by case: a; case: b => // ->. Qed. Lemma andb_idr (a b : bool) : (a -> b) -> a && b = a. Proof. by case: a; case: b => // ->. Qed. Lemma andb_id2l (a b c : bool) : (a -> b = c) -> a && b = a && c. Proof. by case: a; case: b; case: c => // ->. Qed. Lemma andb_id2r (a b c : bool) : (b -> a = c) -> a && b = c && b. Proof. by case: a; case: b; case: c => // ->. Qed. Lemma orb_idl (a b : bool) : (a -> b) -> a || b = b. Proof. by case: a; case: b => // ->. Qed. Lemma orb_idr (a b : bool) : (b -> a) -> a || b = a. Proof. by case: a; case: b => // ->. Qed. Lemma orb_id2l (a b c : bool) : (~~ a -> b = c) -> a || b = a || c. Proof. by case: a; case: b; case: c => // ->. Qed. Lemma orb_id2r (a b c : bool) : (~~ b -> a = c) -> a || b = c || b. Proof. by case: a; case: b; case: c => // ->. Qed. Lemma negb_and (a b : bool) : ~~ (a && b) = ~~ a || ~~ b. Proof. by case: a; case: b. Qed. Lemma negb_or (a b : bool) : ~~ (a || b) = ~~ a && ~~ b. Proof. by case: a; case: b. Qed. (** Pseudo-cancellation -- i.e, absorption **) Lemma andbK a b : a && b || a = a. Proof. by case: a; case: b. Qed. Lemma andKb a b : a || b && a = a. Proof. by case: a; case: b. Qed. Lemma orbK a b : (a || b) && a = a. Proof. by case: a; case: b. Qed. Lemma orKb a b : a && (b || a) = a. Proof. by case: a; case: b. Qed. (** Imply **) Lemma implybT b : b ==> true. Proof. by case: b. Qed. Lemma implybF b : (b ==> false) = ~~ b. Proof. by case: b. Qed. Lemma implyFb b : false ==> b. Proof. by []. Qed. Lemma implyTb b : (true ==> b) = b. Proof. by []. Qed. Lemma implybb b : b ==> b. Proof. by case: b. Qed. Lemma negb_imply a b : ~~ (a ==> b) = a && ~~ b. Proof. by case: a; case: b. Qed. Lemma implybE a b : (a ==> b) = ~~ a || b. Proof. by case: a; case: b. Qed. Lemma implyNb a b : (~~ a ==> b) = a || b. Proof. by case: a; case: b. Qed. Lemma implybN a b : (a ==> ~~ b) = (b ==> ~~ a). Proof. by case: a; case: b. Qed. Lemma implybNN a b : (~~ a ==> ~~ b) = b ==> a. Proof. by case: a; case: b. Qed. Lemma implyb_idl (a b : bool) : (~~ a -> b) -> (a ==> b) = b. Proof. by case: a; case: b => // ->. Qed. Lemma implyb_idr (a b : bool) : (b -> ~~ a) -> (a ==> b) = ~~ a. Proof. by case: a; case: b => // ->. Qed. Lemma implyb_id2l (a b c : bool) : (a -> b = c) -> (a ==> b) = (a ==> c). Proof. by case: a; case: b; case: c => // ->. Qed. (** Addition (xor) **) Lemma addFb : left_id false addb. Proof. by []. Qed. Lemma addbF : right_id false addb. Proof. by case. Qed. Lemma addbb : self_inverse false addb. Proof. by case. Qed. Lemma addbC : commutative addb. Proof. by do 2!case. Qed. Lemma addbA : associative addb. Proof. by do 3!case. Qed. Lemma addbCA : left_commutative addb. Proof. by do 3!case. Qed. Lemma addbAC : right_commutative addb. Proof. by do 3!case. Qed. Lemma addbACA : interchange addb addb. Proof. by do 4!case. Qed. Lemma andb_addl : left_distributive andb addb. Proof. by do 3!case. Qed. Lemma andb_addr : right_distributive andb addb. Proof. by do 3!case. Qed. Lemma addKb : left_loop id addb. Proof. by do 2!case. Qed. Lemma addbK : right_loop id addb. Proof. by do 2!case. Qed. Lemma addIb : left_injective addb. Proof. by do 3!case. Qed. Lemma addbI : right_injective addb. Proof. by do 3!case. Qed. Lemma addTb b : true (+) b = ~~ b. Proof. by []. Qed. Lemma addbT b : b (+) true = ~~ b. Proof. by case: b. Qed. 
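(* Added usage sketch (illustration only, not part of the original library): xor-ing twice with the same boolean cancels out, using the addb laws above; the example name is arbitrary. *)
Example addb_cancel_example (a b : bool) : (a (+) b) (+) b = a.
Proof. by rewrite -addbA addbb addbF. Qed.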
Lemma addbN a b : a (+) ~~ b = ~~ (a (+) b). Proof. by case: a; case: b. Qed. Lemma addNb a b : ~~ a (+) b = ~~ (a (+) b). Proof. by case: a; case: b. Qed. Lemma addbP a b : reflect (~~ a = b) (a (+) b). Proof. by case: a; case: b; constructor. Qed. Arguments addbP {a b}. (** Resolution tactic for blindly weeding out common terms from boolean equalities. When faced with a goal of the form (andb/orb/addb b1 b2) = b3 they will try to locate b1 in b3 and remove it. This can fail! **) Ltac bool_congr := match goal with | |- (?X1 && ?X2 = ?X3) => first [ symmetry; rewrite -1?(andbC X1) -?(andbCA X1); congr 1 (andb X1); symmetry | case: (X1); [ rewrite ?andTb ?andbT // | by rewrite ?andbF /= ] ] | |- (?X1 || ?X2 = ?X3) => first [ symmetry; rewrite -1?(orbC X1) -?(orbCA X1); congr 1 (orb X1); symmetry | case: (X1); [ by rewrite ?orbT //= | rewrite ?orFb ?orbF ] ] | |- (?X1 (+) ?X2 = ?X3) => symmetry; rewrite -1?(addbC X1) -?(addbCA X1); congr 1 (addb X1); symmetry | |- (~~ ?X1 = ?X2) => congr 1 negb end. (** Predicates, i.e., packaged functions to bool. - pred T, the basic type for predicates over a type T, is simply an alias for T -> bool. We actually distinguish two kinds of predicates, which we call applicative and collective, based on the syntax used to test them at some x in T: - For an applicative predicate P, one uses prefix syntax: P x Also, most operations on applicative predicates use prefix syntax as well (e.g., predI P Q). - For a collective predicate A, one uses infix syntax: x \in A and all operations on collective predicates use infix syntax as well (e.g., #[#predI A & B#]#). There are only two kinds of applicative predicates: - pred T, the alias for T -> bool mentioned above - simpl_pred T, an alias for simpl_fun T bool with a coercion to pred T that auto-simplifies on application (see ssrfun). On the other hand, the set of collective predicate types is open-ended via - predType T, a Structure that can be used to put Canonical collective predicate interpretation on other types, such as lists, tuples, finite sets, etc. Indeed, we define such interpretations for applicative predicate types, which can therefore also be used with the infix syntax, e.g., x \in predI P Q Moreover these infix forms are convertible to their prefix counterpart (e.g., predI P Q x which in turn simplifies to P x && Q x). The converse is not true, however; collective predicate types cannot, in general, be used applicatively, because of restrictions on implicit coercions. However, we do define an explicit generic coercion - mem : forall (pT : predType), pT -> mem_pred T where mem_pred T is a variant of simpl_pred T that preserves the infix syntax, i.e., mem A x auto-simplifies to x \in A. Indeed, the infix "collective" operators are notation for a prefix operator with arguments of type mem_pred T or pred T, applied to coerced collective predicates, e.g., Notation "x \in A" := (in_mem x (mem A)). This prevents the variability in the predicate type from interfering with the application of generic lemmas. Moreover this also makes it much easier to define generic lemmas, because the simplest type -- pred T -- can be used as the type of generic collective predicates, provided one takes care not to use it applicatively; this avoids the burden of having to declare a different predicate type for each predicate parameter of each section or lemma. 
In detail, we ensure that the head normal form of mem A is always of the eta-long MemPred (fun x => pA x) form, where pA is the pred interpretation of A following its predType pT, i.e., the _expansion_ of topred A. For a pred T evar ?P, (mem ?P) converts to MemPred (fun x => ?P x), whose argument is a Miller pattern and therefore always unifies: unifying (mem A) with (mem ?P) always yields ?P = pA, because the rigid constant MemPred aligns the unification. Furthermore, we ensure pA is always either A or toP .... A where toP ... is the expansion of @topred T pT, and toP is declared as a Coercion, so pA will _display_ as A in either case, and the instances of @mem T (predPredType T) pA appearing in the premises or right-hand side of a generic lemma parameterized by ?P will be indistinguishable from @mem T pT A. Users should take care not to inadvertently "strip" (mem A) down to the coerced A, since this will expose the internal toP coercion: Coq could then display terms A x that cannot be typed as such. The topredE lemma can be used to restore the x \in A syntax in this case. While -topredE can conversely be used to change x \in P into P x for an applicative P, it is safer to use the inE, unfold_in, or memE lemmas instead, as they do not run the risk of exposing internal coercions. As a consequence it is better to explicitly cast a generic applicative predicate to simpl_pred using the SimplPred constructor when it is used as a collective predicate (see, e.g., Lemma eq_big in bigop). We also sometimes "instantiate" the predType structure by defining a coercion to the sort of the predPredType structure, conveniently denoted {pred T}. This works better for types such as {set T} that have subtypes that coerce to them, since the same coercion will be inserted by the application of mem, or of any lemma that expects a generic collective predicate with type {pred T} := pred_sort (predPredType T) = pred T; thus {pred T} should be the preferred type for generic collective predicate parameters. This device also lets us turn any Type aT : predArgType into the total predicate over that type, i.e., fun _: aT => true. This allows us to write, e.g., ##|'I_n| for the cardinal of the (finite) type of integers less than n. **) (** Boolean predicates. *) Definition pred T := T -> bool. Identity Coercion fun_of_pred : pred >-> Funclass. Definition subpred T (p1 p2 : pred T) := forall x : T, p1 x -> p2 x. (* Notation for some manifest predicates. *) Notation xpred0 := (fun=> false). Notation xpredT := (fun=> true). Notation xpredI := (fun (p1 p2 : pred _) x => p1 x && p2 x). Notation xpredU := (fun (p1 p2 : pred _) x => p1 x || p2 x). Notation xpredC := (fun (p : pred _) x => ~~ p x). Notation xpredD := (fun (p1 p2 : pred _) x => ~~ p2 x && p1 x). Notation xpreim := (fun f (p : pred _) x => p (f x)). (** The packed class interface for pred-like types. **) Structure predType T := PredType {pred_sort :> Type; topred : pred_sort -> pred T}. Definition clone_pred T U := fun pT & @pred_sort T pT -> U => fun toP (pT' := @PredType T U toP) & phant_id pT' pT => pT'. Notation "[ 'predType' 'of' T ]" := (@clone_pred _ T _ id _ id) : form_scope. Canonical predPredType T := PredType (@id (pred T)). Set Warnings "-redundant-canonical-projection". Canonical boolfunPredType T := PredType (@id (T -> bool)). Set Warnings "redundant-canonical-projection". (** The type of abstract collective predicates. While {pred T} is convertible to pred T, it presents the pred_sort coercion class, which crucially does _not_ coerce to Funclass.
Terms whose type P coerces to {pred T} cannot be applied to arguments, but they _can_ be used as if P had a canonical predType instance, as the coercion will be inserted if the unification P =~= pred_sort ?pT fails, changing the problem into the trivial {pred T} =~= pred_sort ?pT (solution ?pT := predPredType P). Additional benefits of this approach are that any type coercing to P will also inherit this behaviour, and that the coercion will be apparent in the elaborated expression. The latter may be important if the coercion is also a canonical structure projector - see mathcomp/fingroup/fingroup.v. The main drawback of implementing predType by coercion in this way is that the type of the value must be known when the unification constraint is imposed: if we only register the constraint and then later discover that the expression had type P, it will be too late to insert a coercion, whereas a canonical instance of predType for P would have solved the deferred constraint. Finally, definitions, lemmas and sections should use type {pred T} for their generic collective type parameters, as this will make it possible to apply such definitions and lemmas directly to values of types that implement predType by coercion to {pred T} (values of types that implement predType without coercing to {pred T} will have to be coerced explicitly using topred). **) Notation "{ 'pred' T }" := (pred_sort (predPredType T)) : type_scope. (** The type of self-simplifying collective predicates. **) Definition simpl_pred T := simpl_fun T bool. Definition SimplPred {T} (p : pred T) : simpl_pred T := SimplFun p. (** Some simpl_pred constructors. **) Definition pred0 {T} := @SimplPred T xpred0. Definition predT {T} := @SimplPred T xpredT. Definition predI {T} (p1 p2 : pred T) := SimplPred (xpredI p1 p2). Definition predU {T} (p1 p2 : pred T) := SimplPred (xpredU p1 p2). Definition predC {T} (p : pred T) := SimplPred (xpredC p). Definition predD {T} (p1 p2 : pred T) := SimplPred (xpredD p1 p2). Definition preim {aT rT} (f : aT -> rT) (d : pred rT) := SimplPred (xpreim f d). Notation "[ 'pred' : T | E ]" := (SimplPred (fun _ : T => E%B)) : fun_scope. Notation "[ 'pred' x | E ]" := (SimplPred (fun x => E%B)) : fun_scope. Notation "[ 'pred' x | E1 & E2 ]" := [pred x | E1 && E2 ] : fun_scope. Notation "[ 'pred' x : T | E ]" := (SimplPred (fun x : T => E%B)) (only parsing) : fun_scope. Notation "[ 'pred' x : T | E1 & E2 ]" := [pred x : T | E1 && E2 ] (only parsing) : fun_scope. (** Coercions for simpl_pred. As simpl_pred T values are used both applicatively and collectively we need simpl_pred to coerce to both pred T _and_ {pred T}. However it is undesirable to have two distinct constants for what are essentially identical coercion functions, as this confuses the SSReflect keyed matching algorithm. While the Coq Coercion declarations appear to disallow such Coercion aliasing, it is possible to work around this limitation with a combination of modules and functors, which we do below. In addition we also give a predType instance for simpl_pred, which will be preferred to the {pred T} coercion to solve simpl_pred T =~= pred_sort ?pT constraints; note however that the pred_of_simpl coercion _will_ be used when a simpl_pred T is passed as a {pred T}, since the simplPredType T structure for simpl_pred T is _not_ convertible to predPredType T. **) Module PredOfSimpl. Definition coerce T (sp : simpl_pred T) : pred T := fun_of_simpl sp. End PredOfSimpl. Notation pred_of_simpl := PredOfSimpl.coerce.
Coercion pred_of_simpl : simpl_pred >-> pred. Canonical simplPredType T := PredType (@pred_of_simpl T). Module Type PredSortOfSimplSignature. Parameter coerce : forall T, simpl_pred T -> {pred T}. End PredSortOfSimplSignature. Module DeclarePredSortOfSimpl (PredSortOfSimpl : PredSortOfSimplSignature). Coercion PredSortOfSimpl.coerce : simpl_pred >-> pred_sort. End DeclarePredSortOfSimpl. Module Export PredSortOfSimplCoercion := DeclarePredSortOfSimpl PredOfSimpl. (** Type to pred coercion. This lets us use types of sort predArgType as a synonym for their universal predicate. We define this predicate as a simpl_pred T rather than a pred T or a {pred T} so that /= and inE reduce (T x) and x \in T to true, respectively. Unfortunately, this can't be used for existing types like bool whose sort is already fixed (at least, not without redefining bool, true, false and all bool operations and lemmas); we provide syntax to recast a given type in predArgType as a workaround. **) Definition predArgType := Type. Bind Scope type_scope with predArgType. Identity Coercion sort_of_predArgType : predArgType >-> Sortclass. Coercion pred_of_argType (T : predArgType) : simpl_pred T := predT. Notation "{ : T }" := (T%type : predArgType) : type_scope. (** Boolean relations. Simplifying relations follow the coding pattern of 2-argument simplifying functions: the simplifying type constructor is applied to the _last_ argument. This design choice will let the in_simpl component of inE expand membership in simpl_rel as well. We provide an explicit coercion to rel T to avoid eta-expansion during coercion; this coercion self-simplifies so it should be invisible. **) Definition rel T := T -> pred T. Identity Coercion fun_of_rel : rel >-> Funclass. Definition subrel T (r1 r2 : rel T) := forall x y : T, r1 x y -> r2 x y. Definition simpl_rel T := T -> simpl_pred T. Coercion rel_of_simpl T (sr : simpl_rel T) : rel T := fun x : T => sr x. Arguments rel_of_simpl {T} sr x /. Notation xrelU := (fun (r1 r2 : rel _) x y => r1 x y || r2 x y). Notation xrelpre := (fun f (r : rel _) x y => r (f x) (f y)). Definition SimplRel {T} (r : rel T) : simpl_rel T := fun x => SimplPred (r x). Definition relU {T} (r1 r2 : rel T) := SimplRel (xrelU r1 r2). Definition relpre {aT rT} (f : aT -> rT) (r : rel rT) := SimplRel (xrelpre f r). Notation "[ 'rel' x y | E ]" := (SimplRel (fun x y => E%B)) (only parsing) : fun_scope. Notation "[ 'rel' x y : T | E ]" := (SimplRel (fun x y : T => E%B)) (only parsing) : fun_scope. Lemma subrelUl T (r1 r2 : rel T) : subrel r1 (relU r1 r2). Proof. by move=> x y r1xy; apply/orP; left. Qed. Lemma subrelUr T (r1 r2 : rel T) : subrel r2 (relU r1 r2). Proof. by move=> x y r2xy; apply/orP; right. Qed. (** Variant of simpl_pred specialised to the membership operator. **) Variant mem_pred T := Mem of pred T. (** We mainly declare pred_of_mem as a coercion so that it is not displayed. Similarly to pred_of_simpl, it will usually not be inserted by type inference, as all mem_pred mp =~= pred_sort ?pT unification problems will be solved by the memPredType instance below; pred_of_mem will however be used if a mem_pred T is used as a {pred T}, which is desirable as it will avoid a redundant mem in a collective, e.g., passing (mem A) to a lemma expecting a generic collective predicate p : {pred T} and premise x \in p will display a subgoal x \in A rather than x \in mem A.
Conversely, pred_of_mem will _not_ be used if (mem A) is used applicatively or as a pred T; there the simpl_of_mem coercion defined below will be used, resulting in a subgoal that displays as mem A x but simplifies to x \in A. **) Coercion pred_of_mem {T} mp : {pred T} := let: Mem p := mp in [eta p]. Canonical memPredType T := PredType (@pred_of_mem T). Definition in_mem {T} (x : T) mp := pred_of_mem mp x. Definition eq_mem {T} mp1 mp2 := forall x : T, in_mem x mp1 = in_mem x mp2. Definition sub_mem {T} mp1 mp2 := forall x : T, in_mem x mp1 -> in_mem x mp2. Arguments in_mem {T} x mp : simpl never. Global Typeclasses Opaque eq_mem sub_mem. (** The [simpl_of_mem; pred_of_simpl] path provides a new mem_pred >-> pred coercion, but does _not_ override the pred_of_mem : mem_pred >-> pred_sort explicit coercion declaration above. **) Coercion simpl_of_mem {T} mp := SimplPred (fun x : T => in_mem x mp). Lemma sub_refl T (mp : mem_pred T) : sub_mem mp mp. Proof. by []. Qed. Arguments sub_refl {T mp} [x] mp_x. (** It is essential to interlock the production of the Mem constructor inside the branch of the predType match, to ensure that unifying mem A with Mem [eta ?p] sets ?p := toP A (or ?p := P if toP = id and A = [eta P]), rather than topred pT A, had we put mem A := Mem (topred A). **) Definition mem T (pT : predType T) : pT -> mem_pred T := let: PredType toP := pT in fun A => Mem [eta toP A]. Arguments mem {T pT} A : rename, simpl never. Notation "x \in A" := (in_mem x (mem A)) (only parsing) : bool_scope. Notation "x \in A" := (in_mem x (mem A)) (only printing) : bool_scope. Notation "x \notin A" := (~~ (x \in A)) : bool_scope. Notation "A =i B" := (eq_mem (mem A) (mem B)) : type_scope. Notation "{ 'subset' A <= B }" := (sub_mem (mem A) (mem B)) : type_scope. Notation "[ 'mem' A ]" := (pred_of_simpl (simpl_of_mem (mem A))) (only parsing) : fun_scope. Notation "[ 'predI' A & B ]" := (predI [mem A] [mem B]) : fun_scope. Notation "[ 'predU' A & B ]" := (predU [mem A] [mem B]) : fun_scope. Notation "[ 'predD' A & B ]" := (predD [mem A] [mem B]) : fun_scope. Notation "[ 'predC' A ]" := (predC [mem A]) : fun_scope. Notation "[ 'preim' f 'of' A ]" := (preim f [mem A]) : fun_scope. Notation "[ 'pred' x 'in' A ]" := [pred x | x \in A] : fun_scope. Notation "[ 'pred' x 'in' A | E ]" := [pred x | x \in A & E] : fun_scope. Notation "[ 'pred' x 'in' A | E1 & E2 ]" := [pred x | x \in A & E1 && E2 ] : fun_scope. Notation "[ 'rel' x y 'in' A & B | E ]" := [rel x y | (x \in A) && (y \in B) && E] : fun_scope. Notation "[ 'rel' x y 'in' A & B ]" := [rel x y | (x \in A) && (y \in B)] : fun_scope. Notation "[ 'rel' x y 'in' A | E ]" := [rel x y in A & A | E] : fun_scope. Notation "[ 'rel' x y 'in' A ]" := [rel x y in A & A] : fun_scope. (** Aliases of pred T that let us tag instances of simpl_pred as applicative or collective, via bespoke coercions. This tagging will give control over the simplification behaviour of inE and other rewriting lemmas below. For this control to work it is crucial that collective_of_simpl _not_ be convertible to either applicative_of_simpl or pred_of_simpl. Indeed they differ here by a commutative conversion (of the match and lambda). **) Definition applicative_pred T := pred T. Definition collective_pred T := pred T. Coercion applicative_pred_of_simpl T (sp : simpl_pred T) : applicative_pred T := fun_of_simpl sp. Coercion collective_pred_of_simpl T (sp : simpl_pred T) : collective_pred T := let: SimplFun p := sp in p.
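(* Added usage sketch (illustration only, not part of the original library): a simpl_pred used collectively through \in computes to the expected boolean; the example name is arbitrary. *)
Example simpl_pred_mem_example (b : bool) : (b \in [pred x | ~~ x]) = ~~ b.
Proof. by []. Qed.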
(** Explicit simplification rules for predicate application and membership. **) Section PredicateSimplification. Variables T : Type. Implicit Types (p : pred T) (pT : predType T) (sp : simpl_pred T). Implicit Types (mp : mem_pred T). (** The following four bespoke structures provide fine-grained control over matching the various predicate forms. While all four follow a common pattern of using a canonical projection to match a particular form of predicate (in pred T, simpl_pred, mem_pred and mem_pred, respectively), and display the matched predicate in the structure type, each is in fact used for a different, specific purpose: - registered_applicative_pred: this user-facing structure is used to declare values of type pred T meant to be used applicatively. The structure parameter merely displays this same value, and is used to avoid undesirable, visible occurrence of the structure in the right hand side of rewrite rules such as app_predE. There is a canonical instance of registered_applicative_pred for values of the applicative_of_simpl coercion, which handles the Definition Apred : applicative_pred T := [pred x | ...] idiom. This instance is mainly intended for the in_applicative component of inE, in conjunction with manifest_mem_pred and applicative_mem_pred. - manifest_simpl_pred: the only instance of this structure matches manifest simpl_pred values of the form SimplPred p, displaying p in the structure type. This structure is used in in_simpl to detect and selectively expand collective predicates of this form. An explicit SimplPred p pattern would _NOT_ work for this purpose, as then the left-hand side of in_simpl would reduce to in_mem ?x (Mem [eta ?p]) and would thus match _any_ instance of \in, not just those arising from a manifest simpl_pred. - manifest_mem_pred: similar to manifest_simpl_pred, the one instance of this structure matches manifest mem_pred values of the form Mem [eta ?p]. The purpose is different however: to match and display in ?p the actual predicate appearing in an ... \in ... expression matched by the left hand side of the in_applicative component of inE; then - applicative_mem_pred is a telescope refinement of manifest_mem_pred p with a default constructor that checks that the predicate p is the value of a registered_applicative_pred; any unfolding occurring during this check does _not_ affect the value of p passed to in_applicative, since that has been fixed earlier by the manifest_mem_pred match. In particular the definition of a predicate using the applicative_pred_of_simpl idiom above will not be expanded - this very case is the reason in_applicative uses a mem_pred telescope in its left hand side. The more straightforward ?x \in applicative_pred_value ?ap (equivalent to in_mem ?x (Mem ?ap)) with ?ap : registered_applicative_pred ?p would set ?p := [pred x | ...] rather than ?p := Apred in the example above. Also note that the in_applicative component of inE must be come before the in_simpl one, as the latter also matches terms of the form x \in Apred. Finally, no component of inE matches x \in Acoll, when Definition Acoll : collective_pred T := [pred x | ...]. as the collective_pred_of_simpl is _not_ convertible to pred_of_simpl. **) Structure registered_applicative_pred p := RegisteredApplicativePred { applicative_pred_value :> pred T; _ : applicative_pred_value = p }. Definition ApplicativePred p := RegisteredApplicativePred (erefl p). Canonical applicative_pred_applicative sp := ApplicativePred (applicative_pred_of_simpl sp). 
Structure manifest_simpl_pred p := ManifestSimplPred { simpl_pred_value :> simpl_pred T; _ : simpl_pred_value = SimplPred p }. Canonical expose_simpl_pred p := ManifestSimplPred (erefl (SimplPred p)). Structure manifest_mem_pred p := ManifestMemPred { mem_pred_value :> mem_pred T; _ : mem_pred_value = Mem [eta p] }. Canonical expose_mem_pred p := ManifestMemPred (erefl (Mem [eta p])). Structure applicative_mem_pred p := ApplicativeMemPred {applicative_mem_pred_value :> manifest_mem_pred p}. Canonical check_applicative_mem_pred p (ap : registered_applicative_pred p) := [eta @ApplicativeMemPred ap]. Lemma mem_topred pT (pp : pT) : mem (topred pp) = mem pp. Proof. by case: pT pp. Qed. Lemma topredE pT x (pp : pT) : topred pp x = (x \in pp). Proof. by rewrite -mem_topred. Qed. Lemma app_predE x p (ap : registered_applicative_pred p) : ap x = (x \in p). Proof. by case: ap => _ /= ->. Qed. Lemma in_applicative x p (amp : applicative_mem_pred p) : in_mem x amp = p x. Proof. by case: amp => -[_ /= ->]. Qed. Lemma in_collective x p (msp : manifest_simpl_pred p) : (x \in collective_pred_of_simpl msp) = p x. Proof. by case: msp => _ /= ->. Qed. Lemma in_simpl x p (msp : manifest_simpl_pred p) : in_mem x (Mem [eta pred_of_simpl msp]) = p x. Proof. by case: msp => _ /= ->. Qed. (** Because of the explicit eta expansion in the left-hand side, this lemma should only be used in the left-to-right direction. **) Lemma unfold_in x p : (x \in ([eta p] : pred T)) = p x. Proof. by []. Qed. Lemma simpl_predE p : SimplPred p =1 p. Proof. by []. Qed. Definition inE := (in_applicative, in_simpl, simpl_predE). (* to be extended *) Lemma mem_simpl sp : mem sp = sp :> pred T. Proof. by []. Qed. Definition memE := mem_simpl. (* could be extended *) Lemma mem_mem mp : (mem mp = mp) * (mem (mp : simpl_pred T) = mp) * (mem (mp : pred T) = mp). Proof. by case: mp. Qed. End PredicateSimplification. (** Qualifiers and keyed predicates. **) Variant qualifier (q : nat) T := Qualifier of {pred T}. Coercion has_quality n T (q : qualifier n T) : {pred T} := fun x => let: Qualifier _ p := q in p x. Arguments has_quality n {T}. Lemma qualifE n T p x : (x \in @Qualifier n T p) = p x. Proof. by []. Qed. Notation "x \is A" := (x \in has_quality 0 A) (only parsing) : bool_scope. Notation "x \is A" := (x \in has_quality 0 A) (only printing) : bool_scope. Notation "x \is 'a' A" := (x \in has_quality 1 A) (only parsing) : bool_scope. Notation "x \is 'a' A" := (x \in has_quality 1 A) (only printing) : bool_scope. Notation "x \is 'an' A" := (x \in has_quality 2 A) (only parsing) : bool_scope. Notation "x \is 'an' A" := (x \in has_quality 2 A) (only printing) : bool_scope. Notation "x \isn't A" := (x \notin has_quality 0 A) : bool_scope. Notation "x \isn't 'a' A" := (x \notin has_quality 1 A) : bool_scope. Notation "x \isn't 'an' A" := (x \notin has_quality 2 A) : bool_scope. Notation "[ 'qualify' x | P ]" := (Qualifier 0 (fun x => P%B)) : form_scope. Notation "[ 'qualify' x : T | P ]" := (Qualifier 0 (fun x : T => P%B)) (only parsing) : form_scope. Notation "[ 'qualify' 'a' x | P ]" := (Qualifier 1 (fun x => P%B)) : form_scope. Notation "[ 'qualify' 'a' x : T | P ]" := (Qualifier 1 (fun x : T => P%B)) (only parsing) : form_scope. Notation "[ 'qualify' 'an' x | P ]" := (Qualifier 2 (fun x => P%B)) : form_scope. Notation "[ 'qualify' 'an' x : T | P ]" := (Qualifier 2 (fun x : T => P%B)) (only parsing) : form_scope. (** Keyed predicates: support for property-bearing predicate interfaces. **) Section KeyPred. Variable T : Type. 
#[universes(template)] Variant pred_key (p : {pred T}) := DefaultPredKey. Variable p : {pred T}. Structure keyed_pred (k : pred_key p) := PackKeyedPred {unkey_pred :> {pred T}; _ : unkey_pred =i p}. Variable k : pred_key p. Definition KeyedPred := @PackKeyedPred k p (frefl _). Variable k_p : keyed_pred k. Lemma keyed_predE : k_p =i p. Proof. by case: k_p. Qed. (** Instances that strip the mem cast; the first one has "pred_of_mem" as its projection head value, while the second has "pred_of_simpl". The latter has the side benefit of preempting accidental misdeclarations. Note: pred_of_mem is the registered mem >-> pred_sort coercion, while [simpl_of_mem; pred_of_simpl] is the mem >-> pred >=> Funclass coercion. We must write down the coercions explicitly as the Canonical head constant computation does not strip casts. **) Canonical keyed_mem := @PackKeyedPred k (pred_of_mem (mem k_p)) keyed_predE. Canonical keyed_mem_simpl := @PackKeyedPred k (pred_of_simpl (mem k_p)) keyed_predE. End KeyPred. Local Notation in_unkey x S := (x \in @unkey_pred _ S _ _) (only parsing). Notation "x \in S" := (in_unkey x S) (only printing) : bool_scope. Section KeyedQualifier. Variables (T : Type) (n : nat) (q : qualifier n T). Structure keyed_qualifier (k : pred_key q) := PackKeyedQualifier {unkey_qualifier; _ : unkey_qualifier = q}. Definition KeyedQualifier k := PackKeyedQualifier k (erefl q). Variables (k : pred_key q) (k_q : keyed_qualifier k). Fact keyed_qualifier_suproof : unkey_qualifier k_q =i q. Proof. by case: k_q => /= _ ->. Qed. Canonical keyed_qualifier_keyed := PackKeyedPred k keyed_qualifier_suproof. End KeyedQualifier. Notation "x \is A" := (in_unkey x (has_quality 0 A)) (only printing) : bool_scope. Notation "x \is 'a' A" := (in_unkey x (has_quality 1 A)) (only printing) : bool_scope. Notation "x \is 'an' A" := (in_unkey x (has_quality 2 A)) (only printing) : bool_scope. Module DefaultKeying. Canonical default_keyed_pred T p := KeyedPred (@DefaultPredKey T p). Canonical default_keyed_qualifier T n (q : qualifier n T) := KeyedQualifier (DefaultPredKey q). End DefaultKeying. (** Skolemizing with conditions. **) Lemma all_tag_cond_dep I T (C : pred I) U : (forall x, T x) -> (forall x, C x -> {y : T x & U x y}) -> {f : forall x, T x & forall x, C x -> U x (f x)}. Proof. move=> f0 fP; apply: all_tag (fun x y => C x -> U x y) _ => x. by case Cx: (C x); [case/fP: Cx => y; exists y | exists (f0 x)]. Qed. Lemma all_tag_cond I T (C : pred I) U : T -> (forall x, C x -> {y : T & U x y}) -> {f : I -> T & forall x, C x -> U x (f x)}. Proof. by move=> y0; apply: all_tag_cond_dep. Qed. Lemma all_sig_cond_dep I T (C : pred I) P : (forall x, T x) -> (forall x, C x -> {y : T x | P x y}) -> {f : forall x, T x | forall x, C x -> P x (f x)}. Proof. by move=> f0 /(all_tag_cond_dep f0)[f]; exists f. Qed. Lemma all_sig_cond I T (C : pred I) P : T -> (forall x, C x -> {y : T | P x y}) -> {f : I -> T | forall x, C x -> P x (f x)}. Proof. by move=> y0; apply: all_sig_cond_dep. Qed. Section RelationProperties. (** Caveat: reflexive should not be used to state lemmas, as auto and trivial will not expand the constant. **) Variable T : Type. Variable R : rel T. Definition total := forall x y, R x y || R y x. Definition transitive := forall y x z, R x y -> R y z -> R x z. Definition symmetric := forall x y, R x y = R y x. Definition antisymmetric := forall x y, R x y && R y x -> x = y. Definition pre_symmetric := forall x y, R x y -> R y x. Lemma symmetric_from_pre : pre_symmetric -> symmetric. Proof. 
by move=> symR x y; apply/idP/idP; apply: symR. Qed. Definition reflexive := forall x, R x x. Definition irreflexive := forall x, R x x = false. Definition left_transitive := forall x y, R x y -> R x =1 R y. Definition right_transitive := forall x y, R x y -> R^~ x =1 R^~ y. Section PER. Hypotheses (symR : symmetric) (trR : transitive). Lemma sym_left_transitive : left_transitive. Proof. by move=> x y Rxy z; apply/idP/idP; apply: trR; rewrite // symR. Qed. Lemma sym_right_transitive : right_transitive. Proof. by move=> x y /sym_left_transitive Rxy z; rewrite !(symR z) Rxy. Qed. End PER. (** We define the equivalence property with prenex quantification so that it can be localized using the {in ..., ..} form defined below. **) Definition equivalence_rel := forall x y z, R z z * (R x y -> R x z = R y z). Lemma equivalence_relP : equivalence_rel <-> reflexive /\ left_transitive. Proof. split=> [eqiR | [Rxx trR] x y z]; last by split=> [|/trR->]. by split=> [x | x y Rxy z]; [rewrite (eqiR x x x) | rewrite (eqiR x y z)]. Qed. End RelationProperties. Lemma rev_trans T (R : rel T) : transitive R -> transitive (fun x y => R y x). Proof. by move=> trR x y z Ryx Rzy; apply: trR Rzy Ryx. Qed. (** Property localization **) Local Notation "{ 'all1' P }" := (forall x, P x : Prop) (at level 0). Local Notation "{ 'all2' P }" := (forall x y, P x y : Prop) (at level 0). Local Notation "{ 'all3' P }" := (forall x y z, P x y z: Prop) (at level 0). Local Notation ph := (phantom _). Section LocalProperties. Variables T1 T2 T3 : Type. Variables (d1 : mem_pred T1) (d2 : mem_pred T2) (d3 : mem_pred T3). Local Notation ph := (phantom Prop). Definition prop_for (x : T1) P & ph {all1 P} := P x. Lemma forE x P phP : @prop_for x P phP = P x. Proof. by []. Qed. Definition prop_in1 P & ph {all1 P} := forall x, in_mem x d1 -> P x. Definition prop_in11 P & ph {all2 P} := forall x y, in_mem x d1 -> in_mem y d2 -> P x y. Definition prop_in2 P & ph {all2 P} := forall x y, in_mem x d1 -> in_mem y d1 -> P x y. Definition prop_in111 P & ph {all3 P} := forall x y z, in_mem x d1 -> in_mem y d2 -> in_mem z d3 -> P x y z. Definition prop_in12 P & ph {all3 P} := forall x y z, in_mem x d1 -> in_mem y d2 -> in_mem z d2 -> P x y z. Definition prop_in21 P & ph {all3 P} := forall x y z, in_mem x d1 -> in_mem y d1 -> in_mem z d2 -> P x y z. Definition prop_in3 P & ph {all3 P} := forall x y z, in_mem x d1 -> in_mem y d1 -> in_mem z d1 -> P x y z. Variable f : T1 -> T2. Definition prop_on1 Pf P & phantom T3 (Pf f) & ph {all1 P} := forall x, in_mem (f x) d2 -> P x. Definition prop_on2 Pf P & phantom T3 (Pf f) & ph {all2 P} := forall x y, in_mem (f x) d2 -> in_mem (f y) d2 -> P x y. End LocalProperties. Definition inPhantom := Phantom Prop. Definition onPhantom {T} P (x : T) := Phantom Prop (P x). Definition bijective_in aT rT (d : mem_pred aT) (f : aT -> rT) := exists2 g, prop_in1 d (inPhantom (cancel f g)) & prop_on1 d (Phantom _ (cancel g)) (onPhantom (cancel g) f). Definition bijective_on aT rT (cd : mem_pred rT) (f : aT -> rT) := exists2 g, prop_on1 cd (Phantom _ (cancel f)) (onPhantom (cancel f) g) & prop_in1 cd (inPhantom (cancel g f)). Notation "{ 'for' x , P }" := (prop_for x (inPhantom P)) : type_scope. Notation "{ 'in' d , P }" := (prop_in1 (mem d) (inPhantom P)) : type_scope. Notation "{ 'in' d1 & d2 , P }" := (prop_in11 (mem d1) (mem d2) (inPhantom P)) : type_scope. Notation "{ 'in' d & , P }" := (prop_in2 (mem d) (inPhantom P)) : type_scope. 
Notation "{ 'in' d1 & d2 & d3 , P }" := (prop_in111 (mem d1) (mem d2) (mem d3) (inPhantom P)) : type_scope. Notation "{ 'in' d1 & & d3 , P }" := (prop_in21 (mem d1) (mem d3) (inPhantom P)) : type_scope. Notation "{ 'in' d1 & d2 & , P }" := (prop_in12 (mem d1) (mem d2) (inPhantom P)) : type_scope. Notation "{ 'in' d & & , P }" := (prop_in3 (mem d) (inPhantom P)) : type_scope. Notation "{ 'on' cd , P }" := (prop_on1 (mem cd) (inPhantom P) (inPhantom P)) : type_scope. Notation "{ 'on' cd & , P }" := (prop_on2 (mem cd) (inPhantom P) (inPhantom P)) : type_scope. Local Arguments onPhantom : clear scopes. Notation "{ 'on' cd , P & g }" := (prop_on1 (mem cd) (Phantom (_ -> Prop) P) (onPhantom P g)) : type_scope. Notation "{ 'in' d , 'bijective' f }" := (bijective_in (mem d) f) : type_scope. Notation "{ 'on' cd , 'bijective' f }" := (bijective_on (mem cd) f) : type_scope. (** Weakening and monotonicity lemmas for localized predicates. Note that using these lemmas in backward reasoning will force expansion of the predicate definition, as Coq needs to expose the quantifier to apply these lemmas. We define a few specialized variants to avoid this for some of the ssrfun predicates. **) Section LocalGlobal. Variables T1 T2 T3 : predArgType. Variables (D1 : {pred T1}) (D2 : {pred T2}) (D3 : {pred T3}). Variables (d1 d1' : mem_pred T1) (d2 d2' : mem_pred T2) (d3 d3' : mem_pred T3). Variables (f f' : T1 -> T2) (g : T2 -> T1) (h : T3). Variables (P1 : T1 -> Prop) (P2 : T1 -> T2 -> Prop). Variable P3 : T1 -> T2 -> T3 -> Prop. Variable Q1 : (T1 -> T2) -> T1 -> Prop. Variable Q1l : (T1 -> T2) -> T3 -> T1 -> Prop. Variable Q2 : (T1 -> T2) -> T1 -> T1 -> Prop. Hypothesis sub1 : sub_mem d1 d1'. Hypothesis sub2 : sub_mem d2 d2'. Hypothesis sub3 : sub_mem d3 d3'. Lemma in1W : {all1 P1} -> {in D1, {all1 P1}}. Proof. by move=> ? ?. Qed. Lemma in2W : {all2 P2} -> {in D1 & D2, {all2 P2}}. Proof. by move=> ? ?. Qed. Lemma in3W : {all3 P3} -> {in D1 & D2 & D3, {all3 P3}}. Proof. by move=> ? ?. Qed. Lemma in1T : {in T1, {all1 P1}} -> {all1 P1}. Proof. by move=> ? ?; auto. Qed. Lemma in2T : {in T1 & T2, {all2 P2}} -> {all2 P2}. Proof. by move=> ? ?; auto. Qed. Lemma in3T : {in T1 & T2 & T3, {all3 P3}} -> {all3 P3}. Proof. by move=> ? ?; auto. Qed. Lemma sub_in1 (Ph : ph {all1 P1}) : prop_in1 d1' Ph -> prop_in1 d1 Ph. Proof. by move=> allP x /sub1; apply: allP. Qed. Lemma sub_in11 (Ph : ph {all2 P2}) : prop_in11 d1' d2' Ph -> prop_in11 d1 d2 Ph. Proof. by move=> allP x1 x2 /sub1 d1x1 /sub2; apply: allP. Qed. Lemma sub_in111 (Ph : ph {all3 P3}) : prop_in111 d1' d2' d3' Ph -> prop_in111 d1 d2 d3 Ph. Proof. by move=> allP x1 x2 x3 /sub1 d1x1 /sub2 d2x2 /sub3; apply: allP. Qed. Let allQ1 f'' := {all1 Q1 f''}. Let allQ1l f'' h' := {all1 Q1l f'' h'}. Let allQ2 f'' := {all2 Q2 f''}. Lemma on1W : allQ1 f -> {on D2, allQ1 f}. Proof. by move=> ? ?. Qed. Lemma on1lW : allQ1l f h -> {on D2, allQ1l f & h}. Proof. by move=> ? ?. Qed. Lemma on2W : allQ2 f -> {on D2 &, allQ2 f}. Proof. by move=> ? ?. Qed. Lemma on1T : {on T2, allQ1 f} -> allQ1 f. Proof. by move=> ? ?; auto. Qed. Lemma on1lT : {on T2, allQ1l f & h} -> allQ1l f h. Proof. by move=> ? ?; auto. Qed. Lemma on2T : {on T2 &, allQ2 f} -> allQ2 f. Proof. by move=> ? ?; auto. Qed. Lemma subon1 (Phf : ph (allQ1 f)) (Ph : ph (allQ1 f)) : prop_on1 d2' Phf Ph -> prop_on1 d2 Phf Ph. Proof. by move=> allQ x /sub2; apply: allQ. Qed. Lemma subon1l (Phf : ph (allQ1l f)) (Ph : ph (allQ1l f h)) : prop_on1 d2' Phf Ph -> prop_on1 d2 Phf Ph. Proof. by move=> allQ x /sub2; apply: allQ. Qed. 
Lemma subon2 (Phf : ph (allQ2 f)) (Ph : ph (allQ2 f)) : prop_on2 d2' Phf Ph -> prop_on2 d2 Phf Ph. Proof. by move=> allQ x y /sub2=> d2fx /sub2; apply: allQ. Qed. Lemma can_in_inj : {in D1, cancel f g} -> {in D1 &, injective f}. Proof. by move=> fK x y /fK{2}<- /fK{2}<- ->. Qed. Lemma canLR_in x y : {in D1, cancel f g} -> y \in D1 -> x = f y -> g x = y. Proof. by move=> fK D1y ->; rewrite fK. Qed. Lemma canRL_in x y : {in D1, cancel f g} -> x \in D1 -> f x = y -> x = g y. Proof. by move=> fK D1x <-; rewrite fK. Qed. Lemma on_can_inj : {on D2, cancel f & g} -> {on D2 &, injective f}. Proof. by move=> fK x y /fK{2}<- /fK{2}<- ->. Qed. Lemma canLR_on x y : {on D2, cancel f & g} -> f y \in D2 -> x = f y -> g x = y. Proof. by move=> fK D2fy ->; rewrite fK. Qed. Lemma canRL_on x y : {on D2, cancel f & g} -> f x \in D2 -> f x = y -> x = g y. Proof. by move=> fK D2fx <-; rewrite fK. Qed. Lemma inW_bij : bijective f -> {in D1, bijective f}. Proof. by case=> g' fK g'K; exists g' => * ? *; auto. Qed. Lemma onW_bij : bijective f -> {on D2, bijective f}. Proof. by case=> g' fK g'K; exists g' => * ? *; auto. Qed. Lemma inT_bij : {in T1, bijective f} -> bijective f. Proof. by case=> g' fK g'K; exists g' => * ? *; auto. Qed. Lemma onT_bij : {on T2, bijective f} -> bijective f. Proof. by case=> g' fK g'K; exists g' => * ? *; auto. Qed. Lemma sub_in_bij (D1' : pred T1) : {subset D1 <= D1'} -> {in D1', bijective f} -> {in D1, bijective f}. Proof. by move=> subD [g' fK g'K]; exists g' => x; move/subD; [apply: fK | apply: g'K]. Qed. Lemma subon_bij (D2' : pred T2) : {subset D2 <= D2'} -> {on D2', bijective f} -> {on D2, bijective f}. Proof. by move=> subD [g' fK g'K]; exists g' => x; move/subD; [apply: fK | apply: g'K]. Qed. Lemma in_on1P : {in D1, {on D2, allQ1 f}} <-> {in [pred x in D1 | f x \in D2], allQ1 f}. Proof. split => allf x; have := allf x; rewrite inE => Q1f; first by case/andP. by move=> ? ?; apply: Q1f; apply/andP. Qed. Lemma in_on1lP : {in D1, {on D2, allQ1l f & h}} <-> {in [pred x in D1 | f x \in D2], allQ1l f h}. Proof. split => allf x; have := allf x; rewrite inE => Q1f; first by case/andP. by move=> ? ?; apply: Q1f; apply/andP. Qed. Lemma in_on2P : {in D1 &, {on D2 &, allQ2 f}} <-> {in [pred x in D1 | f x \in D2] &, allQ2 f}. Proof. split => allf x y; have := allf x y; rewrite !inE => Q2f. by move=> /andP[? ?] /andP[? ?]; apply: Q2f. by move=> ? ? ? ?; apply: Q2f; apply/andP. Qed. Lemma on1W_in : {in D1, allQ1 f} -> {in D1, {on D2, allQ1 f}}. Proof. by move=> D1f ? /D1f. Qed. Lemma on1lW_in : {in D1, allQ1l f h} -> {in D1, {on D2, allQ1l f & h}}. Proof. by move=> D1f ? /D1f. Qed. Lemma on2W_in : {in D1 &, allQ2 f} -> {in D1 &, {on D2 &, allQ2 f}}. Proof. by move=> D1f ? ? ? ? ? ?; apply: D1f. Qed. Lemma in_on1W : allQ1 f -> {in D1, {on D2, allQ1 f}}. Proof. by move=> allf ? ? ?; apply: allf. Qed. Lemma in_on1lW : allQ1l f h -> {in D1, {on D2, allQ1l f & h}}. Proof. by move=> allf ? ? ?; apply: allf. Qed. Lemma in_on2W : allQ2 f -> {in D1 &, {on D2 &, allQ2 f}}. Proof. by move=> allf ? ? ? ? ? ?; apply: allf. Qed. Lemma on1S : (forall x, f x \in D2) -> {on D2, allQ1 f} -> allQ1 f. Proof. by move=> ? fD1 ?; apply: fD1. Qed. Lemma on1lS : (forall x, f x \in D2) -> {on D2, allQ1l f & h} -> allQ1l f h. Proof. by move=> ? fD1 ?; apply: fD1. Qed. Lemma on2S : (forall x, f x \in D2) -> {on D2 &, allQ2 f} -> allQ2 f. Proof. by move=> ? fD1 ? ?; apply: fD1. Qed. Lemma on1S_in : {homo f : x / x \in D1 >-> x \in D2} -> {in D1, {on D2, allQ1 f}} -> {in D1, allQ1 f}. Proof. by move=> fD fD1 ? 
?; apply/fD1/fD. Qed. Lemma on1lS_in : {homo f : x / x \in D1 >-> x \in D2} -> {in D1, {on D2, allQ1l f & h}} -> {in D1, allQ1l f h}. Proof. by move=> fD fD1 ? ?; apply/fD1/fD. Qed. Lemma on2S_in : {homo f : x / x \in D1 >-> x \in D2} -> {in D1 &, {on D2 &, allQ2 f}} -> {in D1 &, allQ2 f}. Proof. by move=> fD fD1 ? ? ? ?; apply: fD1 => //; apply: fD. Qed. Lemma in_on1S : (forall x, f x \in D2) -> {in T1, {on D2, allQ1 f}} -> allQ1 f. Proof. by move=> fD2 fD1 ?; apply: fD1. Qed. Lemma in_on1lS : (forall x, f x \in D2) -> {in T1, {on D2, allQ1l f & h}} -> allQ1l f h. Proof. by move=> fD2 fD1 ?; apply: fD1. Qed. Lemma in_on2S : (forall x, f x \in D2) -> {in T1 &, {on D2 &, allQ2 f}} -> allQ2 f. Proof. by move=> fD2 fD1 ? ?; apply: fD1. Qed. End LocalGlobal. Arguments in_on1P {T1 T2 D1 D2 f Q1}. Arguments in_on1lP {T1 T2 T3 D1 D2 f h Q1l}. Arguments in_on2P {T1 T2 D1 D2 f Q2}. Arguments on1W_in {T1 T2 D1} D2 {f Q1}. Arguments on1lW_in {T1 T2 T3 D1} D2 {f h Q1l}. Arguments on2W_in {T1 T2 D1} D2 {f Q2}. Arguments in_on1W {T1 T2} D1 D2 {f Q1}. Arguments in_on1lW {T1 T2 T3} D1 D2 {f h Q1l}. Arguments in_on2W {T1 T2} D1 D2 {f Q2}. Arguments on1S {T1 T2} D2 {f Q1}. Arguments on1lS {T1 T2 T3} D2 {f h Q1l}. Arguments on2S {T1 T2} D2 {f Q2}. Arguments on1S_in {T1 T2 D1} D2 {f Q1}. Arguments on1lS_in {T1 T2 T3 D1} D2 {f h Q1l}. Arguments on2S_in {T1 T2 D1} D2 {f Q2}. Arguments in_on1S {T1 T2} D2 {f Q1}. Arguments in_on1lS {T1 T2 T3} D2 {f h Q1l}. Arguments in_on2S {T1 T2} D2 {f Q2}. Section in_sig. Variables T1 T2 T3 : Type. Variables (D1 : {pred T1}) (D2 : {pred T2}) (D3 : {pred T3}). Variable P1 : T1 -> Prop. Variable P2 : T1 -> T2 -> Prop. Variable P3 : T1 -> T2 -> T3 -> Prop. Lemma in1_sig : {in D1, {all1 P1}} -> forall x : sig D1, P1 (sval x). Proof. by move=> DP [x Dx]; have := DP _ Dx. Qed. Lemma in2_sig : {in D1 & D2, {all2 P2}} -> forall (x : sig D1) (y : sig D2), P2 (sval x) (sval y). Proof. by move=> DP [x Dx] [y Dy]; have := DP _ _ Dx Dy. Qed. Lemma in3_sig : {in D1 & D2 & D3, {all3 P3}} -> forall (x : sig D1) (y : sig D2) (z : sig D3), P3 (sval x) (sval y) (sval z). Proof. by move=> DP [x Dx] [y Dy] [z Dz]; have := DP _ _ _ Dx Dy Dz. Qed. End in_sig. Arguments in1_sig {T1 D1 P1}. Arguments in2_sig {T1 T2 D1 D2 P2}. Arguments in3_sig {T1 T2 T3 D1 D2 D3 P3}. Lemma sub_in2 T d d' (P : T -> T -> Prop) : sub_mem d d' -> forall Ph : ph {all2 P}, prop_in2 d' Ph -> prop_in2 d Ph. Proof. by move=> /= sub_dd'; apply: sub_in11. Qed. Lemma sub_in3 T d d' (P : T -> T -> T -> Prop) : sub_mem d d' -> forall Ph : ph {all3 P}, prop_in3 d' Ph -> prop_in3 d Ph. Proof. by move=> /= sub_dd'; apply: sub_in111. Qed. Lemma sub_in12 T1 T d1 d1' d d' (P : T1 -> T -> T -> Prop) : sub_mem d1 d1' -> sub_mem d d' -> forall Ph : ph {all3 P}, prop_in12 d1' d' Ph -> prop_in12 d1 d Ph. Proof. by move=> /= sub1 sub; apply: sub_in111. Qed. Lemma sub_in21 T T3 d d' d3 d3' (P : T -> T -> T3 -> Prop) : sub_mem d d' -> sub_mem d3 d3' -> forall Ph : ph {all3 P}, prop_in21 d' d3' Ph -> prop_in21 d d3 Ph. Proof. by move=> /= sub sub3; apply: sub_in111. Qed. Lemma equivalence_relP_in T (R : rel T) (A : pred T) : {in A & &, equivalence_rel R} <-> {in A, reflexive R} /\ {in A &, forall x y, R x y -> {in A, R x =1 R y}}. Proof. split=> [eqiR | [Rxx trR] x y z *]; last by split=> [|/trR-> //]; apply: Rxx. by split=> [x Ax|x y Ax Ay Rxy z Az]; [rewrite (eqiR x x) | rewrite (eqiR x y)]. Qed. Section MonoHomoMorphismTheory. Variables (aT rT sT : Type) (f : aT -> rT) (g : rT -> aT). 
Variables (aP : pred aT) (rP : pred rT) (aR : rel aT) (rR : rel rT). Lemma monoW : {mono f : x / aP x >-> rP x} -> {homo f : x / aP x >-> rP x}. Proof. by move=> hf x ax; rewrite hf. Qed. Lemma mono2W : {mono f : x y / aR x y >-> rR x y} -> {homo f : x y / aR x y >-> rR x y}. Proof. by move=> hf x y axy; rewrite hf. Qed. Hypothesis fgK : cancel g f. Lemma homoRL : {homo f : x y / aR x y >-> rR x y} -> forall x y, aR (g x) y -> rR x (f y). Proof. by move=> Hf x y /Hf; rewrite fgK. Qed. Lemma homoLR : {homo f : x y / aR x y >-> rR x y} -> forall x y, aR x (g y) -> rR (f x) y. Proof. by move=> Hf x y /Hf; rewrite fgK. Qed. Lemma homo_mono : {homo f : x y / aR x y >-> rR x y} -> {homo g : x y / rR x y >-> aR x y} -> {mono g : x y / rR x y >-> aR x y}. Proof. move=> mf mg x y; case: (boolP (rR _ _))=> [/mg //|]. by apply: contraNF=> /mf; rewrite !fgK. Qed. Lemma monoLR : {mono f : x y / aR x y >-> rR x y} -> forall x y, rR (f x) y = aR x (g y). Proof. by move=> mf x y; rewrite -{1}[y]fgK mf. Qed. Lemma monoRL : {mono f : x y / aR x y >-> rR x y} -> forall x y, rR x (f y) = aR (g x) y. Proof. by move=> mf x y; rewrite -{1}[x]fgK mf. Qed. Lemma can_mono : {mono f : x y / aR x y >-> rR x y} -> {mono g : x y / rR x y >-> aR x y}. Proof. by move=> mf x y /=; rewrite -mf !fgK. Qed. End MonoHomoMorphismTheory. Section MonoHomoMorphismTheory_in. Variables (aT rT : predArgType) (f : aT -> rT) (g : rT -> aT). Variables (aD : {pred aT}) (rD : {pred rT}). Variable (aP : pred aT) (rP : pred rT) (aR : rel aT) (rR : rel rT). Lemma monoW_in : {in aD &, {mono f : x y / aR x y >-> rR x y}} -> {in aD &, {homo f : x y / aR x y >-> rR x y}}. Proof. by move=> hf x y hx hy axy; rewrite hf. Qed. Lemma mono2W_in : {in aD, {mono f : x / aP x >-> rP x}} -> {in aD, {homo f : x / aP x >-> rP x}}. Proof. by move=> hf x hx ax; rewrite hf. Qed. Hypothesis fgK : {in rD, {on aD, cancel g & f}}. Hypothesis mem_g : {homo g : x / x \in rD >-> x \in aD}. Lemma homoRL_in : {in aD &, {homo f : x y / aR x y >-> rR x y}} -> {in rD & aD, forall x y, aR (g x) y -> rR x (f y)}. Proof. by move=> Hf x y hx hy /Hf; rewrite fgK ?mem_g// ?inE; apply. Qed. Lemma homoLR_in : {in aD &, {homo f : x y / aR x y >-> rR x y}} -> {in aD & rD, forall x y, aR x (g y) -> rR (f x) y}. Proof. by move=> Hf x y hx hy /Hf; rewrite fgK ?mem_g// ?inE; apply. Qed. Lemma homo_mono_in : {in aD &, {homo f : x y / aR x y >-> rR x y}} -> {in rD &, {homo g : x y / rR x y >-> aR x y}} -> {in rD &, {mono g : x y / rR x y >-> aR x y}}. Proof. move=> mf mg x y hx hy; case: (boolP (rR _ _))=> [/mg //|]; first exact. by apply: contraNF=> /mf; rewrite !fgK ?mem_g//; apply. Qed. Lemma monoLR_in : {in aD &, {mono f : x y / aR x y >-> rR x y}} -> {in aD & rD, forall x y, rR (f x) y = aR x (g y)}. Proof. by move=> mf x y hx hy; rewrite -{1}[y]fgK ?mem_g// mf ?mem_g. Qed. Lemma monoRL_in : {in aD &, {mono f : x y / aR x y >-> rR x y}} -> {in rD & aD, forall x y, rR x (f y) = aR (g x) y}. Proof. by move=> mf x y hx hy; rewrite -{1}[x]fgK ?mem_g// mf ?mem_g. Qed. Lemma can_mono_in : {in aD &, {mono f : x y / aR x y >-> rR x y}} -> {in rD &, {mono g : x y / rR x y >-> aR x y}}. Proof. by move=> mf x y hx hy; rewrite -mf ?mem_g// !fgK ?mem_g. Qed. End MonoHomoMorphismTheory_in. Arguments homoRL_in {aT rT f g aD rD aR rR}. Arguments homoLR_in {aT rT f g aD rD aR rR}. Arguments homo_mono_in {aT rT f g aD rD aR rR}. Arguments monoLR_in {aT rT f g aD rD aR rR}. Arguments monoRL_in {aT rT f g aD rD aR rR}. Arguments can_mono_in {aT rT f g aD rD aR rR}. Section HomoMonoMorphismFlip. 
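(** This section only flips the binder order in [homo]/[mono] statements
    (plain, one-domain and two-domain variants).  A minimal sketch with a
    hypothetical relation [leT]: from {homo f : x y / leT x y} one obtains
    {homo f : y x / leT x y}, which is the form expected when the relation is
    applied with its arguments swapped. **)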
Variables (aT rT : Type) (aR : rel aT) (rR : rel rT) (f : aT -> rT). Variable (aD aD' : {pred aT}). Lemma homo_sym : {homo f : x y / aR x y >-> rR x y} -> {homo f : y x / aR x y >-> rR x y}. Proof. by move=> fR y x; apply: fR. Qed. Lemma mono_sym : {mono f : x y / aR x y >-> rR x y} -> {mono f : y x / aR x y >-> rR x y}. Proof. by move=> fR y x; apply: fR. Qed. Lemma homo_sym_in : {in aD &, {homo f : x y / aR x y >-> rR x y}} -> {in aD &, {homo f : y x / aR x y >-> rR x y}}. Proof. by move=> fR y x yD xD; apply: fR. Qed. Lemma mono_sym_in : {in aD &, {mono f : x y / aR x y >-> rR x y}} -> {in aD &, {mono f : y x / aR x y >-> rR x y}}. Proof. by move=> fR y x yD xD; apply: fR. Qed. Lemma homo_sym_in11 : {in aD & aD', {homo f : x y / aR x y >-> rR x y}} -> {in aD' & aD, {homo f : y x / aR x y >-> rR x y}}. Proof. by move=> fR y x yD xD; apply: fR. Qed. Lemma mono_sym_in11 : {in aD & aD', {mono f : x y / aR x y >-> rR x y}} -> {in aD' & aD, {mono f : y x / aR x y >-> rR x y}}. Proof. by move=> fR y x yD xD; apply: fR. Qed. End HomoMonoMorphismFlip. Arguments homo_sym {aT rT} [aR rR f]. Arguments mono_sym {aT rT} [aR rR f]. Arguments homo_sym_in {aT rT} [aR rR f aD]. Arguments mono_sym_in {aT rT} [aR rR f aD]. Arguments homo_sym_in11 {aT rT} [aR rR f aD aD']. Arguments mono_sym_in11 {aT rT} [aR rR f aD aD']. Section CancelOn. Variables (aT rT : predArgType) (aD : {pred aT}) (rD : {pred rT}). Variables (f : aT -> rT) (g : rT -> aT). Lemma onW_can : cancel g f -> {on aD, cancel g & f}. Proof. by move=> fgK x xaD; apply: fgK. Qed. Lemma onW_can_in : {in rD, cancel g f} -> {in rD, {on aD, cancel g & f}}. Proof. by move=> fgK x xrD xaD; apply: fgK. Qed. Lemma in_onW_can : cancel g f -> {in rD, {on aD, cancel g & f}}. Proof. by move=> fgK x xrD xaD; apply: fgK. Qed. Lemma onS_can : (forall x, g x \in aD) -> {on aD, cancel g & f} -> cancel g f. Proof. by move=> mem_g fgK x; apply: fgK. Qed. Lemma onS_can_in : {homo g : x / x \in rD >-> x \in aD} -> {in rD, {on aD, cancel g & f}} -> {in rD, cancel g f}. Proof. by move=> mem_g fgK x x_rD; apply/fgK/mem_g. Qed. Lemma in_onS_can : (forall x, g x \in aD) -> {in rT, {on aD, cancel g & f}} -> cancel g f. Proof. by move=> mem_g fgK x; apply/fgK. Qed. End CancelOn. Arguments onW_can {aT rT} aD {f g}. Arguments onW_can_in {aT rT} aD {rD f g}. Arguments in_onW_can {aT rT} aD rD {f g}. Arguments onS_can {aT rT} aD {f g}. Arguments onS_can_in {aT rT} aD {rD f g}. Arguments in_onS_can {aT rT} aD {f g}. Section inj_can_sym_in_on. Variables (aT rT : predArgType) (aD : {pred aT}) (rD : {pred rT}). Variables (f : aT -> rT) (g : rT -> aT). Lemma inj_can_sym_in_on : {homo f : x / x \in aD >-> x \in rD} -> {in aD, {on rD, cancel f & g}} -> {in rD &, {on aD &, injective g}} -> {in rD, {on aD, cancel g & f}}. Proof. by move=> fD fK gI x x_rD gx_aD; apply: gI; rewrite ?inE ?fK ?fD. Qed. Lemma inj_can_sym_on : {in aD, cancel f g} -> {on aD &, injective g} -> {on aD, cancel g & f}. Proof. by move=> fK gI x gx_aD; apply: gI; rewrite ?inE ?fK. Qed. Lemma inj_can_sym_in : {homo f \o g : x / x \in rD} -> {on rD, cancel f & g} -> {in rD &, injective g} -> {in rD, cancel g f}. Proof. by move=> fgD fK gI x x_rD; apply: gI; rewrite ?fK ?fgD. Qed. End inj_can_sym_in_on. Arguments inj_can_sym_in_on {aT rT aD rD f g}. Arguments inj_can_sym_on {aT rT aD f g}. Arguments inj_can_sym_in {aT rT rD f g}. 
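(** A hedged concrete instance of the cancellation idiom used above: the
    localized lemmas [canLR_in], [canRL_on], [inj_can_sym_in_on], ... refine
    the plain [canLR]/[canRL] pattern from ssrfun with [in]/[on] side
    conditions.  The example below merely re-derives a negation fact already
    provable earlier in this file (essentially [negbRL]), purely as an
    illustration; the name [canRL_sketch] is local to this sketch. **)
Example canRL_sketch (b c : bool) : ~~ b = c -> b = ~~ c.
Proof. exact: canRL negbK. Qed.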
coq-8.15.0/theories/ssr/ssrclasses.v000066400000000000000000000027011417001151100173630ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* .doc { font-family: monospace; white-space: pre; } # **) (** Compatibility layer for [under] and [setoid_rewrite]. Note: this file does not require [ssreflect]; it is both required by [ssrsetoid] and required by [ssrunder]. Redefine [Coq.Classes.RelationClasses.Reflexive] here, so that doing [Require Import ssreflect] does not [Require Import RelationClasses], and conversely. **) Section Defs. Context {A : Type}. Class Reflexive (R : A -> A -> Prop) := reflexivity : forall x : A, R x x. End Defs. Register Reflexive as plugins.ssreflect.reflexive_type. Register reflexivity as plugins.ssreflect.reflexive_proof. #[global] Instance eq_Reflexive {A : Type} : Reflexive (@eq A) := @eq_refl A. #[global] Instance iff_Reflexive : Reflexive iff := iff_refl. coq-8.15.0/theories/ssr/ssreflect.v000066400000000000000000000762111417001151100171770ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* .doc { font-family: monospace; white-space: pre; } # **) Require Import Bool. (* For bool_scope delimiter 'bool'. *) Require Import ssrmatching. Declare ML Module "ssreflect_plugin". (** This file is the Gallina part of the ssreflect plugin implementation. Files that use the ssreflect plugin should always Require ssreflect and either Import ssreflect or Import ssreflect.SsrSyntax. Part of the contents of this file is technical and will only interest advanced developers; in addition the following are defined: #[#the str of v by f#]# == the Canonical s : str such that f s = v. #[#the str of v#]# == the Canonical s : str that coerces to v. argumentType c == the T such that c : forall x : T, P x. returnType c == the R such that c : T -> R. {type of c for s} == P s where c : forall x : T, P x. nonPropType == an interface for non-Prop Types: a nonPropType coerces to a Type, and only types that do _not_ have sort Prop are canonical nonPropType instances. This is useful for applied views (see mid-file comment). notProp T == the nonPropType instance for type T. phantom T v == singleton type with inhabitant Phantom T v. phant T == singleton type with inhabitant Phant v. =^~ r == the converse of rewriting rule r (e.g., in a rewrite multirule). unkeyed t == t, but treated as an unkeyed matching pattern by the ssreflect matching algorithm. nosimpl t == t, but on the right-hand side of Definition C := nosimpl disables expansion of C by /=. locked t == t, but locked t is not convertible to t. locked_with k t == t, but not convertible to t or locked_with k' t unless k = k' (with k : unit). Coq type-checking will be much more efficient if locked_with with a bespoke k is used for sealed definitions. unlockable v == interface for sealed constant definitions of v. Unlockable def == the unlockable that registers def : C = v. #[#unlockable of C#]# == a clone for C of the canonical unlockable for the definition of C (e.g., if it uses locked_with). #[#unlockable fun C#]# == #[#unlockable of C#]# with the expansion forced to be an explicit lambda expression. -> The usage pattern for ADT operations is: Definition foo_def x1 .. xn := big_foo_expression. Fact foo_key : unit. Proof. 
by #[# #]#. Qed. Definition foo := locked_with foo_key foo_def. Canonical foo_unlockable := #[#unlockable fun foo#]#. This minimizes the comparison overhead for foo, while still allowing rewrite unlock to expose big_foo_expression. Additionally we provide default intro pattern ltac views: - top of the stack actions: => /[apply] := => hyp {}/hyp => /[swap] := => x y; move: y x (also swap and preserves let bindings) => /[dup] := => x; have copy := x; move: copy x (also copies and preserves let bindings) More information about these definitions and their use can be found in the ssreflect manual, and in specific comments below. **) Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. Module SsrSyntax. (** Declare Ssr keywords: 'is' 'of' '//' '/=' and '//='. We also declare the parsing level 8, as a workaround for a notation grammar factoring problem. Arguments of application-style notations (at level 10) should be declared at level 8 rather than 9 or the camlp5 grammar will not factor properly. **) Reserved Notation "(* x 'is' y 'of' z 'isn't' // /= //= *)" (at level 8). Reserved Notation "(* 69 *)" (at level 69). (** Non ambiguous keyword to check if the SsrSyntax module is imported **) Reserved Notation "(* Use to test if 'SsrSyntax_is_Imported' *)" (at level 8). Reserved Notation "<hidden n >" (at level 0, n at level 0, format "<hidden n >"). Reserved Notation "T (* n *)" (at level 200, format "T (* n *)"). End SsrSyntax. Export SsrMatchingSyntax. Export SsrSyntax. (** Save primitive notation that will be overloaded. **) Local Notation CoqGenericIf c vT vF := (if c then vT else vF) (only parsing). Local Notation CoqGenericDependentIf c x R vT vF := (if c as x return R then vT else vF) (only parsing). Local Notation CoqCast x T := (x : T) (only parsing). (** Reserve notation that introduced in this file. **) Reserved Notation "'if' c 'then' vT 'else' vF" (at level 200, c, vT, vF at level 200). Reserved Notation "'if' c 'return' R 'then' vT 'else' vF" (at level 200, c, R, vT, vF at level 200). Reserved Notation "'if' c 'as' x 'return' R 'then' vT 'else' vF" (at level 200, c, R, vT, vF at level 200, x name). Reserved Notation "x : T" (at level 100, right associativity, format "'[hv' x '/ ' : T ']'"). Reserved Notation "T : 'Type'" (at level 100, format "T : 'Type'"). Reserved Notation "P : 'Prop'" (at level 100, format "P : 'Prop'"). Reserved Notation "[ 'the' sT 'of' v 'by' f ]" (at level 0, format "[ 'the' sT 'of' v 'by' f ]"). Reserved Notation "[ 'the' sT 'of' v ]" (at level 0, format "[ 'the' sT 'of' v ]"). Reserved Notation "{ 'type' 'of' c 'for' s }" (at level 0, format "{ 'type' 'of' c 'for' s }"). Reserved Notation "=^~ r" (at level 100, format "=^~ r"). Reserved Notation "[ 'unlockable' 'of' C ]" (at level 0, format "[ 'unlockable' 'of' C ]"). Reserved Notation "[ 'unlockable' 'fun' C ]" (at level 0, format "[ 'unlockable' 'fun' C ]"). (** To define notations for tactic in intro patterns. When "=> /t" is parsed, "t:%ssripat" is actually interpreted. **) Declare Scope ssripat_scope. Delimit Scope ssripat_scope with ssripat. (** Make the general "if" into a notation, so that we can override it below. The notations are "only parsing" because the Coq decompiler will not recognize the expansion of the boolean if; using the default printer avoids a spurious trailing %%GEN_IF. **) Declare Scope general_if_scope. Delimit Scope general_if_scope with GEN_IF. Notation "'if' c 'then' vT 'else' vF" := (CoqGenericIf c vT vF) (only parsing) : general_if_scope. 
Notation "'if' c 'return' R 'then' vT 'else' vF" := (CoqGenericDependentIf c c R vT vF) (only parsing) : general_if_scope. Notation "'if' c 'as' x 'return' R 'then' vT 'else' vF" := (CoqGenericDependentIf c x R vT vF) (only parsing) : general_if_scope. (** Force boolean interpretation of simple if expressions. **) Declare Scope boolean_if_scope. Delimit Scope boolean_if_scope with BOOL_IF. Notation "'if' c 'return' R 'then' vT 'else' vF" := (if c is true as c in bool return R then vT else vF) : boolean_if_scope. Notation "'if' c 'then' vT 'else' vF" := (if c%bool is true as _ in bool return _ then vT else vF) : boolean_if_scope. Notation "'if' c 'as' x 'return' R 'then' vT 'else' vF" := (if c%bool is true as x in bool return R then vT else vF) : boolean_if_scope. Open Scope boolean_if_scope. (** To allow a wider variety of notations without reserving a large number of of identifiers, the ssreflect library systematically uses "forms" to enclose complex mixfix syntax. A "form" is simply a mixfix expression enclosed in square brackets and introduced by a keyword: #[#keyword ... #]# Because the keyword follows a bracket it does not need to be reserved. Non-ssreflect libraries that do not respect the form syntax (e.g., the Coq Lists library) should be loaded before ssreflect so that their notations do not mask all ssreflect forms. **) Declare Scope form_scope. Delimit Scope form_scope with FORM. Open Scope form_scope. (** Allow overloading of the cast (x : T) syntax, put whitespace around the ":" symbol to avoid lexical clashes (and for consistency with the parsing precedence of the notation, which binds less tightly than application), and put printing boxes that print the type of a long definition on a separate line rather than force-fit it at the right margin. **) Notation "x : T" := (CoqCast x T) : core_scope. (** Allow the casual use of notations like nat * nat for explicit Type declarations. Note that (nat * nat : Type) is NOT equivalent to (nat * nat)%%type, whose inferred type is legacy type "Set". **) Notation "T : 'Type'" := (CoqCast T%type Type) (only parsing) : core_scope. (** Allow similarly Prop annotation for, e.g., rewrite multirules. **) Notation "P : 'Prop'" := (CoqCast P%type Prop) (only parsing) : core_scope. (** Constants for abstract: and #[#: name #]# intro pattern **) Definition abstract_lock := unit. Definition abstract_key := tt. Definition abstract (statement : Type) (id : nat) (lock : abstract_lock) := let: tt := lock in statement. Declare Scope ssr_scope. Notation "" := (abstract _ n _) : ssr_scope. Notation "T (* n *)" := (abstract T n abstract_key) : ssr_scope. Open Scope ssr_scope. Register abstract_lock as plugins.ssreflect.abstract_lock. Register abstract_key as plugins.ssreflect.abstract_key. Register abstract as plugins.ssreflect.abstract. (** Constants for tactic-views **) Inductive external_view : Type := tactic_view of Type. (** Syntax for referring to canonical structures: #[#the struct_type of proj_val by proj_fun#]# This form denotes the Canonical instance s of the Structure type struct_type whose proj_fun projection is proj_val, i.e., such that proj_fun s = proj_val. Typically proj_fun will be A record field accessors of struct_type, but this need not be the case; it can be, for instance, a field of a record type to which struct_type coerces; proj_val will likewise be coerced to the return type of proj_fun. In all but the simplest cases, proj_fun should be eta-expanded to allow for the insertion of implicit arguments. 
In the common case where proj_fun itself is a coercion, the "by" part can be omitted entirely; in this case it is inferred by casting s to the inferred type of proj_val. Obviously the latter can be fixed by using an explicit cast on proj_val, and it is highly recommended to do so when the return type intended for proj_fun is "Type", as the type inferred for proj_val may vary because of sort polymorphism (it could be Set or Prop). Note when using the #[#the _ of _ #]# form to generate a substructure from a telescopes-style canonical hierarchy (implementing inheritance with coercions), one should always project or coerce the value to the BASE structure, because Coq will only find a Canonical derived structure for the Canonical base structure -- not for a base structure that is specific to proj_value. **) Module TheCanonical. #[universes(template)] Variant put vT sT (v1 v2 : vT) (s : sT) := Put. Definition get vT sT v s (p : @put vT sT v v s) := let: Put _ _ _ := p in s. Definition get_by vT sT of sT -> vT := @get vT sT. End TheCanonical. Import TheCanonical. (* Note: no export. *) Local Arguments get_by _%type_scope _%type_scope _ _ _ _. Notation "[ 'the' sT 'of' v 'by' f ]" := (@get_by _ sT f _ _ ((fun v' (s : sT) => Put v' (f s) s) v _)) (only parsing) : form_scope. Notation "[ 'the' sT 'of' v ]" := (get ((fun s : sT => Put v (*coerce*) s s) _)) (only parsing) : form_scope. (** The following are "format only" versions of the above notations. We need to do this to prevent the formatter from being be thrown off by application collapsing, coercion insertion and beta reduction in the right hand side of the notations above. **) Notation "[ 'the' sT 'of' v 'by' f ]" := (@get_by _ sT f v _ _) (only printing) : form_scope. Notation "[ 'the' sT 'of' v ]" := (@get _ sT v _ _) (only printing) : form_scope. (** We would like to recognize Notation " #[# 'the' sT 'of' v : 'Type' #]#" := (@get Type sT v _ _) (at level 0, format " #[# 'the' sT 'of' v : 'Type' #]#") : form_scope. **) (** Helper notation for canonical structure inheritance support. This is a workaround for the poor interaction between delta reduction and canonical projections in Coq's unification algorithm, by which transparent definitions hide canonical instances, i.e., in Canonical a_type_struct := @Struct a_type ... Definition my_type := a_type. my_type doesn't effectively inherit the struct structure from a_type. Our solution is to redeclare the instance as follows Canonical my_type_struct := Eval hnf in #[#struct of my_type#]#. The special notation #[#str of _ #]# must be defined for each Structure "str" with constructor "Str", typically as follows Definition clone_str s := let: Str _ x y ... z := s return {type of Str for s} -> str in fun k => k _ x y ... z. Notation " #[# 'str' 'of' T 'for' s #]#" := (@clone_str s (@Str T)) (at level 0, format " #[# 'str' 'of' T 'for' s #]#") : form_scope. Notation " #[# 'str' 'of' T #]#" := (repack_str (fun x => @Str T x)) (at level 0, format " #[# 'str' 'of' T #]#") : form_scope. The notation for the match return predicate is defined below; the eta expansion in the second form serves both to distinguish it from the first and to avoid the delta reduction problem. There are several variations on the notation and the definition of the the "clone" function, for telescopes, mixin classes, and join (multiple inheritance) classes. We describe a different idiom for clones in ssrfun; it uses phantom types (see below) and static unification; see fintype and ssralg for examples. 
**) Definition argumentType T P & forall x : T, P x := T. Definition dependentReturnType T P & forall x : T, P x := P. Definition returnType aT rT & aT -> rT := rT. Notation "{ 'type' 'of' c 'for' s }" := (dependentReturnType c s) : type_scope. (** A generic "phantom" type (actually, a unit type with a phantom parameter). This type can be used for type definitions that require some Structure on one of their parameters, to allow Coq to infer said structure so it does not have to be supplied explicitly or via the " #[#the _ of _ #]#" notation (the latter interacts poorly with other Notation). The definition of a (co)inductive type with a parameter p : p_type, that needs to use the operations of a structure Structure p_str : Type := p_Str {p_repr :> p_type; p_op : p_repr -> ...} should be given as Inductive indt_type (p : p_str) := Indt ... . Definition indt_of (p : p_str) & phantom p_type p := indt_type p. Notation "{ 'indt' p }" := (indt_of (Phantom p)). Definition indt p x y ... z : {indt p} := @Indt p x y ... z. Notation " #[# 'indt' x y ... z #]#" := (indt x y ... z). That is, the concrete type and its constructor should be shadowed by definitions that use a phantom argument to infer and display the true value of p (in practice, the "indt" constructor often performs additional functions, like "locking" the representation -- see below). We also define a simpler version ("phant" / "Phant") of phantom for the common case where p_type is Type. **) #[universes(template)] Variant phantom T (p : T) := Phantom. Arguments phantom : clear implicits. Arguments Phantom : clear implicits. #[universes(template)] Variant phant (p : Type) := Phant. (** Internal tagging used by the implementation of the ssreflect elim. **) Definition protect_term (A : Type) (x : A) : A := x. Register protect_term as plugins.ssreflect.protect_term. (** The ssreflect idiom for a non-keyed pattern: - unkeyed t will match any subterm that unifies with t, regardless of whether it displays the same head symbol as t. - unkeyed t a b will match any application of a term f unifying with t, to two arguments unifying with with a and b, respectively, regardless of apparent head symbols. - unkeyed x where x is a variable will match any subterm with the same type as x (when x would raise the 'indeterminate pattern' error). **) Notation unkeyed x := (let flex := x in flex). (** Ssreflect converse rewrite rule rule idiom. **) Definition ssr_converse R (r : R) := (Logic.I, r). Notation "=^~ r" := (ssr_converse r) : form_scope. (** Term tagging (user-level). The ssreflect library uses four strengths of term tagging to restrict convertibility during type checking: nosimpl t simplifies to t EXCEPT in a definition; more precisely, given Definition foo := nosimpl bar, foo (or foo t') will NOT be expanded by the /= and //= switches unless it is in a forcing context (e.g., in match foo t' with ... end, foo t' will be reduced if this allows the match to be reduced). Note that nosimpl bar is simply notation for a a term that beta-iota reduces to bar; hence rewrite /foo will replace foo by bar, and rewrite -/foo will replace bar by foo. CAVEAT: nosimpl should not be used inside a Section, because the end of section "cooking" removes the iota redex. locked t is provably equal to t, but is not convertible to t; 'locked' provides support for selective rewriting, via the lock t : t = locked t Lemma, and the ssreflect unlock tactic. 
locked_with k t is equal but not convertible to t, much like locked t, but supports explicit tagging with a value k : unit. This is used to mitigate a flaw in the term comparison heuristic of the Coq kernel, which treats all terms of the form locked t as equal and compares their arguments recursively, leading to an exponential blowup of comparison. For this reason locked_with should be used rather than locked when defining ADT operations. The unlock tactic does not support locked_with but the unlock rewrite rule does, via the unlockable interface. we also use Module Type ascription to create truly opaque constants, because simple expansion of constants to reveal an unreducible term doubles the time complexity of a negative comparison. Such opaque constants can be expanded generically with the unlock rewrite rule. See the definition of card and subset in fintype for examples of this. **) Notation nosimpl t := (let: tt := tt in t). Lemma master_key : unit. Proof. exact tt. Qed. Definition locked A := let: tt := master_key in fun x : A => x. Register master_key as plugins.ssreflect.master_key. Register locked as plugins.ssreflect.locked. Lemma lock A x : x = locked x :> A. Proof. unlock; reflexivity. Qed. (** Needed for locked predicates, in particular for eqType's. **) Lemma not_locked_false_eq_true : locked false <> true. Proof. unlock; discriminate. Qed. (** The basic closing tactic "done". **) Ltac done := trivial; hnf; intros; solve [ do ![solve [trivial | apply: sym_equal; trivial] | discriminate | contradiction | split] | case not_locked_false_eq_true; assumption | match goal with H : ~ _ |- _ => solve [case H; trivial] end ]. (** Quicker done tactic not including split, syntax: /0/ **) Ltac ssrdone0 := trivial; hnf; intros; solve [ do ![solve [trivial | apply: sym_equal; trivial] | discriminate | contradiction ] | case not_locked_false_eq_true; assumption | match goal with H : ~ _ |- _ => solve [case H; trivial] end ]. (** To unlock opaque constants. **) #[universes(template)] Structure unlockable T v := Unlockable {unlocked : T; _ : unlocked = v}. Lemma unlock T x C : @unlocked T x C = x. Proof. by case: C. Qed. Notation "[ 'unlockable' 'of' C ]" := (@Unlockable _ _ C (unlock _)) : form_scope. Notation "[ 'unlockable' 'fun' C ]" := (@Unlockable _ (fun _ => _) C (unlock _)) : form_scope. (** Generic keyed constant locking. **) (** The argument order ensures that k is always compared before T. **) Definition locked_with k := let: tt := k in fun T x => x : T. (** This can be used as a cheap alternative to cloning the unlockable instance below, but with caution as unkeyed matching can be expensive. **) Lemma locked_withE T k x : unkeyed (locked_with k x) = x :> T. Proof. by case: k. Qed. (** Intensionaly, this instance will not apply to locked u. **) Canonical locked_with_unlockable T k x := @Unlockable T x (locked_with k x) (locked_withE k x). (** More accurate variant of unlock, and safer alternative to locked_withE. **) Lemma unlock_with T k x : unlocked (locked_with_unlockable k x) = x :> T. Proof. exact: unlock. Qed. (** The internal lemmas for the have tactics. **) Definition ssr_have Plemma Pgoal (step : Plemma) rest : Pgoal := rest step. Arguments ssr_have Plemma [Pgoal]. Definition ssr_have_let Pgoal Plemma step (rest : let x : Plemma := step in Pgoal) : Pgoal := rest. Arguments ssr_have_let [Pgoal]. Register ssr_have as plugins.ssreflect.ssr_have. Register ssr_have_let as plugins.ssreflect.ssr_have_let. Definition ssr_suff Plemma Pgoal step (rest : Plemma) : Pgoal := step rest. 
Arguments ssr_suff Plemma [Pgoal]. Definition ssr_wlog := ssr_suff. Arguments ssr_wlog Plemma [Pgoal]. Register ssr_suff as plugins.ssreflect.ssr_suff. Register ssr_wlog as plugins.ssreflect.ssr_wlog. (** Internal N-ary congruence lemmas for the congr tactic. **) Fixpoint nary_congruence_statement (n : nat) : (forall B, (B -> B -> Prop) -> Prop) -> Prop := match n with | O => fun k => forall B, k B (fun x1 x2 : B => x1 = x2) | S n' => let k' A B e (f1 f2 : A -> B) := forall x1 x2, x1 = x2 -> (e (f1 x1) (f2 x2) : Prop) in fun k => forall A, nary_congruence_statement n' (fun B e => k _ (k' A B e)) end. Lemma nary_congruence n (k := fun B e => forall y : B, (e y y : Prop)) : nary_congruence_statement n k. Proof. have: k _ _ := _; rewrite {1}/k. elim: n k => [|n IHn] k k_P /= A; first exact: k_P. by apply: IHn => B e He; apply: k_P => f x1 x2 <-. Qed. Lemma ssr_congr_arrow Plemma Pgoal : Plemma = Pgoal -> Plemma -> Pgoal. Proof. by move->. Qed. Arguments ssr_congr_arrow : clear implicits. Register nary_congruence as plugins.ssreflect.nary_congruence. Register ssr_congr_arrow as plugins.ssreflect.ssr_congr_arrow. (** View lemmas that don't use reflection. **) Section ApplyIff. Variables P Q : Prop. Hypothesis eqPQ : P <-> Q. Lemma iffLR : P -> Q. Proof. by case: eqPQ. Qed. Lemma iffRL : Q -> P. Proof. by case: eqPQ. Qed. Lemma iffLRn : ~P -> ~Q. Proof. by move=> nP tQ; case: nP; case: eqPQ tQ. Qed. Lemma iffRLn : ~Q -> ~P. Proof. by move=> nQ tP; case: nQ; case: eqPQ tP. Qed. End ApplyIff. Hint View for move/ iffLRn|2 iffRLn|2 iffLR|2 iffRL|2. Hint View for apply/ iffRLn|2 iffLRn|2 iffRL|2 iffLR|2. (** To focus non-ssreflect tactics on a subterm, eg vm_compute. Usage: elim/abstract_context: (pattern) => G defG. vm_compute; rewrite {}defG {G}. Note that vm_cast are not stored in the proof term for reductions occurring in the context, hence set here := pattern; vm_compute in (value of here) blows up at Qed time. **) Lemma abstract_context T (P : T -> Type) x : (forall Q, Q = P -> Q x) -> P x. Proof. by move=> /(_ P); apply. Qed. (*****************************************************************************) (* Material for under/over (to rewrite under binders using "context lemmas") *) Require Export ssrunder. #[global] Hint Extern 0 (@Under_rel.Over_rel _ _ _ _) => solve [ apply: Under_rel.over_rel_done ] : core. #[global] Hint Resolve Under_rel.over_rel_done : core. Register Under_rel.Under_rel as plugins.ssreflect.Under_rel. Register Under_rel.Under_rel_from_rel as plugins.ssreflect.Under_rel_from_rel. (** Closing rewrite rule *) Definition over := over_rel. (** Closing tactic *) Ltac over := by [ apply: Under_rel.under_rel_done | rewrite over ]. (** Convenience rewrite rule to unprotect evars, e.g., to instantiate them in another way than with reflexivity. *) Definition UnderE := Under_relE. (*****************************************************************************) (** An interface for non-Prop types; used to avoid improper instantiation of polymorphic lemmas with on-demand implicits when they are used as views. For example: Some_inj {T} : forall x y : T, Some x = Some y -> x = y. Using move/Some_inj on a goal of the form Some n = Some 0 will fail: SSReflect will interpret the view as @Some_inj ?T _top_assumption_ since this is the well-typed application of the view with the minimal number of inserted evars (taking ?T := Some n = Some 0), and then will later complain that it cannot erase _top_assumption_ after having abstracted the viewed assumption. 
Making x and y maximal implicits would avoid this and force the intended @Some_inj nat x y _top_assumption_ interpretation, but is undesirable as it makes it harder to use Some_inj with the many SSReflect and MathComp lemmas that have an injectivity premise. Specifying {T : nonPropType} solves this more elegantly, as then (?T : Type) no longer unifies with (Some n = Some 0), which has sort Prop. **) Module NonPropType. (** Implementation notes: We rely on three interface Structures: - test_of r, the middle structure, performs the actual check: it has two canonical instances whose 'condition' projection are maybeProj (?P : Prop) and tt, and which set r := true and r := false, respectively. Unifying condition (?t : test_of ?r) with maybeProj T will thus set ?r to true if T is in Prop as the test_Prop T instance will apply, and otherwise simplify maybeProp T to tt and use the test_negative instance and set ?r to false. - call_of c r sets up a call to test_of on condition c with expected result r. It has a default instance for its 'callee' projection to Type, which sets c := maybeProj T and r := false when unifying with a type T. - type is a telescope on call_of c r, which checks that unifying test_of ?r1 with c indeed sets ?r1 := r; the type structure bundles the 'test' instance and its 'result' value along with its call_of c r projection. The default instance essentially provides eta-expansion for 'type'. This is only essential for the first 'result' projection to bool; using the instance for other projection merely avoids spurious delta expansions that would spoil the notProp T notation. In detail, unifying T =~= ?S with ?S : nonPropType, i.e., (1) T =~= @callee (@condition (result ?S) (test ?S)) (result ?S) (frame ?S) first uses the default call instance with ?T := T to reduce (1) to (2a) @condition (result ?S) (test ?S) =~= maybeProp T (3) result ?S =~= false (4) frame ?S =~= call T along with some trivial universe-related checks which are irrelevant here. Then the unification tries to use the test_Prop instance to reduce (2a) to (6a) result ?S =~= true (7a) ?P =~= T with ?P : Prop (8a) test ?S =~= test_Prop ?P Now the default 'check' instance with ?result := true resolves (6a) as (9a) ?S := @check true ?test ?frame Then (7a) can be solved precisely if T has sort at most (hence exactly) Prop, and then (8a) is solved by the check instance, yielding ?test := test_Prop T, and completing the solution of (2a), and _committing_ to it. But now (3) is inconsistent with (9a), and this makes the entire problem (1) fails. If on the other hand T does not have sort Prop then (7a) fails and the unification resorts to delta expanding (2a), which gives (2b) @condition (result ?S) (test ?S) =~= tt which is then reduced, using the test_negative instance, to (6b) result ?S =~= false (8b) test ?S =~= test_negative Both are solved using the check default instance, as in the (2a) branch, giving (9b) ?S := @check false test_negative ?frame Then (3) and (4) are similarly solved using check, giving the final assignment (9) ?S := notProp T Observe that we _must_ perform the actual test unification on the arguments of the initial canonical instance, and not on the instance itself as we do in mathcomp/matrix and mathcomp/vector, because we want the unification to fail when T has sort Prop. 
If both the test_of _and_ the result check unifications were done as part of the structure telescope then the latter would be a sub-problem of the former, and thus failing the check would merely make the test_of unification backtrack and delta-expand and we would not get failure. **) Structure call_of (condition : unit) (result : bool) := Call {callee : Type}. Definition maybeProp (T : Type) := tt. Definition call T := Call (maybeProp T) false T. Structure test_of (result : bool) := Test {condition :> unit}. Definition test_Prop (P : Prop) := Test true (maybeProp P). Definition test_negative := Test false tt. Structure type := Check {result : bool; test : test_of result; frame : call_of test result}. Definition check result test frame := @Check result test frame. Module Exports. Canonical call. Canonical test_Prop. Canonical test_negative. Canonical check. Notation nonPropType := type. Coercion callee : call_of >-> Sortclass. Coercion frame : type >-> call_of. Notation notProp T := (@check false test_negative (call T)). End Exports. End NonPropType. Export NonPropType.Exports. Module Export ipat. Notation "'[' 'apply' ']'" := (ltac:(let f := fresh "_top_" in move=> f {}/f)) (at level 0, only parsing) : ssripat_scope. (* we try to preserve the naming by matching the names from the goal *) (* we do move to perform a hnf before trying to match *) Notation "'[' 'swap' ']'" := (ltac:(move; let x := lazymatch goal with | |- forall (x : _), _ => fresh x | |- let x := _ in _ => fresh x | _ => fresh "_top_" end in intro x; move; let y := lazymatch goal with | |- forall (y : _), _ => fresh y | |- let y := _ in _ => fresh y | _ => fresh "_top_" end in intro y; revert x; revert y)) (at level 0, only parsing) : ssripat_scope. (* we try to preserve the naming by matching the names from the goal *) (* we do move to perform a hnf before trying to match *) Notation "'[' 'dup' ']'" := (ltac:(move; lazymatch goal with | |- forall (x : _), _ => let x := fresh x in intro x; let copy := fresh x in have copy := x; revert x; revert copy | |- let x := _ in _ => let x := fresh x in intro x; let copy := fresh x in pose copy := x; do [unfold x in (value of copy)]; revert x; revert copy | |- _ => let x := fresh "_top_" in move=> x; let copy := fresh "_top" in have copy := x; revert x; revert copy end)) (at level 0, only parsing) : ssripat_scope. End ipat. coq-8.15.0/theories/ssr/ssrfun.v000066400000000000000000001016561417001151100165270ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* .doc { font-family: monospace; white-space: pre; } # **) Require Import ssreflect. (** This file contains the basic definitions and notations for working with functions. The definitions provide for: - Pair projections: p.1 == first element of a pair p.2 == second element of a pair These notations also apply to p : P /\ Q, via an and >-> pair coercion. 
- Simplifying functions, beta-reduced by /= and simpl: #[#fun : T => E#]# == constant function from type T that returns E #[#fun x => E#]# == unary function #[#fun x : T => E#]# == unary function with explicit domain type #[#fun x y => E#]# == binary function #[#fun x y : T => E#]# == binary function with common domain type #[#fun (x : T) y => E#]# \ #[#fun (x : xT) (y : yT) => E#]# | == binary function with (some) explicit, #[#fun x (y : T) => E#]# / independent domain types for each argument - Partial functions using option type: oapp f d ox == if ox is Some x returns f x, d otherwise odflt d ox == if ox is Some x returns x, d otherwise obind f ox == if ox is Some x returns f x, None otherwise omap f ox == if ox is Some x returns Some (f x), None otherwise - Singleton types: all_equal_to x0 == x0 is the only value in its type, so any such value can be rewritten to x0. - A generic wrapper type: wrapped T == the inductive type with values Wrap x for x : T. unwrap w == the projection of w : wrapped T on T. wrap x == the canonical injection of x : T into wrapped T; it is equivalent to Wrap x, but is declared as a (default) Canonical Structure, which lets the Coq HO unification automatically expand x into unwrap (wrap x). The delta reduction of wrap x to Wrap can be exploited to introduce controlled nondeterminism in Canonical Structure inference, as in the implementation of the mxdirect predicate in matrix.v. - The empty type: void == a notation for the Empty_set type of the standard library. of_void T == the canonical injection void -> T. - Sigma types: tag w == the i of w : {i : I & T i}. tagged w == the T i component of w : {i : I & T i}. Tagged T x == the {i : I & T i} with component x : T i. tag2 w == the i of w : {i : I & T i & U i}. tagged2 w == the T i component of w : {i : I & T i & U i}. tagged2' w == the U i component of w : {i : I & T i & U i}. Tagged2 T U x y == the {i : I & T i} with components x : T i and y : U i. sval u == the x of u : {x : T | P x}. s2val u == the x of u : {x : T | P x & Q x}. The properties of sval u, s2val u are given by lemmas svalP, s2valP, and s2valP'. We provide coercions sigT2 >-> sigT and sig2 >-> sig >-> sigT. A suite of lemmas (all_sig, ...) let us skolemize sig, sig2, sigT, sigT2 and pair, e.g., have /all_sig#[#f fP#]# (x : T): {y : U | P y} by ... yields an f : T -> U such that fP : forall x, P (f x). - Identity functions: id == NOTATION for the explicit identity function fun x => x. @id T == notation for the explicit identity at type T. idfun == an expression with a head constant, convertible to id; idfun x simplifies to x. @idfun T == the expression above, specialized to type T. phant_id x y == the function type phantom _ x -> phantom _ y. *** In addition to their casual use in functional programming, identity functions are often used to trigger static unification as part of the construction of dependent Records and Structures. For example, if we need a structure sT over a type T, we take as arguments T, sT, and a "dummy" function T -> sort sT: Definition foo T sT & T -> sort sT := ... We can avoid specifying sT directly by calling foo (@id T), or specify the call completely while still ensuring the consistency of T and sT, by calling @foo T sT idfun. The phant_id type allows us to extend this trick to non-Type canonical projections. It also allows us to sidestep dependent type constraints when building explicit records, e.g., given Record r := R {x; y : T(x)}. 
if we need to build an r from a given y0 while inferring some x0, such that y0 : T(x0), we pose Definition mk_r .. y .. (x := ...) y' & phant_id y y' := R x y'. Calling @mk_r .. y0 .. id will cause Coq to use y' := y0, while checking the dependent type constraint y0 : T(x0). - Extensional equality for functions and relations (i.e. functions of two arguments): f1 =1 f2 == f1 x is equal to f2 x for all x. f1 =1 f2 :> A == ... and f2 is explicitly typed. f1 =2 f2 == f1 x y is equal to f2 x y for all x y. f1 =2 f2 :> A == ... and f2 is explicitly typed. - Composition for total and partial functions: f^~ y == function f with second argument specialised to y, i.e., fun x => f x y CAVEAT: conditional (non-maximal) implicit arguments of f are NOT inserted in this context @^~ x == application at x, i.e., fun f => f x #[#eta f#]# == the explicit eta-expansion of f, i.e., fun x => f x CAVEAT: conditional (non-maximal) implicit arguments of f are NOT inserted in this context. fun=> v := the constant function fun _ => v. f1 \o f2 == composition of f1 and f2. Note: (f1 \o f2) x simplifies to f1 (f2 x). f1 \; f2 == categorical composition of f1 and f2. This expands to to f2 \o f1 and (f1 \; f2) x simplifies to f2 (f1 x). pcomp f1 f2 == composition of partial functions f1 and f2. - Properties of functions: injective f <-> f is injective. cancel f g <-> g is a left inverse of f / f is a right inverse of g. pcancel f g <-> g is a left inverse of f where g is partial. ocancel f g <-> g is a left inverse of f where f is partial. bijective f <-> f is bijective (has a left and right inverse). involutive f <-> f is involutive. - Properties for operations. left_id e op <-> e is a left identity for op (e op x = x). right_id e op <-> e is a right identity for op (x op e = x). left_inverse e inv op <-> inv is a left inverse for op wrt identity e, i.e., (inv x) op x = e. right_inverse e inv op <-> inv is a right inverse for op wrt identity e i.e., x op (i x) = e. self_inverse e op <-> each x is its own op-inverse (x op x = e). idempotent op <-> op is idempotent for op (x op x = x). associative op <-> op is associative, i.e., x op (y op z) = (x op y) op z. commutative op <-> op is commutative (x op y = y op x). left_commutative op <-> op is left commutative, i.e., x op (y op z) = y op (x op z). right_commutative op <-> op is right commutative, i.e., (x op y) op z = (x op z) op y. left_zero z op <-> z is a left zero for op (z op x = z). right_zero z op <-> z is a right zero for op (x op z = z). left_distributive op1 op2 <-> op1 distributes over op2 to the left: (x op2 y) op1 z = (x op1 z) op2 (y op1 z). right_distributive op1 op2 <-> op distributes over add to the right: x op1 (y op2 z) = (x op1 z) op2 (x op1 z). interchange op1 op2 <-> op1 and op2 satisfy an interchange law: (x op2 y) op1 (z op2 t) = (x op1 z) op2 (y op1 t). Note that interchange op op is a commutativity property. left_injective op <-> op is injective in its left argument: x op y = z op y -> x = z. right_injective op <-> op is injective in its right argument: x op y = x op z -> y = z. left_loop inv op <-> op, inv obey the inverse loop left axiom: (inv x) op (x op y) = y for all x, y, i.e., op (inv x) is always a left inverse of op x rev_left_loop inv op <-> op, inv obey the inverse loop reverse left axiom: x op ((inv x) op y) = y, for all x, y. right_loop inv op <-> op, inv obey the inverse loop right axiom: (x op y) op (inv y) = x for all x, y. 
rev_right_loop inv op <-> op, inv obey the inverse loop reverse right axiom: (x op (inv y)) op y = x for all x, y. Note that familiar "cancellation" identities like x + y - y = x or x - y + y = x are respectively instances of right_loop and rev_right_loop The corresponding lemmas will use the K and NK/VK suffixes, respectively. - Morphisms for functions and relations: {morph f : x / a >-> r} <-> f is a morphism with respect to functions (fun x => a) and (fun x => r); if r == R#[#x#]#, this states that f a = R#[#f x#]# for all x. {morph f : x / a} <-> f is a morphism with respect to the function expression (fun x => a). This is shorthand for {morph f : x / a >-> a}; note that the two instances of a are often interpreted at different types. {morph f : x y / a >-> r} <-> f is a morphism with respect to functions (fun x y => a) and (fun x y => r). {morph f : x y / a} <-> f is a morphism with respect to the function expression (fun x y => a). {homo f : x / a >-> r} <-> f is a homomorphism with respect to the predicates (fun x => a) and (fun x => r); if r == R#[#x#]#, this states that a -> R#[#f x#]# for all x. {homo f : x / a} <-> f is a homomorphism with respect to the predicate expression (fun x => a). {homo f : x y / a >-> r} <-> f is a homomorphism with respect to the relations (fun x y => a) and (fun x y => r). {homo f : x y / a} <-> f is a homomorphism with respect to the relation expression (fun x y => a). {mono f : x / a >-> r} <-> f is monotone with respect to projectors (fun x => a) and (fun x => r); if r == R#[#x#]#, this states that R#[#f x#]# = a for all x. {mono f : x / a} <-> f is monotone with respect to the projector expression (fun x => a). {mono f : x y / a >-> r} <-> f is monotone with respect to relators (fun x y => a) and (fun x y => r). {mono f : x y / a} <-> f is monotone with respect to the relator expression (fun x y => a). The file also contains some basic lemmas for the above concepts. Lemmas relative to cancellation laws use some abbreviated suffixes: K - a cancellation rule like esymK : cancel (@esym T x y) (@esym T y x). LR - a lemma moving an operation from the left hand side of a relation to the right hand side, like canLR: cancel g f -> x = g y -> f x = y. RL - a lemma moving an operation from the right to the left, e.g., canRL. Beware that the LR and RL orientations refer to an "apply" (back chaining) usage; when using the same lemmas with "have" or "move" (forward chaining) the directions will be reversed!. **) Set Implicit Arguments. Unset Strict Implicit. Unset Printing Implicit Defensive. (** Parsing / printing declarations. *) Reserved Notation "p .1" (at level 2, left associativity, format "p .1"). Reserved Notation "p .2" (at level 2, left associativity, format "p .2"). Reserved Notation "f ^~ y" (at level 10, y at level 8, no associativity, format "f ^~ y"). Reserved Notation "@^~ x" (at level 10, x at level 8, no associativity, format "@^~ x"). Reserved Notation "[ 'eta' f ]" (at level 0, format "[ 'eta' f ]"). Reserved Notation "'fun' => E" (at level 200, format "'fun' => E"). Reserved Notation "[ 'fun' : T => E ]" (at level 0, format "'[hv' [ 'fun' : T => '/ ' E ] ']'"). Reserved Notation "[ 'fun' x => E ]" (at level 0, x name, format "'[hv' [ 'fun' x => '/ ' E ] ']'"). Reserved Notation "[ 'fun' x : T => E ]" (at level 0, x name, format "'[hv' [ 'fun' x : T => '/ ' E ] ']'"). Reserved Notation "[ 'fun' x y => E ]" (at level 0, x name, y name, format "'[hv' [ 'fun' x y => '/ ' E ] ']'"). 
Reserved Notation "[ 'fun' x y : T => E ]" (at level 0, x name, y name, format "'[hv' [ 'fun' x y : T => '/ ' E ] ']'"). Reserved Notation "[ 'fun' ( x : T ) y => E ]" (at level 0, x name, y name, format "'[hv' [ 'fun' ( x : T ) y => '/ ' E ] ']'"). Reserved Notation "[ 'fun' x ( y : T ) => E ]" (at level 0, x name, y name, format "'[hv' [ 'fun' x ( y : T ) => '/ ' E ] ']'"). Reserved Notation "[ 'fun' ( x : T ) ( y : U ) => E ]" (at level 0, x name, y name, format "[ 'fun' ( x : T ) ( y : U ) => E ]" ). Reserved Notation "f =1 g" (at level 70, no associativity). Reserved Notation "f =1 g :> A" (at level 70, g at next level, A at level 90). Reserved Notation "f =2 g" (at level 70, no associativity). Reserved Notation "f =2 g :> A" (at level 70, g at next level, A at level 90). Reserved Notation "f \o g" (at level 50, format "f \o '/ ' g"). Reserved Notation "f \; g" (at level 60, right associativity, format "f \; '/ ' g"). Reserved Notation "{ 'morph' f : x / a >-> r }" (at level 0, f at level 99, x name, format "{ 'morph' f : x / a >-> r }"). Reserved Notation "{ 'morph' f : x / a }" (at level 0, f at level 99, x name, format "{ 'morph' f : x / a }"). Reserved Notation "{ 'morph' f : x y / a >-> r }" (at level 0, f at level 99, x name, y name, format "{ 'morph' f : x y / a >-> r }"). Reserved Notation "{ 'morph' f : x y / a }" (at level 0, f at level 99, x name, y name, format "{ 'morph' f : x y / a }"). Reserved Notation "{ 'homo' f : x / a >-> r }" (at level 0, f at level 99, x name, format "{ 'homo' f : x / a >-> r }"). Reserved Notation "{ 'homo' f : x / a }" (at level 0, f at level 99, x name, format "{ 'homo' f : x / a }"). Reserved Notation "{ 'homo' f : x y / a >-> r }" (at level 0, f at level 99, x name, y name, format "{ 'homo' f : x y / a >-> r }"). Reserved Notation "{ 'homo' f : x y / a }" (at level 0, f at level 99, x name, y name, format "{ 'homo' f : x y / a }"). Reserved Notation "{ 'homo' f : x y /~ a }" (at level 0, f at level 99, x name, y name, format "{ 'homo' f : x y /~ a }"). Reserved Notation "{ 'mono' f : x / a >-> r }" (at level 0, f at level 99, x name, format "{ 'mono' f : x / a >-> r }"). Reserved Notation "{ 'mono' f : x / a }" (at level 0, f at level 99, x name, format "{ 'mono' f : x / a }"). Reserved Notation "{ 'mono' f : x y / a >-> r }" (at level 0, f at level 99, x name, y name, format "{ 'mono' f : x y / a >-> r }"). Reserved Notation "{ 'mono' f : x y / a }" (at level 0, f at level 99, x name, y name, format "{ 'mono' f : x y / a }"). Reserved Notation "{ 'mono' f : x y /~ a }" (at level 0, f at level 99, x name, y name, format "{ 'mono' f : x y /~ a }"). Reserved Notation "@ 'id' T" (at level 10, T at level 8, format "@ 'id' T"). Reserved Notation "@ 'sval'" (at level 10, format "@ 'sval'"). (** Syntax for defining auxiliary recursive function. Usage: Section FooDefinition. Variables (g1 : T1) (g2 : T2). (globals) Fixoint foo_auxiliary (a3 : T3) ... := body, using #[#rec e3, ... #]# for recursive calls where " #[# 'rec' a3 , a4 , ... #]#" := foo_auxiliary. Definition foo x y .. := #[#rec e1, ... #]#. + proofs about foo End FooDefinition. **) Reserved Notation "[ 'rec' a ]" (at level 0, format "[ 'rec' a ]"). Reserved Notation "[ 'rec' a , b ]" (at level 0, format "[ 'rec' a , b ]"). Reserved Notation "[ 'rec' a , b , c ]" (at level 0, format "[ 'rec' a , b , c ]"). Reserved Notation "[ 'rec' a , b , c , d ]" (at level 0, format "[ 'rec' a , b , c , d ]"). 
Reserved Notation "[ 'rec' a , b , c , d , e ]" (at level 0, format "[ 'rec' a , b , c , d , e ]"). Reserved Notation "[ 'rec' a , b , c , d , e , f ]" (at level 0, format "[ 'rec' a , b , c , d , e , f ]"). Reserved Notation "[ 'rec' a , b , c , d , e , f , g ]" (at level 0, format "[ 'rec' a , b , c , d , e , f , g ]"). Reserved Notation "[ 'rec' a , b , c , d , e , f , g , h ]" (at level 0, format "[ 'rec' a , b , c , d , e , f , g , h ]"). Reserved Notation "[ 'rec' a , b , c , d , e , f , g , h , i ]" (at level 0, format "[ 'rec' a , b , c , d , e , f , g , h , i ]"). Reserved Notation "[ 'rec' a , b , c , d , e , f , g , h , i , j ]" (at level 0, format "[ 'rec' a , b , c , d , e , f , g , h , i , j ]"). Declare Scope pair_scope. Delimit Scope pair_scope with PAIR. Open Scope pair_scope. (** Notations for pair/conjunction projections **) Notation "p .1" := (fst p) : pair_scope. Notation "p .2" := (snd p) : pair_scope. Coercion pair_of_and P Q (PandQ : P /\ Q) := (proj1 PandQ, proj2 PandQ). Definition all_pair I T U (w : forall i : I, T i * U i) := (fun i => (w i).1, fun i => (w i).2). (** Complements on the option type constructor, used below to encode partial functions. **) Module Option. Definition apply aT rT (f : aT -> rT) x u := if u is Some y then f y else x. Definition default T := apply (fun x : T => x). Definition bind aT rT (f : aT -> option rT) := apply f None. Definition map aT rT (f : aT -> rT) := bind (fun x => Some (f x)). End Option. Notation oapp := Option.apply. Notation odflt := Option.default. Notation obind := Option.bind. Notation omap := Option.map. Notation some := (@Some _) (only parsing). (** Shorthand for some basic equality lemmas. **) Notation erefl := refl_equal. Notation ecast i T e x := (let: erefl in _ = i := e return T in x). Definition esym := sym_eq. Definition nesym := sym_not_eq. Definition etrans := trans_eq. Definition congr1 := f_equal. Definition congr2 := f_equal2. (** Force at least one implicit when used as a view. **) Prenex Implicits esym nesym. (** A predicate for singleton types. **) Definition all_equal_to T (x0 : T) := forall x, unkeyed x = x0. Lemma unitE : all_equal_to tt. Proof. by case. Qed. (** A generic wrapper type **) #[universes(template)] Structure wrapped T := Wrap {unwrap : T}. Canonical wrap T x := @Wrap T x. Prenex Implicits unwrap wrap Wrap. Declare Scope fun_scope. Delimit Scope fun_scope with FUN. Open Scope fun_scope. (** Notations for argument transpose **) Notation "f ^~ y" := (fun x => f x y) : fun_scope. Notation "@^~ x" := (fun f => f x) : fun_scope. (** Definitions and notation for explicit functions with simplification, i.e., which simpl and /= beta expand (this is complementary to nosimpl). **) #[universes(template)] Variant simpl_fun (aT rT : Type) := SimplFun of aT -> rT. Section SimplFun. Variables aT rT : Type. Definition fun_of_simpl (f : simpl_fun aT rT) := fun x => let: SimplFun lam := f in lam x. End SimplFun. Coercion fun_of_simpl : simpl_fun >-> Funclass. Notation "[ 'fun' : T => E ]" := (SimplFun (fun _ : T => E)) : fun_scope. Notation "[ 'fun' x => E ]" := (SimplFun (fun x => E)) : fun_scope. Notation "[ 'fun' x y => E ]" := (fun x => [fun y => E]) : fun_scope. Notation "[ 'fun' x : T => E ]" := (SimplFun (fun x : T => E)) (only parsing) : fun_scope. Notation "[ 'fun' x y : T => E ]" := (fun x : T => [fun y : T => E]) (only parsing) : fun_scope. Notation "[ 'fun' ( x : T ) y => E ]" := (fun x : T => [fun y => E]) (only parsing) : fun_scope. 
Notation "[ 'fun' x ( y : T ) => E ]" := (fun x => [fun y : T => E]) (only parsing) : fun_scope. Notation "[ 'fun' ( x : T ) ( y : U ) => E ]" := (fun x : T => [fun y : U => E]) (only parsing) : fun_scope. (** For delta functions in eqtype.v. **) Definition SimplFunDelta aT rT (f : aT -> aT -> rT) := [fun z => f z z]. (** Extensional equality, for unary and binary functions, including syntactic sugar. **) Section ExtensionalEquality. Variables A B C : Type. Definition eqfun (f g : B -> A) : Prop := forall x, f x = g x. Definition eqrel (r s : C -> B -> A) : Prop := forall x y, r x y = s x y. Lemma frefl f : eqfun f f. Proof. by []. Qed. Lemma fsym f g : eqfun f g -> eqfun g f. Proof. by move=> eq_fg x. Qed. Lemma ftrans f g h : eqfun f g -> eqfun g h -> eqfun f h. Proof. by move=> eq_fg eq_gh x; rewrite eq_fg. Qed. Lemma rrefl r : eqrel r r. Proof. by []. Qed. End ExtensionalEquality. Global Typeclasses Opaque eqfun eqrel. #[global] Hint Resolve frefl rrefl : core. Notation "f1 =1 f2" := (eqfun f1 f2) : fun_scope. Notation "f1 =1 f2 :> A" := (f1 =1 (f2 : A)) : fun_scope. Notation "f1 =2 f2" := (eqrel f1 f2) : fun_scope. Notation "f1 =2 f2 :> A" := (f1 =2 (f2 : A)) : fun_scope. Section Composition. Variables A B C : Type. Definition comp (f : B -> A) (g : C -> B) x := f (g x). Definition catcomp g f := comp f g. Definition pcomp (f : B -> option A) (g : C -> option B) x := obind f (g x). Lemma eq_comp f f' g g' : f =1 f' -> g =1 g' -> comp f g =1 comp f' g'. Proof. by move=> eq_ff' eq_gg' x; rewrite /comp eq_gg' eq_ff'. Qed. End Composition. Arguments comp {A B C} f g x /. Arguments catcomp {A B C} g f x /. Notation "f1 \o f2" := (comp f1 f2) : fun_scope. Notation "f1 \; f2" := (catcomp f1 f2) : fun_scope. Notation "[ 'eta' f ]" := (fun x => f x) : fun_scope. Notation "'fun' => E" := (fun _ => E) : fun_scope. Notation id := (fun x => x). Notation "@ 'id' T" := (fun x : T => x) (only parsing) : fun_scope. Definition idfun T x : T := x. Arguments idfun {T} x /. Definition phant_id T1 T2 v1 v2 := phantom T1 v1 -> phantom T2 v2. (** The empty type. **) Notation void := Empty_set. Definition of_void T (x : void) : T := match x with end. (** Strong sigma types. **) Section Tag. Variables (I : Type) (i : I) (T_ U_ : I -> Type). Definition tag := projT1. Definition tagged : forall w, T_(tag w) := @projT2 I [eta T_]. Definition Tagged x := @existT I [eta T_] i x. Definition tag2 (w : @sigT2 I T_ U_) := let: existT2 _ _ i _ _ := w in i. Definition tagged2 w : T_(tag2 w) := let: existT2 _ _ _ x _ := w in x. Definition tagged2' w : U_(tag2 w) := let: existT2 _ _ _ _ y := w in y. Definition Tagged2 x y := @existT2 I [eta T_] [eta U_] i x y. End Tag. Arguments Tagged [I i]. Arguments Tagged2 [I i]. Prenex Implicits tag tagged Tagged tag2 tagged2 tagged2' Tagged2. Coercion tag_of_tag2 I T_ U_ (w : @sigT2 I T_ U_) := Tagged (fun i => T_ i * U_ i)%type (tagged2 w, tagged2' w). Lemma all_tag I T U : (forall x : I, {y : T x & U x y}) -> {f : forall x, T x & forall x, U x (f x)}. Proof. by move=> fP; exists (fun x => tag (fP x)) => x; case: (fP x). Qed. Lemma all_tag2 I T U V : (forall i : I, {y : T i & U i y & V i y}) -> {f : forall i, T i & forall i, U i (f i) & forall i, V i (f i)}. Proof. by case/all_tag=> f /all_pair[]; exists f. Qed. (** Refinement types. **) (** Prenex Implicits and renaming. **) Notation sval := (@proj1_sig _ _). Notation "@ 'sval'" := (@proj1_sig) (at level 10, format "@ 'sval'"). Section Sig. Variables (T : Type) (P Q : T -> Prop). Lemma svalP (u : sig P) : P (sval u). Proof. 
by case: u. Qed. Definition s2val (u : sig2 P Q) := let: exist2 _ _ x _ _ := u in x. Lemma s2valP u : P (s2val u). Proof. by case: u. Qed. Lemma s2valP' u : Q (s2val u). Proof. by case: u. Qed. End Sig. Prenex Implicits svalP s2val s2valP s2valP'. Coercion tag_of_sig I P (u : @sig I P) := Tagged P (svalP u). Coercion sig_of_sig2 I P Q (u : @sig2 I P Q) := exist (fun i => P i /\ Q i) (s2val u) (conj (s2valP u) (s2valP' u)). Lemma all_sig I T P : (forall x : I, {y : T x | P x y}) -> {f : forall x, T x | forall x, P x (f x)}. Proof. by case/all_tag=> f; exists f. Qed. Lemma all_sig2 I T P Q : (forall x : I, {y : T x | P x y & Q x y}) -> {f : forall x, T x | forall x, P x (f x) & forall x, Q x (f x)}. Proof. by case/all_sig=> f /all_pair[]; exists f. Qed. Section Morphism. Variables (aT rT sT : Type) (f : aT -> rT). (** Morphism property for unary and binary functions **) Definition morphism_1 aF rF := forall x, f (aF x) = rF (f x). Definition morphism_2 aOp rOp := forall x y, f (aOp x y) = rOp (f x) (f y). (** Homomorphism property for unary and binary relations **) Definition homomorphism_1 (aP rP : _ -> Prop) := forall x, aP x -> rP (f x). Definition homomorphism_2 (aR rR : _ -> _ -> Prop) := forall x y, aR x y -> rR (f x) (f y). (** Stability property for unary and binary relations **) Definition monomorphism_1 (aP rP : _ -> sT) := forall x, rP (f x) = aP x. Definition monomorphism_2 (aR rR : _ -> _ -> sT) := forall x y, rR (f x) (f y) = aR x y. End Morphism. Notation "{ 'morph' f : x / a >-> r }" := (morphism_1 f (fun x => a) (fun x => r)) : type_scope. Notation "{ 'morph' f : x / a }" := (morphism_1 f (fun x => a) (fun x => a)) : type_scope. Notation "{ 'morph' f : x y / a >-> r }" := (morphism_2 f (fun x y => a) (fun x y => r)) : type_scope. Notation "{ 'morph' f : x y / a }" := (morphism_2 f (fun x y => a) (fun x y => a)) : type_scope. Notation "{ 'homo' f : x / a >-> r }" := (homomorphism_1 f (fun x => a) (fun x => r)) : type_scope. Notation "{ 'homo' f : x / a }" := (homomorphism_1 f (fun x => a) (fun x => a)) : type_scope. Notation "{ 'homo' f : x y / a >-> r }" := (homomorphism_2 f (fun x y => a) (fun x y => r)) : type_scope. Notation "{ 'homo' f : x y / a }" := (homomorphism_2 f (fun x y => a) (fun x y => a)) : type_scope. Notation "{ 'homo' f : x y /~ a }" := (homomorphism_2 f (fun y x => a) (fun x y => a)) : type_scope. Notation "{ 'mono' f : x / a >-> r }" := (monomorphism_1 f (fun x => a) (fun x => r)) : type_scope. Notation "{ 'mono' f : x / a }" := (monomorphism_1 f (fun x => a) (fun x => a)) : type_scope. Notation "{ 'mono' f : x y / a >-> r }" := (monomorphism_2 f (fun x y => a) (fun x y => r)) : type_scope. Notation "{ 'mono' f : x y / a }" := (monomorphism_2 f (fun x y => a) (fun x y => a)) : type_scope. Notation "{ 'mono' f : x y /~ a }" := (monomorphism_2 f (fun y x => a) (fun x y => a)) : type_scope. (** In an intuitionistic setting, we have two degrees of injectivity. The weaker one gives only simplification, and the strong one provides a left inverse (we show in `fintype' that they coincide for finite types). We also define an intermediate version where the left inverse is only a partial function. **) Section Injections. Variables (rT aT : Type) (f : aT -> rT). Definition injective := forall x1 x2, f x1 = f x2 -> x1 = x2. Definition cancel g := forall x, g (f x) = x. Definition pcancel g := forall x, g (f x) = Some x. Definition ocancel (g : aT -> option rT) h := forall x, oapp h x (g x) = x. Lemma can_pcan g : cancel g -> pcancel (fun y => Some (g y)). Proof. 
by move=> fK x; congr (Some _). Qed. Lemma pcan_inj g : pcancel g -> injective. Proof. by move=> fK x y /(congr1 g); rewrite !fK => [[]]. Qed. Lemma can_inj g : cancel g -> injective. Proof. by move/can_pcan; apply: pcan_inj. Qed. Lemma canLR g x y : cancel g -> x = f y -> g x = y. Proof. by move=> fK ->. Qed. Lemma canRL g x y : cancel g -> f x = y -> x = g y. Proof. by move=> fK <-. Qed. End Injections. Lemma Some_inj {T : nonPropType} : injective (@Some T). Proof. by move=> x y []. Qed. Lemma of_voidK T : pcancel (of_void T) [fun _ => None]. Proof. by case. Qed. (** cancellation lemmas for dependent type casts. **) Lemma esymK T x y : cancel (@esym T x y) (@esym T y x). Proof. by case: y /. Qed. Lemma etrans_id T x y (eqxy : x = y :> T) : etrans (erefl x) eqxy = eqxy. Proof. by case: y / eqxy. Qed. Section InjectionsTheory. Variables (A B C : Type) (f g : B -> A) (h : C -> B). Lemma inj_id : injective (@id A). Proof. by []. Qed. Lemma inj_can_sym f' : cancel f f' -> injective f' -> cancel f' f. Proof. by move=> fK injf' x; apply: injf'. Qed. Lemma inj_comp : injective f -> injective h -> injective (f \o h). Proof. by move=> injf injh x y /injf; apply: injh. Qed. Lemma inj_compr : injective (f \o h) -> injective h. Proof. by move=> injfh x y /(congr1 f) /injfh. Qed. Lemma can_comp f' h' : cancel f f' -> cancel h h' -> cancel (f \o h) (h' \o f'). Proof. by move=> fK hK x; rewrite /= fK hK. Qed. Lemma pcan_pcomp f' h' : pcancel f f' -> pcancel h h' -> pcancel (f \o h) (pcomp h' f'). Proof. by move=> fK hK x; rewrite /pcomp fK /= hK. Qed. Lemma eq_inj : injective f -> f =1 g -> injective g. Proof. by move=> injf eqfg x y; rewrite -2!eqfg; apply: injf. Qed. Lemma eq_can f' g' : cancel f f' -> f =1 g -> f' =1 g' -> cancel g g'. Proof. by move=> fK eqfg eqfg' x; rewrite -eqfg -eqfg'. Qed. Lemma inj_can_eq f' : cancel f f' -> injective f' -> cancel g f' -> f =1 g. Proof. by move=> fK injf' gK x; apply: injf'; rewrite fK. Qed. End InjectionsTheory. Section Bijections. Variables (A B : Type) (f : B -> A). Variant bijective : Prop := Bijective g of cancel f g & cancel g f. Hypothesis bijf : bijective. Lemma bij_inj : injective f. Proof. by case: bijf => g fK _; apply: can_inj fK. Qed. Lemma bij_can_sym f' : cancel f' f <-> cancel f f'. Proof. split=> fK; first exact: inj_can_sym fK bij_inj. by case: bijf => h _ hK x; rewrite -[x]hK fK. Qed. Lemma bij_can_eq f' f'' : cancel f f' -> cancel f f'' -> f' =1 f''. Proof. by move=> fK fK'; apply: (inj_can_eq _ bij_inj); apply/bij_can_sym. Qed. End Bijections. Section BijectionsTheory. Variables (A B C : Type) (f : B -> A) (h : C -> B). Lemma eq_bij : bijective f -> forall g, f =1 g -> bijective g. Proof. by case=> f' fK f'K g eqfg; exists f'; eapply eq_can; eauto. Qed. Lemma bij_comp : bijective f -> bijective h -> bijective (f \o h). Proof. by move=> [f' fK f'K] [h' hK h'K]; exists (h' \o f'); apply: can_comp; auto. Qed. Lemma bij_can_bij : bijective f -> forall f', cancel f f' -> bijective f'. Proof. by move=> bijf; exists f; first by apply/(bij_can_sym bijf). Qed. End BijectionsTheory. Section Involutions. Variables (A : Type) (f : A -> A). Definition involutive := cancel f f. Hypothesis Hf : involutive. Lemma inv_inj : injective f. Proof. exact: can_inj Hf. Qed. Lemma inv_bij : bijective f. Proof. by exists f. Qed. End Involutions. Section OperationProperties. Variables S T R : Type. Section SopTisR. Implicit Type op : S -> T -> R. Definition left_inverse e inv op := forall x, op (inv x) x = e. 
Definition right_inverse e inv op := forall x, op x (inv x) = e. Definition left_injective op := forall x, injective (op^~ x). Definition right_injective op := forall y, injective (op y). End SopTisR. Section SopTisS. Implicit Type op : S -> T -> S. Definition right_id e op := forall x, op x e = x. Definition left_zero z op := forall x, op z x = z. Definition right_commutative op := forall x y z, op (op x y) z = op (op x z) y. Definition left_distributive op add := forall x y z, op (add x y) z = add (op x z) (op y z). Definition right_loop inv op := forall y, cancel (op^~ y) (op^~ (inv y)). Definition rev_right_loop inv op := forall y, cancel (op^~ (inv y)) (op^~ y). End SopTisS. Section SopTisT. Implicit Type op : S -> T -> T. Definition left_id e op := forall x, op e x = x. Definition right_zero z op := forall x, op x z = z. Definition left_commutative op := forall x y z, op x (op y z) = op y (op x z). Definition right_distributive op add := forall x y z, op x (add y z) = add (op x y) (op x z). Definition left_loop inv op := forall x, cancel (op x) (op (inv x)). Definition rev_left_loop inv op := forall x, cancel (op (inv x)) (op x). End SopTisT. Section SopSisT. Implicit Type op : S -> S -> T. Definition self_inverse e op := forall x, op x x = e. Definition commutative op := forall x y, op x y = op y x. End SopSisT. Section SopSisS. Implicit Type op : S -> S -> S. Definition idempotent op := forall x, op x x = x. Definition associative op := forall x y z, op x (op y z) = op (op x y) z. Definition interchange op1 op2 := forall x y z t, op1 (op2 x y) (op2 z t) = op2 (op1 x z) (op1 y t). End SopSisS. End OperationProperties. coq-8.15.0/theories/ssr/ssrsetoid.v000066400000000000000000000030461417001151100172200ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* .doc { font-family: monospace; white-space: pre; } # **) (** Compatibility layer for [under] and [setoid_rewrite]. This file is intended to be required by [Require Import Setoid]. In particular, we can use the [under] tactic with other relations than [eq] or [iff], e.g. a [RewriteRelation], by doing: [Require Import ssreflect. Require Setoid.] This file's instances have priority 12 > other stdlib instances. (Note: this file could be skipped when porting [under] to stdlib2.) *) Require Import ssrclasses. Require Import ssrunder. Require Import RelationClasses. Require Import Relation_Definitions. (** Reconcile [Coq.Classes.RelationClasses.Reflexive] with [Coq.ssr.ssrclasses.Reflexive] *) #[global] Instance compat_Reflexive : forall {A} {R : relation A}, RelationClasses.Reflexive R -> ssrclasses.Reflexive R | 12. Proof. now trivial. Qed. coq-8.15.0/theories/ssr/ssrunder.v000066400000000000000000000056471417001151100170570ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* .doc { font-family: monospace; white-space: pre; } # **) (** Constants for under/over, to rewrite under binders using "context lemmas" Note: this file does not require [ssreflect]; it is both required by [ssrsetoid] and *exported* by [ssrunder]. This preserves the following feature: we can use [Setoid] without requiring [ssreflect] and use [ssreflect] without requiring [Setoid]. *) Require Import ssrclasses. Module Type UNDER_REL. 
Parameter Under_rel : forall (A : Type) (eqA : A -> A -> Prop), A -> A -> Prop. Parameter Under_rel_from_rel : forall (A : Type) (eqA : A -> A -> Prop) (x y : A), @Under_rel A eqA x y -> eqA x y. Parameter Under_relE : forall (A : Type) (eqA : A -> A -> Prop), @Under_rel A eqA = eqA. (** [Over_rel, over_rel, over_rel_done]: for "by rewrite over_rel" *) Parameter Over_rel : forall (A : Type) (eqA : A -> A -> Prop), A -> A -> Prop. Parameter over_rel : forall (A : Type) (eqA : A -> A -> Prop) (x y : A), @Under_rel A eqA x y = @Over_rel A eqA x y. Parameter over_rel_done : forall (A : Type) (eqA : A -> A -> Prop) (EeqA : Reflexive eqA) (x : A), @Over_rel A eqA x x. (** [under_rel_done]: for Ltac-style over *) Parameter under_rel_done : forall (A : Type) (eqA : A -> A -> Prop) (EeqA : Reflexive eqA) (x : A), @Under_rel A eqA x x. Notation "''Under[' x ]" := (@Under_rel _ _ x _) (at level 8, format "''Under[' x ]", only printing). End UNDER_REL. Module Export Under_rel : UNDER_REL. Definition Under_rel (A : Type) (eqA : A -> A -> Prop) := eqA. Lemma Under_rel_from_rel : forall (A : Type) (eqA : A -> A -> Prop) (x y : A), @Under_rel A eqA x y -> eqA x y. Proof. now trivial. Qed. Lemma Under_relE (A : Type) (eqA : A -> A -> Prop) : @Under_rel A eqA = eqA. Proof. now trivial. Qed. Definition Over_rel := Under_rel. Lemma over_rel : forall (A : Type) (eqA : A -> A -> Prop) (x y : A), @Under_rel A eqA x y = @Over_rel A eqA x y. Proof. now trivial. Qed. Lemma over_rel_done : forall (A : Type) (eqA : A -> A -> Prop) (EeqA : Reflexive eqA) (x : A), @Over_rel A eqA x x. Proof. now unfold Over_rel. Qed. Lemma under_rel_done : forall (A : Type) (eqA : A -> A -> Prop) (EeqA : Reflexive eqA) (x : A), @Under_rel A eqA x x. Proof. now trivial. Qed. End Under_rel. coq-8.15.0/theories/ssrmatching/000077500000000000000000000000001417001151100165225ustar00rootroot00000000000000coq-8.15.0/theories/ssrmatching/ssrmatching.v000066400000000000000000000032611417001151100212350ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t) (only parsing) : ssrpatternscope. (* Some shortcuts for recurrent "X in t" parts. *) Notation RHS := (X in _ = X)%pattern. Notation LHS := (X in X = _)%pattern. End SsrMatchingSyntax. Export SsrMatchingSyntax. Tactic Notation "ssrpattern" ssrpatternarg(p) := ssrpattern p . 
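(** Illustrative examples (not part of the original ssrunder.v source; the
    example names are ad hoc): since [Under_rel] is kept opaque by the
    [UNDER_REL] signature, goals mentioning it are discharged through the
    interface, e.g. via [Under_rel_from_rel] or by rewriting with
    [Under_relE]. **)
Example Under_rel_from_rel_example (x y : nat) :
  Under_rel nat (@eq nat) x y -> x = y.
Proof. exact (Under_rel_from_rel nat (@eq nat) x y). Qed.
Example Under_relE_example (x : nat) : Under_rel nat (@eq nat) x x.
Proof. rewrite Under_relE. reflexivity. Qed.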
coq-8.15.0/tools/000077500000000000000000000000001417001151100135165ustar00rootroot00000000000000coq-8.15.0/tools/CoqMakefile.in000066400000000000000000000734201417001151100162340ustar00rootroot00000000000000########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## /dev/null 2>/dev/null; echo $$?)) STDTIME?=command time -f $(TIMEFMT) else ifeq (0,$(shell gtime -f "" true >/dev/null 2>/dev/null; echo $$?)) STDTIME?=gtime -f $(TIMEFMT) else STDTIME?=command time endif endif else STDTIME?=command time -f $(TIMEFMT) endif COQBIN?= ifneq (,$(COQBIN)) # add an ending / COQBIN:=$(COQBIN)/ endif # Coq binaries COQC ?= "$(COQBIN)coqc" COQTOP ?= "$(COQBIN)coqtop" COQCHK ?= "$(COQBIN)coqchk" COQNATIVE ?= "$(COQBIN)coqnative" COQDEP ?= "$(COQBIN)coqdep" COQDOC ?= "$(COQBIN)coqdoc" COQPP ?= "$(COQBIN)coqpp" COQMKFILE ?= "$(COQBIN)coq_makefile" OCAMLLIBDEP ?= "$(COQBIN)ocamllibdep" # Timing scripts COQMAKE_ONE_TIME_FILE ?= "$(COQCORELIB)/tools/make-one-time-file.py" COQMAKE_BOTH_TIME_FILES ?= "$(COQCORELIB)/tools/make-both-time-files.py" COQMAKE_BOTH_SINGLE_TIMING_FILES ?= "$(COQCORELIB)/tools/make-both-single-timing-files.py" BEFORE ?= AFTER ?= # FIXME this should be generated by Coq (modules already linked by Coq) CAMLDONTLINK=str,unix,dynlink,threads,zarith # OCaml binaries CAMLC ?= "$(OCAMLFIND)" ocamlc -c CAMLOPTC ?= "$(OCAMLFIND)" opt -c CAMLLINK ?= "$(OCAMLFIND)" ocamlc -linkpkg -dontlink $(CAMLDONTLINK) CAMLOPTLINK ?= "$(OCAMLFIND)" opt -linkpkg -dontlink $(CAMLDONTLINK) CAMLDOC ?= "$(OCAMLFIND)" ocamldoc CAMLDEP ?= "$(OCAMLFIND)" ocamldep -slash -ml-synonym .mlpack # DESTDIR is prepended to all installation paths DESTDIR ?= # Debug builds, typically -g to OCaml, -debug to Coq. CAMLDEBUG ?= COQDEBUG ?= # Extra packages to be linked in (as in findlib -package) CAMLPKGS ?= # Option for making timing files TIMING?= # Option for changing sorting of timing output file TIMING_SORT_BY ?= auto # Option for changing the fuzz parameter on the output file TIMING_FUZZ ?= 0 # Option for changing whether to use real or user time for timing tables TIMING_REAL?= # Option for including the memory column(s) TIMING_INCLUDE_MEM?= # Option for sorting by the memory column TIMING_SORT_BY_MEM?= # Output file names for timed builds TIME_OF_BUILD_FILE ?= time-of-build.log TIME_OF_BUILD_BEFORE_FILE ?= time-of-build-before.log TIME_OF_BUILD_AFTER_FILE ?= time-of-build-after.log TIME_OF_PRETTY_BUILD_FILE ?= time-of-build-pretty.log TIME_OF_PRETTY_BOTH_BUILD_FILE ?= time-of-build-both.log TIME_OF_PRETTY_BUILD_EXTRA_FILES ?= - # also output to the command line TGTS ?= # Retro compatibility (DESTDIR is standard on Unix, DSTROOT is not) ifdef DSTROOT DESTDIR := $(DSTROOT) endif # Substitution of the path by appending $(DESTDIR) if needed. # The variable $(COQMF_WINDRIVE) can be needed for Cygwin environments. windrive_path = $(if $(COQMF_WINDRIVE),$(subst $(COQMF_WINDRIVE),/,$(1)),$(1)) destination_path = $(if $(DESTDIR),$(DESTDIR)/$(call windrive_path,$(1)),$(1)) # Installation paths of libraries and documentation. COQLIBINSTALL ?= $(call destination_path,$(COQLIB)/user-contrib) COQDOCINSTALL ?= $(call destination_path,$(DOCDIR)/coq/user-contrib) COQTOPINSTALL ?= $(call destination_path,$(COQLIB)/toploop) # FIXME: Unused variable? ########## End of parameters ################################################## # What follows may be relevant to you only if you need to # extend this Makefile. 
If so, look for 'Extension point' here and # put in @LOCAL_FILE@ double colon rules accordingly. # E.g. to perform some work after the all target completes you can write # # post-all:: # echo "All done!" # # in @LOCAL_FILE@ # ############################################################################### # Flags ####################################################################### # # We define a bunch of variables combining the parameters. # To add additional flags to coq, coqchk or coqdoc, set the # {COQ,COQCHK,COQDOC}EXTRAFLAGS variable to whatever you want to add. # To overwrite the default choice and set your own flags entirely, set the # {COQ,COQCHK,COQDOC}FLAGS variable. SHOW := $(if $(VERBOSE),@true "",@echo "") HIDE := $(if $(VERBOSE),,@) TIMER=$(if $(TIMED), $(STDTIME), $(TIMECMD)) OPT?= # The DYNOBJ and DYNLIB variables are used by "coqdep -dyndep var" in .v.d ifeq '$(OPT)' '-byte' USEBYTE:=true DYNOBJ:=.cma DYNLIB:=.cma else USEBYTE:= DYNOBJ:=.cmxs DYNLIB:=.cmxs endif # these variables are meant to be overridden if you want to add *extra* flags COQEXTRAFLAGS?= COQCHKEXTRAFLAGS?= COQDOCEXTRAFLAGS?= # Find the last argument of the form "-native-compiler FLAG" COQUSERNATIVEFLAG:=$(strip \ $(subst -native-compiler-,,\ $(lastword \ $(filter -native-compiler-%,\ $(subst -native-compiler ,-native-compiler-,\ $(strip $(COQEXTRAFLAGS))))))) COQFILTEREDEXTRAFLAGS:=$(strip \ $(filter-out -native-compiler-%,\ $(subst -native-compiler ,-native-compiler-,\ $(strip $(COQEXTRAFLAGS))))) COQACTUALNATIVEFLAG:=$(lastword $(COQMF_COQ_NATIVE_COMPILER_DEFAULT) $(COQMF_COQPROJECTNATIVEFLAG) $(COQUSERNATIVEFLAG)) ifeq '$(COQACTUALNATIVEFLAG)' 'yes' COQNATIVEFLAG="-w" "-deprecated-native-compiler-option" "-native-compiler" "ondemand" COQDONATIVE="yes" else ifeq '$(COQACTUALNATIVEFLAG)' 'ondemand' COQNATIVEFLAG="-w" "-deprecated-native-compiler-option" "-native-compiler" "ondemand" COQDONATIVE="no" else COQNATIVEFLAG="-w" "-deprecated-native-compiler-option" "-native-compiler" "no" COQDONATIVE="no" endif endif # these flags do NOT contain the libraries, to make them easier to overwrite COQFLAGS?=-q $(OTHERFLAGS) $(COQFILTEREDEXTRAFLAGS) $(COQNATIVEFLAG) COQCHKFLAGS?=-silent -o $(COQCHKEXTRAFLAGS) COQDOCFLAGS?=-interpolate -utf8 $(COQDOCEXTRAFLAGS) COQDOCLIBS?=$(COQLIBS_NOML) # The version of Coq being run and the version of coq_makefile that # generated this makefile COQ_VERSION:=$(shell $(COQC) --print-version | cut -d " " -f 1) COQMAKEFILE_VERSION:=@COQ_VERSION@ # COQ_SRC_SUBDIRS is for user-overriding, usually to add # `user-contrib/Foo` to the includes, we keep COQCORE_SRC_SUBDIRS for # Coq's own core libraries, which should be replaced by ocamlfind # options at some point. 
COQ_SRC_SUBDIRS?= COQSRCLIBS?= $(foreach d,$(COQCORE_SRC_SUBDIRS), -I "$(COQCORELIB)/$(d)") $(foreach d,$(COQ_SRC_SUBDIRS), -I "$(COQLIB)/$(d)") CAMLFLAGS+=$(OCAMLLIBS) $(COQSRCLIBS) # ocamldoc fails with unknown argument otherwise CAMLDOCFLAGS:=$(filter-out -annot, $(filter-out -bin-annot, $(CAMLFLAGS))) CAMLFLAGS+=$(OCAMLWARN) ifneq (,$(TIMING)) TIMING_ARG=-time ifeq (after,$(TIMING)) TIMING_EXT=after-timing else ifeq (before,$(TIMING)) TIMING_EXT=before-timing else TIMING_EXT=timing endif endif else TIMING_ARG= endif # Files ####################################################################### # # We here define a bunch of variables about the files being part of the # Coq project in order to ease the writing of build target and build rules VDFILE := @DEP_FILE@ ALLSRCFILES := \ $(MLGFILES) \ $(MLFILES) \ $(MLPACKFILES) \ $(MLLIBFILES) \ $(MLIFILES) # helpers vo_to_obj = $(addsuffix .o,\ $(filter-out Warning: Error:,\ $(shell $(COQTOP) -q -noinit -batch -quiet -print-mod-uid $(1)))) strip_dotslash = $(patsubst ./%,%,$(1)) # without this we get undefined variables in the expansion for the # targets of the [deprecated,use-mllib-or-mlpack] rule with_undef = $(if $(filter-out undefined, $(origin $(1))),$($(1))) VO = vo VOS = vos VOFILES = $(VFILES:.v=.$(VO)) GLOBFILES = $(VFILES:.v=.glob) HTMLFILES = $(VFILES:.v=.html) GHTMLFILES = $(VFILES:.v=.g.html) BEAUTYFILES = $(addsuffix .beautified,$(VFILES)) TEXFILES = $(VFILES:.v=.tex) GTEXFILES = $(VFILES:.v=.g.tex) CMOFILES = \ $(MLGFILES:.mlg=.cmo) \ $(MLFILES:.ml=.cmo) \ $(MLPACKFILES:.mlpack=.cmo) CMXFILES = $(CMOFILES:.cmo=.cmx) OFILES = $(CMXFILES:.cmx=.o) CMAFILES = $(MLLIBFILES:.mllib=.cma) $(MLPACKFILES:.mlpack=.cma) CMXAFILES = $(CMAFILES:.cma=.cmxa) CMIFILES = \ $(CMOFILES:.cmo=.cmi) \ $(MLIFILES:.mli=.cmi) # the /if/ is because old _CoqProject did not list a .ml(pack|lib) but just # a .mlg file CMXSFILES = \ $(MLPACKFILES:.mlpack=.cmxs) \ $(CMXAFILES:.cmxa=.cmxs) \ $(if $(MLPACKFILES)$(CMXAFILES),,\ $(MLGFILES:.mlg=.cmxs) $(MLFILES:.ml=.cmxs)) # files that are packed into a plugin (no extension) PACKEDFILES = \ $(call strip_dotslash, \ $(foreach lib, \ $(call strip_dotslash, \ $(MLPACKFILES:.mlpack=_MLPACK_DEPENDENCIES)),$(call with_undef,$(lib)))) # files that are archived into a .cma (mllib) LIBEDFILES = \ $(call strip_dotslash, \ $(foreach lib, \ $(call strip_dotslash, \ $(MLLIBFILES:.mllib=_MLLIB_DEPENDENCIES)),$(call with_undef,$(lib)))) CMIFILESTOINSTALL = $(filter-out $(addsuffix .cmi,$(PACKEDFILES)),$(CMIFILES)) CMOFILESTOINSTALL = $(filter-out $(addsuffix .cmo,$(PACKEDFILES)),$(CMOFILES)) OBJFILES = $(call vo_to_obj,$(VOFILES)) ALLNATIVEFILES = \ $(OBJFILES:.o=.cmi) \ $(OBJFILES:.o=.cmx) \ $(OBJFILES:.o=.cmxs) # trick: wildcard filters out non-existing files, so that `install` doesn't show # warnings and `clean` doesn't pass to rm a list of files that is too long for # the shell. 
NATIVEFILES = $(wildcard $(ALLNATIVEFILES)) FILESTOINSTALL = \ $(VOFILES) \ $(VFILES) \ $(GLOBFILES) \ $(NATIVEFILES) \ $(CMIFILESTOINSTALL) BYTEFILESTOINSTALL = \ $(CMOFILESTOINSTALL) \ $(CMAFILES) ifeq '$(HASNATDYNLINK)' 'true' DO_NATDYNLINK = yes FILESTOINSTALL += $(CMXSFILES) $(CMXAFILES) $(CMOFILESTOINSTALL:.cmo=.cmx) else DO_NATDYNLINK = endif ALLDFILES = $(addsuffix .d,$(ALLSRCFILES)) $(VDFILE) # Compilation targets ######################################################### all: $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" pre-all $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" real-all $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" post-all .PHONY: all all.timing.diff: $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" pre-all $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" real-all.timing.diff TIME_OF_PRETTY_BUILD_EXTRA_FILES="" $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" post-all .PHONY: all.timing.diff ifeq (0,$(TIMING_REAL)) TIMING_REAL_ARG := TIMING_USER_ARG := --user else ifeq (1,$(TIMING_REAL)) TIMING_REAL_ARG := --real TIMING_USER_ARG := else TIMING_REAL_ARG := TIMING_USER_ARG := endif endif ifeq (0,$(TIMING_INCLUDE_MEM)) TIMING_INCLUDE_MEM_ARG := --no-include-mem else TIMING_INCLUDE_MEM_ARG := endif ifeq (1,$(TIMING_SORT_BY_MEM)) TIMING_SORT_BY_MEM_ARG := --sort-by-mem else TIMING_SORT_BY_MEM_ARG := endif make-pretty-timed-before:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_BEFORE_FILE) make-pretty-timed-after:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_AFTER_FILE) make-pretty-timed make-pretty-timed-before make-pretty-timed-after:: $(HIDE)rm -f pretty-timed-success.ok $(HIDE)($(MAKE) --no-print-directory -f "$(PARENT)" $(TGTS) TIMED=1 2>&1 && touch pretty-timed-success.ok) | tee -a $(TIME_OF_BUILD_FILE) $(HIDE)rm pretty-timed-success.ok # must not be -f; must fail if the touch failed print-pretty-timed:: $(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) print-pretty-timed-diff:: $(HIDE)$(COQMAKE_BOTH_TIME_FILES) --sort-by=$(TIMING_SORT_BY) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) ifeq (,$(BEFORE)) print-pretty-single-time-diff:: @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' $(HIDE)false else ifeq (,$(AFTER)) print-pretty-single-time-diff:: @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' $(HIDE)false else print-pretty-single-time-diff:: $(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) --fuzz=$(TIMING_FUZZ) --sort-by=$(TIMING_SORT_BY) $(TIMING_USER_ARG) $(AFTER) $(BEFORE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) endif endif pretty-timed: $(HIDE)$(MAKE) --no-print-directory -f "$(PARENT)" make-pretty-timed $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" print-pretty-timed .PHONY: pretty-timed make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff # Extension points for actions to be performed before/after the all target pre-all:: @# Extension point $(HIDE)if [ "$(COQMAKEFILE_VERSION)" != "$(COQ_VERSION)" ]; then\ echo "W: This Makefile was generated by Coq $(COQMAKEFILE_VERSION)";\ echo "W: while the current Coq version is 
$(COQ_VERSION)";\ fi .PHONY: pre-all post-all:: @# Extension point .PHONY: post-all real-all: $(VOFILES) $(if $(USEBYTE),bytefiles,optfiles) .PHONY: real-all real-all.timing.diff: $(VOFILES:.vo=.v.timing.diff) .PHONY: real-all.timing.diff bytefiles: $(CMOFILES) $(CMAFILES) .PHONY: bytefiles optfiles: $(if $(DO_NATDYNLINK),$(CMXSFILES)) .PHONY: optfiles # FIXME, see Ralf's bugreport # quick is deprecated, now renamed vio vio: $(VOFILES:.vo=.vio) .PHONY: vio quick: vio $(warning "'make quick' is deprecated, use 'make vio' or consider using 'vos' files") .PHONY: quick vio2vo: $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \ -schedule-vio2vo $(J) $(VOFILES:%.vo=%.vio) .PHONY: vio2vo # quick2vo is undocumented quick2vo: $(HIDE)make -j $(J) vio $(HIDE)VIOFILES=$$(for vofile in $(VOFILES); do \ viofile="$$(echo "$$vofile" | sed "s/\.vo$$/.vio/")"; \ if [ "$$vofile" -ot "$$viofile" -o ! -e "$$vofile" ]; then printf "$$viofile "; fi; \ done); \ echo "VIO2VO: $$VIOFILES"; \ if [ -n "$$VIOFILES" ]; then \ $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -schedule-vio2vo $(J) $$VIOFILES; \ fi .PHONY: quick2vo checkproofs: $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \ -schedule-vio-checking $(J) $(VOFILES:%.vo=%.vio) .PHONY: checkproofs vos: $(VOFILES:%.vo=%.vos) .PHONY: vos vok: $(VOFILES:%.vo=%.vok) .PHONY: vok validate: $(VOFILES) $(TIMER) $(COQCHK) $(COQCHKFLAGS) $(COQLIBS_NOML) $^ .PHONY: validate only: $(TGTS) .PHONY: only # Documentation targets ####################################################### html: $(GLOBFILES) $(VFILES) $(SHOW)'COQDOC -d html $(GAL)' $(HIDE)mkdir -p html $(HIDE)$(COQDOC) \ -toc $(COQDOCFLAGS) -html $(GAL) $(COQDOCLIBS) -d html $(VFILES) mlihtml: $(MLIFILES:.mli=.cmi) $(SHOW)'CAMLDOC -d $@' $(HIDE)mkdir $@ || rm -rf $@/* $(HIDE)$(CAMLDOC) -html \ -d $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES) all-mli.tex: $(MLIFILES:.mli=.cmi) $(SHOW)'CAMLDOC -latex $@' $(HIDE)$(CAMLDOC) -latex \ -o $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES) all.ps: $(VFILES) $(SHOW)'COQDOC -ps $(GAL)' $(HIDE)$(COQDOC) \ -toc $(COQDOCFLAGS) -ps $(GAL) $(COQDOCLIBS) \ -o $@ `$(COQDEP) -sort $(VFILES)` all.pdf: $(VFILES) $(SHOW)'COQDOC -pdf $(GAL)' $(HIDE)$(COQDOC) \ -toc $(COQDOCFLAGS) -pdf $(GAL) $(COQDOCLIBS) \ -o $@ `$(COQDEP) -sort $(VFILES)` # FIXME: not quite right, since the output name is different gallinahtml: GAL=-g gallinahtml: html all-gal.ps: GAL=-g all-gal.ps: all.ps all-gal.pdf: GAL=-g all-gal.pdf: all.pdf # ? beautify: $(BEAUTYFILES) for file in $^; do mv $${file%.beautified} $${file%beautified}old && mv $${file} $${file%.beautified}; done @echo 'Do not do "make clean" until you are sure that everything went well!' @echo 'If there were a problem, execute "for file in $$(find . -name \*.v.old -print); do mv $${file} $${file%.old}; done" in your shell/' .PHONY: beautify # Installation targets ######################################################## # # There rules can be extended in @LOCAL_FILE@ # Extensions can't assume when they run. install: $(HIDE)code=0; for f in $(FILESTOINSTALL); do\ if ! [ -f "$$f" ]; then >&2 echo $$f does not exist; code=1; fi \ done; exit $$code $(HIDE)for f in $(FILESTOINSTALL); do\ df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`";\ if [ "$$?" 
!= "0" -o -z "$$df" ]; then\ echo SKIP "$$f" since it has no logical path;\ else\ install -d "$(COQLIBINSTALL)/$$df" &&\ install -m 0644 "$$f" "$(COQLIBINSTALL)/$$df" &&\ echo INSTALL "$$f" "$(COQLIBINSTALL)/$$df";\ fi;\ done $(HIDE)$(MAKE) install-extra -f "$(SELF)" install-extra:: @# Extension point .PHONY: install install-extra install-byte: $(HIDE)for f in $(BYTEFILESTOINSTALL); do\ df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`";\ if [ "$$?" != "0" -o -z "$$df" ]; then\ echo SKIP "$$f" since it has no logical path;\ else\ install -d "$(COQLIBINSTALL)/$$df" &&\ install -m 0644 "$$f" "$(COQLIBINSTALL)/$$df" &&\ echo INSTALL "$$f" "$(COQLIBINSTALL)/$$df";\ fi;\ done install-doc:: html mlihtml @# Extension point $(HIDE)install -d "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html" $(HIDE)for i in html/*; do \ dest="$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i";\ install -m 0644 "$$i" "$$dest";\ echo INSTALL "$$i" "$$dest";\ done $(HIDE)install -d \ "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml" $(HIDE)for i in mlihtml/*; do \ dest="$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i";\ install -m 0644 "$$i" "$$dest";\ echo INSTALL "$$i" "$$dest";\ done .PHONY: install-doc uninstall:: @# Extension point $(HIDE)for f in $(FILESTOINSTALL); do \ df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`" &&\ instf="$(COQLIBINSTALL)/$$df/`basename $$f`" &&\ rm -f "$$instf" &&\ echo RM "$$instf" &&\ (rmdir "$(COQLIBINSTALL)/$$df/" 2>/dev/null || true); \ done .PHONY: uninstall uninstall-doc:: @# Extension point $(SHOW)'RM $(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html' $(HIDE)rm -rf "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html" $(SHOW)'RM $(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml' $(HIDE)rm -rf "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml" $(HIDE) rmdir "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/" || true .PHONY: uninstall-doc # Cleaning #################################################################### # # There rules can be extended in @LOCAL_FILE@ # Extensions can't assume when they run. clean:: @# Extension point $(SHOW)'CLEAN' $(HIDE)rm -f $(CMOFILES) $(HIDE)rm -f $(CMIFILES) $(HIDE)rm -f $(CMAFILES) $(HIDE)rm -f $(CMOFILES:.cmo=.cmx) $(HIDE)rm -f $(CMXAFILES) $(HIDE)rm -f $(CMXSFILES) $(HIDE)rm -f $(CMOFILES:.cmo=.o) $(HIDE)rm -f $(CMXAFILES:.cmxa=.a) $(HIDE)rm -f $(MLGFILES:.mlg=.ml) $(HIDE)rm -f $(ALLDFILES) $(HIDE)rm -f $(NATIVEFILES) $(HIDE)find . 
-name .coq-native -type d -empty -delete $(HIDE)rm -f $(VOFILES) $(HIDE)rm -f $(VOFILES:.vo=.vio) $(HIDE)rm -f $(VOFILES:.vo=.vos) $(HIDE)rm -f $(VOFILES:.vo=.vok) $(HIDE)rm -f $(BEAUTYFILES) $(VFILES:=.old) $(HIDE)rm -f all.ps all-gal.ps all.pdf all-gal.pdf all.glob all-mli.tex $(HIDE)rm -f $(VFILES:.v=.glob) $(HIDE)rm -f $(VFILES:.v=.tex) $(HIDE)rm -f $(VFILES:.v=.g.tex) $(HIDE)rm -f pretty-timed-success.ok $(HIDE)rm -rf html mlihtml .PHONY: clean cleanall:: clean @# Extension point $(SHOW)'CLEAN *.aux *.timing' $(HIDE)rm -f $(foreach f,$(VFILES:.v=),$(dir $(f)).$(notdir $(f)).aux) $(HIDE)rm -f $(TIME_OF_BUILD_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(HIDE)rm -f $(VOFILES:.vo=.v.timing) $(HIDE)rm -f $(VOFILES:.vo=.v.before-timing) $(HIDE)rm -f $(VOFILES:.vo=.v.after-timing) $(HIDE)rm -f $(VOFILES:.vo=.v.timing.diff) $(HIDE)rm -f .lia.cache .nia.cache .PHONY: cleanall archclean:: @# Extension point $(SHOW)'CLEAN *.cmx *.o' $(HIDE)rm -f $(NATIVEFILES) $(HIDE)rm -f $(CMOFILES:%.cmo=%.cmx) .PHONY: archclean # Compilation rules ########################################################### $(MLIFILES:.mli=.cmi): %.cmi: %.mli $(SHOW)'CAMLC -c $<' $(HIDE)$(TIMER) $(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) $< $(MLGFILES:.mlg=.ml): %.ml: %.mlg $(SHOW)'COQPP $<' $(HIDE)$(COQPP) $< # Stupid hack around a deficient syntax: we cannot concatenate two expansions $(filter %.cmo, $(MLFILES:.ml=.cmo) $(MLGFILES:.mlg=.cmo)): %.cmo: %.ml $(SHOW)'CAMLC -c $<' $(HIDE)$(TIMER) $(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) $< # Same hack $(filter %.cmx, $(MLFILES:.ml=.cmx) $(MLGFILES:.mlg=.cmx)): %.cmx: %.ml $(SHOW)'CAMLOPT -c $(FOR_PACK) $<' $(HIDE)$(TIMER) $(CAMLOPTC) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) $(FOR_PACK) $< $(MLLIBFILES:.mllib=.cmxs): %.cmxs: %.cmxa $(SHOW)'CAMLOPT -shared -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) \ -linkall -shared -o $@ $< $(MLLIBFILES:.mllib=.cma): %.cma: | %.mllib $(SHOW)'CAMLC -a -o $@' $(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) -a -o $@ $^ $(MLLIBFILES:.mllib=.cmxa): %.cmxa: | %.mllib $(SHOW)'CAMLOPT -a -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) -a -o $@ $^ $(MLPACKFILES:.mlpack=.cmxs): %.cmxs: %.cmxa $(SHOW)'CAMLOPT -shared -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) \ -shared -linkall -o $@ $< $(MLPACKFILES:.mlpack=.cmxa): %.cmxa: %.cmx $(SHOW)'CAMLOPT -a -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) -a -o $@ $< $(MLPACKFILES:.mlpack=.cma): %.cma: %.cmo | %.mlpack $(SHOW)'CAMLC -a -o $@' $(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) -a -o $@ $^ $(MLPACKFILES:.mlpack=.cmo): %.cmo: | %.mlpack $(SHOW)'CAMLC -pack -o $@' $(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) -pack -o $@ $^ $(MLPACKFILES:.mlpack=.cmx): %.cmx: | %.mlpack $(SHOW)'CAMLOPT -pack -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) -pack -o $@ $^ # This rule is for _CoqProject with no .mllib nor .mlpack $(filter-out $(MLLIBFILES:.mllib=.cmxs) $(MLPACKFILES:.mlpack=.cmxs) $(addsuffix .cmxs,$(PACKEDFILES)) $(addsuffix .cmxs,$(LIBEDFILES)),$(MLFILES:.ml=.cmxs) $(MLGFILES:.mlg=.cmxs)): %.cmxs: %.cmx $(SHOW)'[deprecated,use-mllib-or-mlpack] CAMLOPT -shared -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) \ -shared -o $@ $< ifneq (,$(TIMING)) TIMING_EXTRA = > $<.$(TIMING_EXT) else TIMING_EXTRA = endif $(VOFILES): %.vo: %.v | $(VDFILE) 
$(SHOW)COQC $< $(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(TIMING_ARG) $(COQFLAGS) $(COQLIBS) $< $(TIMING_EXTRA) ifeq ($(COQDONATIVE), "yes") $(SHOW)COQNATIVE $@ $(HIDE)$(COQNATIVE) $(COQLIBS) $@ endif # FIXME ?merge with .vo / .vio ? $(GLOBFILES): %.glob: %.v $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< $(VFILES:.v=.vio): %.vio: %.v $(SHOW)COQC -vio $< $(HIDE)$(TIMER) $(COQC) -vio $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< $(VFILES:.v=.vos): %.vos: %.v $(SHOW)COQC -vos $< $(HIDE)$(TIMER) $(COQC) -vos $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< $(VFILES:.v=.vok): %.vok: %.v $(SHOW)COQC -vok $< $(HIDE)$(TIMER) $(COQC) -vok $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< $(addsuffix .timing.diff,$(VFILES)): %.timing.diff : %.before-timing %.after-timing $(SHOW)PYTHON TIMING-DIFF $*.{before,after}-timing $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" print-pretty-single-time-diff BEFORE=$*.before-timing AFTER=$*.after-timing TIME_OF_PRETTY_BUILD_FILE="$@" $(BEAUTYFILES): %.v.beautified: %.v $(SHOW)'BEAUTIFY $<' $(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -beautify $< $(TEXFILES): %.tex: %.v $(SHOW)'COQDOC -latex $<' $(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex $< -o $@ $(GTEXFILES): %.g.tex: %.v $(SHOW)'COQDOC -latex -g $<' $(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex -g $< -o $@ $(HTMLFILES): %.html: %.v %.glob $(SHOW)'COQDOC -html $<' $(HIDE)$(COQDOC) $(COQDOCFLAGS) -html $< -o $@ $(GHTMLFILES): %.g.html: %.v %.glob $(SHOW)'COQDOC -html -g $<' $(HIDE)$(COQDOC) $(COQDOCFLAGS) -html -g $< -o $@ # Dependency files ############################################################ ifndef MAKECMDGOALS -include $(ALLDFILES) else ifneq ($(filter-out archclean clean cleanall printenv make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff,$(MAKECMDGOALS)),) -include $(ALLDFILES) endif endif .SECONDARY: $(ALLDFILES) redir_if_ok = > "$@" || ( RV=$$?; rm -f "$@"; exit $$RV ) GENMLFILES:=$(MLGFILES:.mlg=.ml) $(addsuffix .d,$(ALLSRCFILES)): $(GENMLFILES) $(addsuffix .d,$(MLIFILES)): %.mli.d: %.mli $(SHOW)'CAMLDEP $<' $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok) $(addsuffix .d,$(MLGFILES)): %.mlg.d: %.ml $(SHOW)'CAMLDEP $<' $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok) $(addsuffix .d,$(MLFILES)): %.ml.d: %.ml $(SHOW)'CAMLDEP $<' $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok) $(addsuffix .d,$(MLLIBFILES)): %.mllib.d: %.mllib $(SHOW)'OCAMLLIBDEP $<' $(HIDE)$(OCAMLLIBDEP) -c $(OCAMLLIBS) "$<" $(redir_if_ok) $(addsuffix .d,$(MLPACKFILES)): %.mlpack.d: %.mlpack $(SHOW)'OCAMLLIBDEP $<' $(HIDE)$(OCAMLLIBDEP) -c $(OCAMLLIBS) "$<" $(redir_if_ok) # If this makefile is created using a _CoqProject we have coqdep get # options from it. This avoids argument length limits for pathological # projects. Note that extra options might be on the command line. VDFILE_FLAGS:=$(if @PROJECT_FILE@,-f @PROJECT_FILE@,) $(CMDLINE_COQLIBS) $(CMDLINE_VFILES) $(VDFILE): @PROJECT_FILE@ $(VFILES) $(SHOW)'COQDEP VFILES' $(HIDE)$(COQDEP) -vos -dyndep var $(VDFILE_FLAGS) $(redir_if_ok) # Misc ######################################################################## byte: $(HIDE)$(MAKE) all "OPT:=-byte" -f "$(SELF)" .PHONY: byte opt: $(HIDE)$(MAKE) all "OPT:=-opt" -f "$(SELF)" .PHONY: opt # This is deprecated. 
To extend this makefile use # extension points and @LOCAL_FILE@ printenv:: $(warning printenv is deprecated) $(warning write extensions in @LOCAL_FILE@ or include @CONF_FILE@) @echo 'COQLIB = $(COQLIB)' @echo 'COQCORELIB = $(COQCORELIB)' @echo 'DOCDIR = $(DOCDIR)' @echo 'OCAMLFIND = $(OCAMLFIND)' @echo 'HASNATDYNLINK = $(HASNATDYNLINK)' @echo 'SRC_SUBDIRS = $(SRC_SUBDIRS)' @echo 'COQ_SRC_SUBDIRS = $(COQ_SRC_SUBDIRS)' @echo 'COQCORE_SRC_SUBDIRS = $(COQCORE_SRC_SUBDIRS)' @echo 'OCAMLFIND = $(OCAMLFIND)' @echo 'PP = $(PP)' @echo 'COQFLAGS = $(COQFLAGS)' @echo 'COQLIB = $(COQLIBS)' @echo 'COQLIBINSTALL = $(COQLIBINSTALL)' @echo 'COQDOCINSTALL = $(COQDOCINSTALL)' .PHONY: printenv # Generate a .merlin file. If you need to append directives to this # file you can extend the merlin-hook target in @LOCAL_FILE@ .merlin: $(SHOW)'FILL .merlin' $(HIDE)echo 'FLG $(COQMF_CAMLFLAGS)' > .merlin $(HIDE)echo 'B $(COQCORELIB)' >> .merlin $(HIDE)echo 'S $(COQCORELIB)' >> .merlin $(HIDE)$(foreach d,$(COQCORE_SRC_SUBDIRS), \ echo 'B $(COQCORELIB)$(d)' >> .merlin;) $(HIDE)$(foreach d,$(COQ_SRC_SUBDIRS), \ echo 'S $(COQLIB)$(d)' >> .merlin;) $(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'B $(d)' >> .merlin;) $(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'S $(d)' >> .merlin;) $(HIDE)$(MAKE) merlin-hook -f "$(SELF)" .PHONY: merlin merlin-hook:: @# Extension point .PHONY: merlin-hook # prints all variables debug: $(foreach v,\ $(sort $(filter-out $(INITIAL_VARS) INITIAL_VARS,\ $(.VARIABLES))),\ $(info $(v) = $($(v)))) .PHONY: debug .DEFAULT_GOAL := all # Users can create @LOCAL_LATE_FILE@ to hook into double-colon rules # or add other needed Makefile code, using defined # variables if necessary. -include @LOCAL_LATE_FILE@ # Local Variables: # mode: makefile-gmake # End: coq-8.15.0/tools/TimeFileMaker.py000066400000000000000000000642751417001151100165640ustar00rootroot00000000000000from __future__ import with_statement from __future__ import division from __future__ import unicode_literals from __future__ import print_function import sys import re import argparse from io import open # This script parses the output of `make TIMED=1` into a dictionary # mapping names of compiled files to the number of minutes and seconds # that they took to compile. STRIP_REG = re.compile('^(coq/|contrib/|)(?:theories/|src/)?') STRIP_REP = r'\1' INFINITY = '\u221e' TIME_KEY = 'time' MEM_KEY = 'mem' def nonnegative(arg): v = int(arg) if v < 0: raise argparse.ArgumentTypeError("%s is an invalid non-negative int value" % arg) return v def add_sort_by(parser): return parser.add_argument( '--sort-by', type=str, dest='sort_by', choices=('auto', 'absolute', 'diff'), default='auto', help=('How to sort the table entries.\n' + 'The "auto" method sorts by absolute time differences ' + 'rounded towards zero to a whole-number of seconds, then ' + 'by times in the "after" column, and finally ' + 'lexicographically by file name. 
This will put the ' + 'biggest changes in either direction first, and will ' + 'prefer sorting by build-time over subsecond changes in ' + 'build time (which are frequently noise); lexicographic ' + 'sorting forces an order on files which take effectively ' + 'no time to compile.\n' + 'The "absolute" method sorts by the total time taken.\n' + 'The "diff" method sorts by the signed difference in time.')) def add_sort_by_mem(parser): return parser.add_argument( '--sort-by-mem', action='store_true', dest='sort_by_mem', help=('Sort the table entries by memory rather than time.')) def add_fuzz(parser): return parser.add_argument( '--fuzz', dest='fuzz', metavar='N', type=nonnegative, default=0, help=('By default, two lines are only considered the same if ' + 'the character offsets and initial code strings match. ' 'This option relaxes this constraint by allowing the ' + 'character offsets to differ by up to N characters, as long ' + 'as the total number of characters and initial code strings ' + 'continue to match. This is useful when there are small changes ' + 'to a file, and you want to match later lines that have not ' + 'changed even though the character offsets have changed.')) def add_real(parser, single_timing=False): return parser.add_argument( '--real', action='store_true', help=(r'''Use real times rather than user times. ''' + ('''By default, the input is expected to contain lines in the format: FILE_NAME (...user: NUMBER_IN_SECONDS...mem: NUMBER ko...) If --real is passed, then the lines are instead expected in the format: FILE_NAME (...real: NUMBER_IN_SECONDS...mem: NUMBER ko...)''' if not single_timing else '''The input is expected to contain lines in the format: Chars START - END COMMAND NUMBER secs (NUMBERu...)'''))) def add_user(parser, single_timing=False): return parser.add_argument( '--user', dest='real', action='store_false', help=(r'''Use user times rather than real times. ''' + ('''By default, the input is expected to contain lines in the format: FILE_NAME (...real: NUMBER_IN_SECONDS...mem: NUMBER ko...) If --user is passed, then the lines are instead expected in the format: FILE_NAME (...user: NUMBER_IN_SECONDS...mem: NUMBER ko...)''' if not single_timing else '''The input is expected to contain lines in the format: Chars START - END COMMAND NUMBER secs (NUMBERu...)'''))) def add_include_mem(parser): return parser.add_argument( '--no-include-mem', dest='include_mem', default=True, action='store_false', help=(r'''Don't include memory in the table.''')) # N.B. We need to include default=None for nargs='*', c.f., https://bugs.python.org/issue28609#msg280180 def add_file_name_gen(parser, prefix='', descr='file containing the build log', stddir='in', defaults=None, **kwargs): extra = ('' if defaults is None else ' (defaults to %s if no argument is passed)' % defaults) return parser.add_argument( prefix + 'FILE_NAME', type=str, help=('The name of the %s (use "-" for std%s)%s.' 
% (descr, stddir, extra)), **kwargs) def add_file_name(parser): return add_file_name_gen(parser) def add_after_file_name(parser): return add_file_name_gen(parser, 'AFTER_', 'file containing the "after" build log') def add_before_file_name(parser): return add_file_name_gen(parser, 'BEFORE_', 'file containing the "before" build log') def add_output_file_name(parser): return add_file_name_gen(parser, 'OUTPUT_', 'file to write the output table to', stddir='out', defaults='-', nargs='*', default=None) def reformat_time_string(time): try: seconds, milliseconds = time.split('.') except ValueError: print('WARNING: Invalid time string: not the right number of dots (.); expected one: %s' % repr(time), file=sys.stderr) seconds, milliseconds = (time + '.').split('.')[:2] if seconds == '': seconds = 0 seconds = int(seconds) minutes, seconds = divmod(seconds, 60) return '%dm%02d.%ss' % (minutes, seconds, milliseconds) def get_file_lines(file_name): if file_name == '-': if hasattr(sys.stdin, 'buffer'): lines = sys.stdin.buffer.readlines() else: lines = sys.stdin.readlines() else: with open(file_name, 'rb') as f: lines = f.readlines() for line in lines: try: # Since we read the files in binary mode, we have to # normalize Windows line endings from \r\n to \n yield line.decode('utf-8').replace('\r\n', '\n') except UnicodeDecodeError: # invalid utf-8 pass def get_file(file_name): return ''.join(get_file_lines(file_name)) def merge_dicts(d1, d2): if d2 is None: return d1 if d1 is None: return d2 assert(isinstance(d1, dict)) assert(isinstance(d2, dict)) ret = {} for k in set(list(d1.keys()) + list(d2.keys())): ret[k] = merge_dicts(d1.get(k), d2.get(k)) return ret def get_mems_of_lines(lines): reg = re.compile(r'^([^\s]+) \([^\)]*?mem: ([0-9]+) ko[^\)]*?\)\s*$', re.MULTILINE) mems = reg.findall(lines) if all(STRIP_REG.search(name.strip()) for name, mem in mems): mems = tuple((STRIP_REG.sub(STRIP_REP, name.strip()), mem) for name, mem in mems) return dict((name, {MEM_KEY:int(mem)}) for name, mem in mems) def get_times_of_lines(lines, use_real=False): reg_user = re.compile(r'^([^\s]+) \([^\)]*?user: ([0-9\.]+)[^\)]*?\)\s*$', re.MULTILINE) reg_real = re.compile(r'^([^\s]+) \([^\)]*?real: ([0-9\.]+)[^\)]*?\)\s*$', re.MULTILINE) reg = reg_real if use_real else reg_user times = reg.findall(lines) if all(time in ('0.00', '0.01') for name, time in times): reg = reg_real times = reg.findall(lines) if all(STRIP_REG.search(name.strip()) for name, time in times): times = tuple((STRIP_REG.sub(STRIP_REP, name.strip()), time) for name, time in times) return dict((name, {TIME_KEY:reformat_time_string(time)}) for name, time in times) def get_times_and_mems(file_name, use_real=False, include_mem=True): # we only get the file once, in case it is a stream like stdin lines = get_file(file_name) return merge_dicts(get_times_of_lines(lines, use_real=use_real), (get_mems_of_lines(lines) if include_mem else None)) def get_mems(file_name): ''' Reads the contents of file_name, which should be the output of 'make TIMED=1', and parses it to construct a dict mapping file names to peak memory usage, as integers. Removes common prefixes using STRIP_REG and STRIP_REP. ''' return get_mems_of_lines(get_file(file_name)) def get_times(file_name, use_real=False): ''' Reads the contents of file_name, which should be the output of 'make TIMED=1', and parses it to construct a dict mapping file names to compile durations, as strings. Removes common prefixes using STRIP_REG and STRIP_REP. 
''' return get_times_of_lines(get_file(file_name)) def get_single_file_times(file_name, use_real=False): ''' Reads the contents of file_name, which should be the output of 'coqc -time', and parses it to construct a dict mapping lines to to compile durations, as strings. ''' lines = get_file(file_name) reg = re.compile(r'^Chars ([0-9]+) - ([0-9]+) ([^ ]+) ([0-9\.]+) secs \(([0-9\.]+)u(.*)\)$', re.MULTILINE) times = reg.findall(lines) if len(times) == 0: return dict() longest = max(max((len(start), len(stop))) for start, stop, name, real, user, extra in times) FORMAT = 'Chars %%0%dd - %%0%dd %%s' % (longest, longest) return dict((FORMAT % (int(start), int(stop), name), {TIME_KEY:reformat_time_string(real if use_real else user)}) for start, stop, name, real, user, extra in times) def fuzz_merge(l1, l2, fuzz): '''Takes two iterables of ((start, end, code), times) and a fuzz parameter, and yields a single iterable of ((start, stop, code), times1, times2) We only give both left and right if (a) the codes are the same, (b) the number of characters (stop - start) is the same, and (c) the difference between left and right code locations is <= fuzz. We keep a current guess at the overall offset, and prefer drawing from whichever list is earliest after correcting for current offset. ''' assert(fuzz >= 0) cur_fuzz = 0 l1 = list(l1) l2 = list(l2) cur1, cur2 = None, None while (len(l1) > 0 or cur1 is not None) and (len(l2) > 0 or cur2 is not None): if cur1 is None: cur1 = l1.pop(0) if cur2 is None: cur2 = l2.pop(0) ((s1, e1, c1), t1), ((s2, e2, c2), t2) = cur1, cur2 assert(t1 is not None) assert(t2 is not None) s2_adjusted, e2_adjusted = s2 + cur_fuzz, e2 + cur_fuzz if cur1[0] == cur2[0]: yield (cur1, cur2) cur1, cur2 = None, None cur_fuzz = 0 elif c1 == c2 and e1-s1 == e2-s2 and abs(s1 - s2) <= fuzz: yield (((s1, e1, c1), t1), ((s2, e2, c2), t2)) cur1, cur2 = None, None cur_fuzz = s1 - s2 elif s1 < s2_adjusted or (s1 == s2_adjusted and e1 <= e2): yield (((s1, e1, c1), t1), ((s1 - cur_fuzz, e1 - cur_fuzz, c1), None)) cur1 = None else: yield (((s2 + cur_fuzz, e2 + cur_fuzz, c2), None), ((s2, e2, c2), t2)) cur2 = None if len(l1) > 0: for i in l1: yield (i, (i[0], None)) elif len(l2) > 0: for i in l2: yield ((i[0], None), i) def adjust_fuzz(left_dict, right_dict, fuzz): reg = re.compile(r'Chars ([0-9]+) - ([0-9]+) (.*)$') left_dict_list = sorted(((int(s), int(e), c), v) for ((s, e, c), v) in ((reg.match(k).groups(), v) for k, v in left_dict.items())) right_dict_list = sorted(((int(s), int(e), c), v) for ((s, e, c), v) in ((reg.match(k).groups(), v) for k, v in right_dict.items())) merged = list(fuzz_merge(left_dict_list, right_dict_list, fuzz)) if len(merged) == 0: # assert that both left and right dicts are empty assert(not left_dict) assert(not right_dict) return left_dict, right_dict longest = max(max((len(str(start1)), len(str(stop1)), len(str(start2)), len(str(stop2)))) for ((start1, stop1, code1), t1), ((start2, stop2, code2), t2) in merged) FORMAT1 = 'Chars %%0%dd - %%0%dd %%s' % (longest, longest) FORMAT2 = 'Chars %%0%dd-%%0%dd ~ %%0%dd-%%0%dd %%s' % (longest, longest, longest, longest) if fuzz == 0: left_dict = dict((FORMAT1 % k, t1) for (k, t1), _ in merged if t1 is not None) right_dict = dict((FORMAT1 % k, t2) for _, (k, t2) in merged if t2 is not None) else: left_dict = dict((FORMAT2 % (s1, e1, s2, e2, c1), t1) for ((s1, e1, c1), t1), ((s2, e2, c2), t2) in merged if t1 is not None) right_dict = dict((FORMAT2 % (s1, e1, s2, e2, c1), t2) for ((s1, e1, c1), t1), ((s2, e2, c2), t2) in merged if t2 
is not None) return left_dict, right_dict def fix_sign_for_sorting(num, descending=True): return -num if descending else num def make_sorting_key(stats_dict, descending=True, sort_by_mem=False): if sort_by_mem: def get_key(name): if MEM_KEY not in stats_dict[name].keys(): print('WARNING: %s has no mem key: %s' % (name, repr(stats_dict[name])), file=sys.stderr) mem = stats_dict[name].get(MEM_KEY, '0') return (fix_sign_for_sorting(int(mem), descending=descending), name) else: def get_key(name): if TIME_KEY not in stats_dict[name].keys(): print('WARNING: %s has no time key: %s' % (name, repr(stats_dict[name])), file=sys.stderr) minutes, seconds = stats_dict[name].get(TIME_KEY, '0m00s').replace('s', '').split('m') return (fix_sign_for_sorting(int(minutes), descending=descending), fix_sign_for_sorting(float(seconds), descending=descending), name) return get_key def get_sorted_file_list_from_stats_dict(stats_dict, descending=True, sort_by_mem=False): ''' Takes the output dict of get_times and returns the list of keys, sorted by duration. ''' return sorted(stats_dict.keys(), key=make_sorting_key(stats_dict, descending=descending, sort_by_mem=sort_by_mem)) def to_seconds(time): ''' Converts a string time into a number of seconds. ''' minutes, seconds = time.replace('s', '').split('m') sign = -1 if time[0] == '-' else 1 return sign * (abs(int(minutes)) * 60 + float(seconds)) def from_seconds(seconds, signed=False): ''' Converts a number of seconds into a string time. ''' sign = ('-' if seconds < 0 else '+') if signed else '' seconds = abs(seconds) minutes = int(seconds) // 60 seconds -= minutes * 60 full_seconds = int(seconds) partial_seconds = int(100 * (seconds - full_seconds)) return sign + '%dm%02d.%02ds' % (minutes, full_seconds, partial_seconds) def sum_times(times, signed=False): ''' Takes the values of an output from get_times, parses the time strings, and returns their sum, in the same string format. ''' # sort the times before summing because floating point addition is not associative return from_seconds(sum(sorted(map(to_seconds, times))), signed=signed) def format_percentage(num, signed=True): sign = ('-' if num < 0 else '+') if signed else '' num = abs(num) whole_part = int(num * 100) frac_part = int(100 * (num * 100 - whole_part)) return sign + '%d.%02d%%' % (whole_part, frac_part) def make_diff_table_string(left_dict, right_dict, sort_by='auto', descending=True, sort_by_mem=False, left_tag='After', tag='File Name', right_tag='Before', with_percent=True, left_mem_tag='Peak Mem', right_mem_tag='Peak Mem', include_mem=False, change_tag='Change', percent_change_tag='% Change', change_mem_tag='Change (mem)', percent_change_mem_tag='% Change (mem)', mem_fmt='%d ko'): # We first get the names of all of the compiled files: all files # that were compiled either before or after. 
all_names_dict = dict() all_names_dict.update(right_dict) all_names_dict.update(left_dict) # do the left (after) last, so that we give precedence to those ones if len(all_names_dict.keys()) == 0: return 'No timing data' get_time = (lambda d, name: to_seconds(d.get(name, {}).get(TIME_KEY, '0m0.0s'))) prediff_times = tuple((name, get_time(left_dict, name), get_time(right_dict, name)) for name in all_names_dict.keys()) diff_times_dict = dict((name, from_seconds(lseconds - rseconds, signed=True)) for name, lseconds, rseconds in prediff_times) percent_diff_times_dict = dict((name, ((format_percentage((lseconds - rseconds) / rseconds)) if rseconds != 0 else (INFINITY if lseconds > 0 else 'N/A'))) for name, lseconds, rseconds in prediff_times) get_mem = (lambda d, name: d.get(name, {}).get(MEM_KEY, 0)) prediff_mems = tuple((name, get_mem(left_dict, name), get_mem(right_dict, name)) for name in all_names_dict.keys()) diff_mems_dict = dict((name, lmem - rmem) for name, lmem, rmem in prediff_mems) percent_diff_mems_dict = dict((name, ((format_percentage((lmem - rmem) / float(rmem))) if rmem != 0 else (INFINITY if lmem > 0 else 'N/A'))) for name, lmem, rmem in prediff_mems) # update to sort by approximate difference, first if sort_by_mem: get_prekey = (lambda name: diff_mems_dict[name]) else: get_prekey = (lambda name: to_seconds(diff_times_dict[name])) get_key_abs = make_sorting_key(all_names_dict, descending=descending, sort_by_mem=sort_by_mem) get_key_diff_float = (lambda name: fix_sign_for_sorting(get_prekey(name), descending=descending)) get_key_diff_absint = (lambda name: fix_sign_for_sorting(int(abs(get_prekey(name))), descending=descending)) get_key_with_name = (lambda get_key: lambda name: (get_key(name), name)) if sort_by == 'absolute': get_key = get_key_with_name(get_key_abs) elif sort_by == 'diff': get_key = get_key_with_name(get_key_diff_float) else: # sort_by == 'auto' get_key = get_key_with_name((lambda name: (get_key_diff_absint(name), get_key_abs(name)))) names = sorted(all_names_dict.keys(), key=get_key) #names = get_sorted_file_list_from_stats_dict(all_names_dict, descending=descending) # set the widths of each of the columns by the longest thing to go in that column left_sum = sum_times(v[TIME_KEY] for v in left_dict.values() if TIME_KEY in v.keys()) right_sum = sum_times(v[TIME_KEY] for v in right_dict.values() if TIME_KEY in v.keys()) left_sum_float = sum(sorted(to_seconds(v[TIME_KEY]) for v in left_dict.values() if TIME_KEY in v.keys())) right_sum_float = sum(sorted(to_seconds(v[TIME_KEY]) for v in right_dict.values() if TIME_KEY in v.keys())) diff_sum = from_seconds(left_sum_float - right_sum_float, signed=True) percent_diff_sum = (format_percentage((left_sum_float - right_sum_float) / right_sum_float) if right_sum_float > 0 else 'N/A') left_width = max(max(map(len, ['N/A', left_tag] + [v[TIME_KEY] for v in left_dict.values() if TIME_KEY in v.keys()])), len(left_sum)) right_width = max(max(map(len, ['N/A', right_tag] + [v[TIME_KEY] for v in right_dict.values() if TIME_KEY in v.keys()])), len(right_sum)) far_right_width = max(max(map(len, ['N/A', change_tag] + list(diff_times_dict.values()))), len(diff_sum)) far_far_right_width = max(max(map(len, ['N/A', percent_change_tag] + list(percent_diff_times_dict.values()))), len(percent_diff_sum)) total_string = 'Total' if not include_mem else 'Total Time / Peak Mem' middle_width = max(map(len, names + [tag, total_string])) left_peak = max([0] + [v.get(MEM_KEY, 0) for v in left_dict.values()]) right_peak = max([0] + [v.get(MEM_KEY, 0) 
for v in right_dict.values()]) diff_peak = left_peak - right_peak percent_diff_peak = (format_percentage((left_peak - right_peak) / float(right_peak)) if right_peak != 0 else (INFINITY if left_peak > 0 else 'N/A')) left_mem_width = max(max(map(len, ['N/A', left_mem_tag] + [mem_fmt % v.get(MEM_KEY, 0) for v in left_dict.values()])), len(mem_fmt % left_peak)) right_mem_width = max(max(map(len, ['N/A', right_mem_tag] + [mem_fmt % v.get(MEM_KEY, 0) for v in right_dict.values()])), len(mem_fmt % right_peak)) far_right_mem_width = max(max(map(len, ['N/A', change_mem_tag] + [mem_fmt % v for v in diff_mems_dict.values()])), len(mem_fmt % diff_peak)) far_far_right_mem_width = max(max(map(len, ['N/A', percent_change_mem_tag] + list(percent_diff_mems_dict.values()))), len(percent_diff_peak)) if include_mem: format_string = ("%%(left)%ds | %%(left_mem)%ds | %%(middle)-%ds | %%(right)%ds | %%(right_mem)%ds || %%(far_right)%ds || %%(far_right_mem)%ds" % (left_width, left_mem_width, middle_width, right_width, right_mem_width, far_right_width, far_right_mem_width)) else: format_string = ("%%(left)%ds | %%(middle)-%ds | %%(right)%ds || %%(far_right)%ds" % (left_width, middle_width, right_width, far_right_width)) if with_percent: format_string += " | %%(far_far_right)%ds" % far_far_right_width if include_mem: format_string += " | %%(far_far_right_mem)%ds" % far_far_right_mem_width header = format_string % {'left': left_tag, 'left_mem': left_mem_tag, 'middle': tag, 'right': right_tag, 'right_mem': right_mem_tag, 'far_right': change_tag, 'far_right_mem': change_mem_tag, 'far_far_right': percent_change_tag, 'far_far_right_mem': percent_change_mem_tag} total = format_string % {'left': left_sum, 'left_mem': mem_fmt % left_peak, 'middle': total_string, 'right': right_sum, 'right_mem': mem_fmt % right_peak, 'far_right': diff_sum, 'far_right_mem': mem_fmt % diff_peak, 'far_far_right': percent_diff_sum, 'far_far_right_mem': percent_diff_peak} # separator to go between headers and body sep = '-' * len(header) # the representation of the default value (0), to get replaced by N/A left_rep, right_rep, far_right_rep, far_far_right_rep = ("%%%ds | " % left_width) % 'N/A', (" | %%%ds |" % right_width) % 'N/A', ("|| %%%ds" % far_right_width) % 'N/A', ("| %%%ds" % far_far_right_width) % 'N/A' left_mem_rep, right_mem_rep, far_right_mem_rep, far_far_right_mem_rep = ("%%%ds | " % left_mem_width) % 'N/A', (" | %%%ds |" % right_mem_width) % 'N/A', ("|| %%%ds" % far_right_mem_width) % 'N/A', ("| %%%ds" % far_far_right_mem_width) % 'N/A' get_formatted_mem = (lambda k, v: (mem_fmt % v[k]) if k in v.keys() else 'N/A') return '\n'.join([header, sep, total, sep] + [format_string % {'left': left_dict.get(name, {}).get(TIME_KEY, 'N/A'), 'left_mem': get_formatted_mem(MEM_KEY, left_dict.get(name, {})), 'middle': name, 'right': right_dict.get(name, {}).get(TIME_KEY, 'N/A'), 'right_mem': get_formatted_mem(MEM_KEY, right_dict.get(name, {})), 'far_right': diff_times_dict.get(name, 'N/A'), 'far_right_mem': get_formatted_mem(name, diff_mems_dict), 'far_far_right': percent_diff_times_dict.get(name, 'N/A'), 'far_far_right_mem': percent_diff_mems_dict.get(name, 'N/A')} for name in names]).replace(left_rep, 'N/A'.center(len(left_rep) - 3) + ' | ').replace(right_rep, ' | ' + 'N/A'.center(len(right_rep) - 5) + ' |').replace(far_right_rep, '|| ' + 'N/A'.center(len(far_right_rep) - 3)).replace(far_far_right_rep, '| ' + 'N/A'.center(len(far_far_right_rep) - 2)).replace(left_mem_rep, 'N/A'.center(len(left_mem_rep) - 3) + ' | ').replace(right_mem_rep, ' | 
' + 'N/A'.center(len(right_mem_rep) - 5) + ' |').replace(far_right_mem_rep, '|| ' + 'N/A'.center(len(far_right_mem_rep) - 3)).replace(far_far_right_mem_rep, '| ' + 'N/A'.center(len(far_far_right_mem_rep) - 2)) def make_table_string(stats_dict, descending=True, sort_by_mem=False, tag="Time", mem_tag="Peak Mem", mem_fmt='%d ko', include_mem=False): if len(stats_dict.keys()) == 0: return 'No timing data' # We first get the names of all of the compiled files, sorted by # duration names = get_sorted_file_list_from_stats_dict(stats_dict, descending=descending, sort_by_mem=sort_by_mem) # compute the widths of the columns times_width = max(len('N/A'), len(tag), max(len(v[TIME_KEY]) for v in stats_dict.values() if TIME_KEY in v.keys()), len(sum_times(v[TIME_KEY] for v in stats_dict.values() if TIME_KEY in v.keys()))) mems_width = max(len('N/A'), len(mem_tag), max(len(mem_fmt % v.get(MEM_KEY, 0)) for v in stats_dict.values()), len(mem_fmt % (max(v.get(MEM_KEY, 0) for v in stats_dict.values())))) total_string = 'Total' if not include_mem else 'Total Time / Peak Mem' names_width = max(map(len, names + ["File Name", total_string])) if include_mem: format_string = "%%(time)%ds | %%(mem)%ds | %%(name)-%ds" % (times_width, mems_width, names_width) else: format_string = "%%(time)%ds | %%(name)-%ds" % (times_width, names_width) get_formatted_mem = (lambda k, v: (mem_fmt % v[k]) if k in v.keys() else 'N/A') header = format_string % {'time': tag, 'mem': mem_tag, 'name': 'File Name'} total = format_string % {'time': sum_times(v[TIME_KEY] for v in stats_dict.values() if TIME_KEY in v.keys()), 'mem': ((mem_fmt % max(v[MEM_KEY] for v in stats_dict.values() if MEM_KEY in v.keys())) if any(MEM_KEY in v.keys() for v in stats_dict.values()) else 'N/A'), 'name': total_string} sep = '-' * len(header) return '\n'.join([header, sep, total, sep] + [format_string % {'time': stats_dict[name].get(TIME_KEY, 'N/A'), 'mem': get_formatted_mem(MEM_KEY, stats_dict[name]), 'name': name} for name in names]) def print_or_write_table(table, files): if table[-1] != '\n': table += '\n' if len(files) == 0 or '-' in files: if hasattr(sys.stdout, 'buffer'): sys.stdout.buffer.write(table.encode("utf-8")) else: sys.stdout.write(table.encode("utf-8")) for file_name in files: if file_name != '-': with open(file_name, 'w', encoding="utf-8") as f: f.write(table) coq-8.15.0/tools/beautify-archive000077500000000000000000000040431417001151100166740ustar00rootroot00000000000000#!/bin/sh #This script compiles and beautifies an archive, check the correctness #of beautified files, then replace the original files by the #beautified ones, keeping a copy of original files in $OLDARCHIVE. #The script assumes: #- that the archive provides a Makefile built by coq_makefile, #- that coqc is in the path or that variables COQTOP and COQBIN are set. OLDARCHIVE=old_files NEWARCHIVE=beautify_files BEAUTIFYSUFFIX=.beautified if [ -e $OLDARCHIVE ]; then echo "Warning: $OLDARCHIVE directory found, the files are maybe already beautified"; sleep 5; fi echo ---- Producing beautified files in the beautification directory ------- if [ -e $NEWARCHIVE ]; then rm -r $NEWARCHIVE; fi if [ -e /tmp/$OLDARCHIVE.$$ ]; then rm -r /tmp/$OLDARCHIVE.$$; fi cp -pr . /tmp/$OLDARCHIVE.$$ cp -pr /tmp/$OLDARCHIVE.$$ $NEWARCHIVE cd $NEWARCHIVE rm description || true make clean make COQFLAGS='-beautify -q $(OPT) $(COQLIBS) $(OTHERFLAGS)' || \ { echo ---- Failed to beautify; exit 1; } echo -------- Upgrading files in the beautification directory -------------- beaufiles=`find . 
-name \*.v$BEAUTIFYSUFFIX` for i in $beaufiles; do j=`dirname $i`/`basename $i .v$BEAUTIFYSUFFIX`.v echo Upgrading $j in the beautification directory if [ $i -nt $j ]; then mv -f $i $j; fi done echo ---- Recompiling beautified files in the beautification directory ----- make clean make || { echo ---- Failed to recompile; exit 1; } echo ----- Saving old files in directory $OLDARCHIVE ------------------------- /bin/rm -r ../$OLDARCHIVE mv /tmp/$OLDARCHIVE.$$ ../$OLDARCHIVE echo Saving $OLDARCHIVE files done echo --------- Upgrading files in current directory ------------------------ vfiles=`find . -name \*.v` cd .. for i in $vfiles; do echo Upgrading $i in current directory if [ $NEWARCHIVE/$i -nt $i ]; then mv -f $NEWARCHIVE/$i $i; fi done echo -------- Beautification completed ------------------------------------- echo Old files are in directory '"'$OLDARCHIVE'"' echo New files are in current directory echo You can now remove the beautification directory '"'$NEWARCHIVE'"' coq-8.15.0/tools/configure/000077500000000000000000000000001417001151100154775ustar00rootroot00000000000000coq-8.15.0/tools/configure/cmdArgs.ml000066400000000000000000000155631417001151100174230ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* devel | s -> raise (Arg.Bad ("profile name expected instead of "^s)) let doc = " Sets a bunch of flags. Supported profiles: devel = " ^ devel_doc end let get_bool = function | "true" | "yes" | "y" | "all" -> true | "false" | "no" | "n" -> false | s -> raise (Arg.Bad ("boolean argument expected instead of "^s)) let get_ide = function | "opt" -> Opt | "byte" -> Byte | "no" -> No | s -> raise (Arg.Bad ("(opt|byte|no) argument expected instead of "^s)) let get_native = function | "yes" -> NativeYes | "no" -> NativeNo | "ondemand" -> NativeOndemand | s -> raise (Arg.Bad ("(yes|no|ondemand) argument expected instead of "^s)) let prefs = ref Profiles.default let arg_bool f = Arg.String (fun s -> prefs := f !prefs (get_bool s)) let arg_string f = Arg.String (fun s -> prefs := f !prefs s) let arg_string_option f = Arg.String (fun s -> prefs := f !prefs (Some s)) let arg_set f = Arg.Unit (fun () -> prefs := f !prefs) let arg_ide f = Arg.String (fun s -> prefs := f !prefs (Some (get_ide s))) let arg_native f = Arg.String (fun s -> prefs := f !prefs (get_native s)) let arg_profile = Arg.String (fun s -> prefs := Profiles.get s !prefs) (* TODO : earlier any option -foo was also available as --foo *) let check_absolute = function | None -> () | Some path -> if Filename.is_relative path then die "argument to -prefix must be an absolute path" else () let args_options = Arg.align [ "-prefix", arg_string_option (fun p prefix -> check_absolute prefix; { p with prefix }), "
<dir>
Set installation directory to (absolute path required)"; "-no-ask", arg_set (fun p -> { p with interactive = false }), " Don't ask questions / print variables during configure [questions will be filled with defaults]"; "-libdir", arg_string_option (fun p libdir -> { p with libdir }), " Where to install lib files"; "-configdir", arg_string_option (fun p configdir -> { p with configdir }), " Where to install config files"; "-datadir", arg_string_option (fun p datadir -> { p with datadir }), " Where to install data files"; "-mandir", arg_string_option (fun p mandir -> { p with mandir }), " Where to install man files"; "-docdir", arg_string_option (fun p docdir -> { p with docdir }), " Where to install doc files"; "-arch", arg_string_option (fun p arch -> { p with arch }), " Specifies the architecture"; "-natdynlink", arg_bool (fun p natdynlink -> { p with natdynlink }), "(yes|no) Use dynamic loading of native code or not"; "-coqide", arg_ide (fun p coqide -> { p with coqide }), "(opt|byte|no) Specifies whether or not to compile CoqIDE"; "-nomacintegration", arg_set (fun p -> { p with macintegration = false}), " Do not try to build CoqIDE MacOS integration"; "-browser", arg_string_option (fun p browser -> { p with browser }), " Use to open URL %s"; "-with-doc", arg_bool (fun p withdoc -> { p with withdoc }), "(yes|no) Compile the documentation or not"; "-byte-only", arg_set (fun p -> { p with byteonly = true }), " Compiles only bytecode version of Coq"; "-annot", arg_set (fun p -> { p with annot = true }), " Dumps ml text annotation files while compiling Coq (e.g. for Tuareg)"; "-bin-annot", arg_set (fun p -> { p with bin_annot = true }), " Dumps ml binary annotation files while compiling Coq (e.g. for Merlin)"; "-bytecode-compiler", arg_bool (fun p bytecodecompiler -> { p with bytecodecompiler }), "(yes|no) Enable Coq's bytecode reduction machine (VM)"; "-native-compiler", arg_native (fun p nativecompiler -> { p with nativecompiler }), "(yes|no|ondemand) Compilation to native code for conversion and normalization yes: -native-compiler option of coqc will default to 'yes', stdlib will be precompiled no: no native compilation available at all ondemand (default): -native-compiler option of coqc will default to 'ondemand', stdlib will not be precompiled"; "-coqwebsite", arg_string (fun p coqwebsite -> { p with coqwebsite }), " URL of the coq website"; "-warn-error", arg_bool (fun p warn_error -> { p with warn_error }), "(yes|no) Make OCaml warnings into errors (default no)"; "-profile", arg_profile, Profiles.doc; "-debug", arg_set (fun p -> { p with debug = true }), " Enable debug information for package detection" ] let parse_args () = Arg.parse args_options (fun s -> raise (Arg.Bad ("Unknown option: "^s))) "Available options for configure are:"; !prefs (* Support don't ask *) let cprintf prefs x = if prefs.interactive then cprintf x else Printf.ifprintf stdout x coq-8.15.0/tools/configure/cmdArgs.mli000066400000000000000000000050501417001151100175620ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Prefs.t val cprintf : Prefs.t -> ('a, out_channel, unit, unit) format4 -> 'a coq-8.15.0/tools/configure/configure.ml000066400000000000000000000611251417001151100200170ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * 
Copyright INRIA, CNRS and contributors *) (* b | None when arch_is_win32 arch -> "start %s" | None when arch = "Darwin" -> "open %s" | _ -> "firefox -remote \"OpenURL(%s,new-tab)\" || firefox %s &" (** * OCaml programs *) module CamlConf = struct type t = { camlbin : string ; caml_version : string ; camllib : string ; findlib_version : string } end let resolve_caml prefs = let () = try reset_caml_find camlexec (which camlexec.find) with Not_found -> die (sprintf "Error: cannot find '%s' in your path!\n" camlexec.find ^ "Please adjust your path or use the -ocamlfind option of ./configure") in if not (is_executable camlexec.find) then die ("Error: cannot find the executable '"^camlexec.find^"'.") else let findlib_version, _ = run camlexec.find ["query"; "findlib"; "-format"; "%v"] in let caml_version, _ = run camlexec.find ["ocamlc";"-version"] in let camllib, _ = run camlexec.find ["printconf";"stdlib"] in let camlbin = (* TODO beurk beurk beurk *) Filename.dirname (Filename.dirname camllib) / "bin/" in { CamlConf.camlbin; caml_version; camllib; findlib_version } (** Caml version as a list of ints [4;0;1] *) let caml_version_nums { CamlConf.caml_version } = generic_version_nums ~name:"the OCaml compiler" caml_version let check_caml_version prefs caml_version caml_version_nums = if caml_version_nums >= [4;5;0] then cprintf prefs "You have OCaml %s. Good!" caml_version else let () = cprintf prefs "Your version of OCaml is %s." caml_version in die "You need OCaml 4.05.0 or later." let check_findlib_version prefs { CamlConf.findlib_version } = let findlib_version_nums = generic_version_nums ~name:"findlib" findlib_version in if findlib_version_nums >= [1;8;0] then cprintf prefs "You have OCamlfind %s. Good!" findlib_version else let () = cprintf prefs "Your version of OCamlfind is %s." findlib_version in die "You need OCamlfind 1.8.0 or later." (** Note, these warnings are only used in Coq Makefile *) (** Explanation of enabled/disabled warnings: 4: fragile pattern matching: too common in the code and too annoying to avoid in general 9: missing fields in a record pattern: too common in the code and not worth the bother 27: innocuous unused variable: innocuous 41: ambiguous constructor or label: too common 42: disambiguated counstructor or label: too common 44: "open" shadowing already defined identifier: too common, especially when some are aliases 45: "open" shadowing a label or constructor: see 44 48: implicit elimination of optional arguments: too common 58: "no cmx file was found in path": See https://github.com/ocaml/num/issues/9 67: "unused functor parameter" seems totally bogus 68: "This pattern depends on mutable state" no idea what it means, dune builds don't display it 70: ".ml file without .mli file" bogus warning when used generally *) (* Note, we list all warnings to be complete *) let coq_warnings = "-w -a+1..3-4+5..8-9+10..26-27+28..40-41-42+43-44-45+46..47-48+49..57-58+59..66-67-68+69-70" let coq_warn_error prefs = if prefs.warn_error then "-warn-error +a" else "" (* Flags used to compile Coq and plugins (via coq_makefile) *) let caml_flags coq_annot_flag coq_bin_annot_flag = Printf.sprintf "-thread -rectypes %s %s %s %s %s" coq_warnings coq_annot_flag coq_bin_annot_flag coq_safe_string coq_strict_sequence (* Flags used to compile Coq but _not_ plugins (via coq_makefile) *) let coq_caml_flags = coq_warn_error (** * Native compiler *) let msg_byteonly = "Only the bytecode version of Coq will be available." 
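(* A minimal illustrative sketch (hypothetical, not from the original configure.ml):
   the checks above (check_caml_version, check_findlib_version) rely on
   generic_version_nums turning a version string into an int list, and on OCaml's
   structural comparison being lexicographic on lists.  The concrete version
   strings below are examples only. *)
let _illustrate_version_compare () =
  (* "4.13.1+flambda" parses to [4; 13; 1]; the non-numeric suffix is ignored. *)
  let nums = [4; 13; 1] in
  assert (nums >= [4; 5; 0]);            (* accepted: 4.13.1 >= 4.05.0 *)
  assert (not ([4; 4; 2] >= [4; 5; 0]))  (* rejected: 4.04.2 is too old *)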
let msg_no_ocamlopt () = warn "Cannot find the OCaml native-code compiler.\n%s" msg_byteonly let msg_no_dynlink_cmxa prefs = warn "Cannot find native-code dynlink library.\n%s" msg_byteonly; cprintf prefs "For building a native-code Coq, you may try to first"; cprintf prefs "compile and install a dummy dynlink.cmxa (see dev/dynlink.ml)"; cprintf prefs "and then run ./configure -natdynlink no" let check_native prefs camlenv = let () = if prefs.byteonly then raise Not_found in let version, _ = tryrun camlexec.find ["opt";"-version"] in if version = "" then let () = msg_no_ocamlopt () in raise Not_found else if fst (tryrun camlexec.find ["query";"dynlink"]) = "" then let () = msg_no_dynlink_cmxa prefs in raise Not_found else let () = let { CamlConf.caml_version } = camlenv in if version <> caml_version then warn "Native and bytecode compilers do not have the same version!" in cprintf prefs "You have native-code compilation. Good!" let best_compiler prefs camlenv = try check_native prefs camlenv; "opt" with Not_found -> "byte" (** * Native dynlink *) let hasnatdynlink prefs best_compiler = prefs.natdynlink && best_compiler = "opt" (** * OS dependent libraries *) (** Check for dune *) let dune_install_warning () = warn "You are using Dune < 2.9, the install procedure will not respect the -docdir and -configdir configure directives; please see dev/doc/INSTALL.make.md for more information" (* returns true if dune >= 2.9 *) let check_for_dune_29 () = let dune_version, _ = tryrun "dune" ["--version"] in let dune_version = generic_version_nums ~name:"dune" dune_version in match dune_version with (* Development version, consider it >= 2.9 *) | [] -> true | _ -> if dune_version < [2;9;0] then (dune_install_warning (); false) else true (** Zarith library *) let check_for_zarith prefs = let zarith,_ = tryrun camlexec.find ["query";"zarith"] in let zarith_cmai base = Sys.file_exists (base / "z.cmi") && Sys.file_exists (base / "zarith.cma") in let zarith_version, _ = run camlexec.find ["query"; "zarith"; "-format"; "%v"] in match zarith with | "" -> die "Zarith library not installed, required" | _ when not (zarith_cmai zarith) -> die "Zarith library installed but no development files found (try installing the -dev package)" | _ -> let zarith_version_int = generic_version_nums ~name:"Zarith" zarith_version in if zarith_version_int >= [1;10;0] then cprintf prefs "You have the Zarith library %s installed. Good!" zarith_version else die ("Zarith version 1.10 is required, you have " ^ zarith_version) (** * Documentation : do we have latex, hevea, ... *) let check_sphinx_deps () = ignore (run (which "python3") ["doc/tools/coqrst/checkdeps.py"]) let check_doc () = let err s = die (sprintf "A documentation build was requested, but %s was not found." 
s); in if not (program_in_path "python3") then err "python3"; if not (program_in_path "sphinx-build") then err "sphinx-build"; check_sphinx_deps () (** * Installation directories : bindir, libdir, mandir, docdir, etc *) (* Source code root *) let coqsrc = Sys.getcwd () let unix arch = os_type_cygwin || not (arch_is_win32 arch) (** Variable name, description, ref in prefs, default dir, prefix-relative *) type path_style = | Absolute of string (* Should start with a "/" *) | Relative of string (* Should not start with a "/" *) module InstallDir = struct type t = { var : string (** Makefile variable to write *) ; msg : string (** Description of the directory *) ; uservalue : string option (** Value given explictly by the user *) ; selfcontainedlayout : path_style (** Path style when layout is "local" *) ; unixlayout : path_style (** Path style for installation *) } let make var msg uservalue selfcontainedlayout unixlayout = { var; msg; uservalue; selfcontainedlayout; unixlayout } end let install prefs = [ InstallDir.make "COQPREFIX" "Coq" prefs.prefix (Relative "") (Relative "") ; InstallDir.make "COQLIBINSTALL" "the Coq library" prefs.libdir (Relative "lib") (Relative "lib/coq") ; InstallDir.make "CONFIGDIR" "the Coqide configuration files" prefs.configdir (Relative "config") (Absolute "/etc/xdg/coq") ; InstallDir.make "DATADIR" "the Coqide data files" prefs.datadir (Relative "share") (Relative "share/coq") ; InstallDir.make "MANDIR" "the Coq man pages" prefs.mandir (Relative "man") (Relative "share/man") ; InstallDir.make "DOCDIR" "documentation prefix path for all Coq packages" prefs.docdir (Relative "doc") (Relative "share/doc") ] let strip_trailing_slash_if_any p = if p.[String.length p - 1] = '/' then String.sub p 0 (String.length p - 1) else p let use_suffix prefix = function | Relative "" -> prefix | Relative suff -> prefix ^ "/" ^ suff | Absolute path -> path let relativize = function (* Turn a global layout based on some prefix to a relative layout *) | Relative _ as suffix -> suffix | Absolute path -> Relative (String.sub path 1 (String.length path - 1)) let find_suffix prefix path = match prefix with | None -> Absolute path | Some p -> let p = strip_trailing_slash_if_any p in let lpath = String.length path in let lp = String.length p in if lpath > lp && String.sub path 0 lp = p then Relative (String.sub path (lp+1) (lpath - lp - 1)) else Absolute path (* This computes the actual effective path for an install directory, based on the given prefix; if prefix is absent, it is assumed that the profile is "local" *) let do_one_instdir ~prefix ~arch InstallDir.{var; msg; uservalue; selfcontainedlayout; unixlayout} = (var,msg), match uservalue, prefix with | Some d, p -> d, find_suffix p d | None, Some p -> let suffix = if (arch_is_win32 arch) then selfcontainedlayout else relativize unixlayout in use_suffix p suffix, suffix | None, None -> let suffix = if (unix arch) then unixlayout else selfcontainedlayout in let base = if (unix arch) then "/usr/local" else "C:/coq" in let dflt = use_suffix base suffix in let () = printf "Where should I install %s [%s]? 
" msg dflt in let line = read_line () in if line = "" then (dflt,suffix) else (line,find_suffix None line) let install_dirs prefs arch = let prefix = match prefs.prefix with | None -> begin try Some (Sys.getenv "COQ_CONFIGURE_PREFIX") with | Not_found when prefs.interactive -> None | Not_found -> Some Sys.(getcwd () ^ "/../install/default") end | p -> p in List.map (do_one_instdir ~prefix ~arch) (install prefs) let select var install_dirs = List.find (fun ((v,_),_) -> v=var) install_dirs |> snd module CoqEnv = struct (** Coq core paths, for libraries, documentation, configuration, and data *) type t = { coqlib : string ; coqlibsuffix : path_style ; docdir : string ; docdirsuffix : path_style ; configdir : string ; configdirsuffix : path_style ; datadir : string ; datadirsuffix : path_style } end let resolve_coqenv install_dirs = let coqlib, coqlibsuffix = select "COQLIBINSTALL" install_dirs in let docdir, docdirsuffix = select "DOCDIR" install_dirs in let configdir, configdirsuffix = select "CONFIGDIR" install_dirs in let datadir,datadirsuffix = select "DATADIR" install_dirs in { CoqEnv.coqlib; coqlibsuffix; docdir; docdirsuffix ; configdir; configdirsuffix; datadir; datadirsuffix } (** * CC runtime flags *) (* Note that Coq's VM requires at least C99-compliant floating-point arithmetic; this should be ensured by OCaml's own C flags, which set a minimum of [--std=gnu99] ; modern compilers by default assume C11 or later, so no explicit [--std=] flags are added by OCaml *) let cflags_dflt = "-Wall -Wno-unused -g -O2" let cflags_sse2 = "-msse2 -mfpmath=sse" (* cflags, sse2_math = *) let compute_cflags () = let _, slurp = (* Test SSE2_MATH support *) tryrun camlexec.find ["ocamlc"; "-ccopt"; cflags_dflt ^ " -march=native -dM -E " ^ cflags_sse2; "-c"; coqsrc/"dev/header.c"] in (* any file *) if List.exists (fun line -> starts_with line "#define __SSE2_MATH__ 1") slurp then (cflags_dflt ^ " " ^ cflags_sse2, true) else (cflags_dflt, false) (** Test at configure time that no harmful double rounding seems to be performed with an intermediate 80-bit representation (x87). If this test fails but SSE2_MATH is available, the build can go further as Coq's primitive floats will use it through a dedicated external C implementation (instead of relying on OCaml operations) If this test fails and SSE2_MATH is not available, abort. *) let check_fmath sse2_math = let add = (+.) in let b = ldexp 1. 53 in let s = add 1. (ldexp 1. (-52)) in if (add b s <= b || add b 1. <> b || ldexp 1. (-1074) <= 0.) && not sse2_math then die "Detected non IEEE-754 compliant architecture (or wrong \ rounding mode). Use of Float is thus unsafe." 
let esc s = if String.contains s ' ' then "\"" ^ s ^ "\"" else s (** * Summary of the configuration *) let pr_native = function | NativeYes -> "yes" | NativeNo -> "no" | NativeOndemand -> "ondemand" let print_summary prefs arch camlenv best_compiler install_dirs coqide lablgtkdir hasnatdynlink idearchdef browser = let { CamlConf.caml_version; camlbin; camllib } = camlenv in let pr s = printf s in pr "\n"; pr " Architecture : %s\n" arch; pr " Sys.os_type : %s\n" Sys.os_type; pr " OCaml version : %s\n" caml_version; pr " OCaml binaries in : %s\n" (esc camlbin); pr " OCaml library in : %s\n" (esc camllib); if best_compiler = "opt" then pr " Native dynamic link support : %B\n" hasnatdynlink; if coqide <> "no" then pr " Lablgtk3 library in : %s\n" (esc lablgtkdir); if idearchdef = "QUARTZ" then pr " Mac OS integration is on\n"; pr " CoqIDE : %s\n" coqide; pr " Documentation : %s\n" (if prefs.withdoc then "All" else "None"); pr " Web browser : %s\n" browser; pr " Coq web site : %s\n" prefs.coqwebsite; pr " Bytecode VM enabled : %B\n" prefs.bytecodecompiler; pr " Native Compiler enabled : %s\n\n" (pr_native prefs.nativecompiler); (pr " Paths for true installation:\n"; List.iter (fun ((_,msg),(dir,_)) -> pr " - %s will be copied in %s\n" msg (esc dir)) install_dirs); pr "\n"; pr "If anything is wrong above, please restart './configure'.\n\n"; pr "*Warning* To compile the system for a new architecture\n"; pr " don't forget to do a 'make clean' before './configure'.\n" (** * Build the dev/ocamldebug-coq file *) let write_dbg_wrapper camlenv o = let { CamlConf.camlbin } = camlenv in let pr s = fprintf o s in pr "#!/bin/sh\n\n"; pr "###### ocamldebug-coq : a wrapper around ocamldebug for Coq ######\n\n"; pr "# DO NOT EDIT THIS FILE: automatically generated by ../configure #\n\n"; pr "export COQTOP=%S\n" coqsrc; pr "OCAMLDEBUG=%S\n" (camlbin^"/ocamldebug"); pr ". 
$COQTOP/dev/ocamldebug-coq.run\n" (** * Build the config/coq_config.ml file *) let write_configml camlenv coqenv caml_flags caml_version_nums arch arch_is_win32 hasnatdynlink browser idearchdef prefs o = let { CoqEnv.coqlib; coqlibsuffix; configdir; configdirsuffix; docdir; docdirsuffix; datadir; datadirsuffix } = coqenv in let { CamlConf.caml_version } = camlenv in let pr s = fprintf o s in let pr_s = pr "let %s = %S\n" in let pr_b = pr "let %s = %B\n" in let pr_i32 = pr "let %s = %dl\n" in let pr_p s o = pr "let %s = %S\n" s (match o with Relative s -> s | Absolute s -> s) in let pr_li n l = pr "let %s = [%s]\n" n (String.concat ";" (List.map string_of_int l)) in pr "(* DO NOT EDIT THIS FILE: automatically generated by ../configure *)\n"; pr "(* Exact command that generated this file: *)\n"; pr "(* %s *)\n\n" (String.concat " " (Array.to_list Sys.argv)); pr_s "coqlib" coqlib; pr_s "configdir" configdir; pr_s "datadir" datadir; pr_s "docdir" docdir; pr_p "coqlibsuffix" coqlibsuffix; pr_p "configdirsuffix" configdirsuffix; pr_p "datadirsuffix" datadirsuffix; pr_p "docdirsuffix" docdirsuffix; pr_s "ocamlfind" camlexec.find; pr_s "caml_flags" caml_flags; pr_s "version" coq_version; pr_s "caml_version" caml_version; pr_li "caml_version_nums" caml_version_nums; pr_s "arch" arch; pr_b "arch_is_win32" arch_is_win32; pr_s "exec_extension" !exe; pr "let gtk_platform = `%s\n" idearchdef; pr_b "has_natdynlink" hasnatdynlink; pr_i32 "vo_version" vo_magic; pr_s "browser" browser; pr_s "wwwcoq" prefs.coqwebsite; pr_s "wwwbugtracker" (prefs.coqwebsite ^ "bugs/"); pr_s "wwwrefman" (prefs.coqwebsite ^ "distrib/V" ^ coq_version ^ "/refman/"); pr_s "wwwstdlib" (prefs.coqwebsite ^ "distrib/V" ^ coq_version ^ "/stdlib/"); pr_s "localwwwrefman" ("file:/" ^ docdir ^ "/html/refman"); pr_b "bytecode_compiler" prefs.bytecodecompiler; pr "type native_compiler = NativeOff | NativeOn of { ondemand : bool }\n"; pr "let native_compiler = %s\n" (match prefs.nativecompiler with | NativeYes -> "NativeOn {ondemand=false}" | NativeNo -> "NativeOff" | NativeOndemand -> "NativeOn {ondemand=true}"); let core_src_dirs = [ "boot"; "config"; "lib"; "clib"; "kernel"; "library"; "engine"; "pretyping"; "interp"; "gramlib"; "parsing"; "proofs"; "tactics"; "toplevel"; "printing"; "ide"; "stm"; "vernac" ] in let core_src_dirs = List.fold_left (fun acc core_src_subdir -> acc ^ " \"" ^ core_src_subdir ^ "\";\n") "" core_src_dirs in pr "\nlet core_src_dirs = [\n%s]\n" core_src_dirs; pr "\nlet plugins_dirs = [\n"; let plugins = match open_in "config/plugin_list" with | exception Sys_error _ -> let plugins = try Sys.readdir "plugins" with _ -> [||] in Array.sort compare plugins; plugins | ch -> Array.of_list (snd (read_lines_and_close ch)) in Array.iter (fun f -> let f' = "plugins/"^f in if Sys.is_directory f' && f.[0] <> '.' then pr " %S;\n" f') plugins; pr "]\n"; pr "\nlet all_src_dirs = core_src_dirs @ plugins_dirs\n" (** * Build the config/Makefile file *) let write_makefile prefs install_dirs best_compiler caml_flags coq_caml_flags coqide arch exe dune_29 o = let pr s = fprintf o s in pr "###### config/Makefile : Configuration file for Coq ##############\n"; pr "# #\n"; pr "# This file is generated by the script \"configure\" #\n"; pr "# DO NOT EDIT IT !! DO NOT EDIT IT !! DO NOT EDIT IT !! #\n"; pr "# If something is wrong below, then rerun the script \"configure\" #\n"; pr "# with the good options (see the file INSTALL). 
#\n"; pr "# #\n"; pr "##################################################################\n\n"; pr "#Variable used to detect whether ./configure has run successfully.\n"; pr "COQ_CONFIGURED=yes\n\n"; pr "# Paths for true installation\n"; List.iter (fun ((v,msg),_) -> pr "# %s: path for %s\n" v msg) install_dirs; List.iter (fun ((v,_),(dir,_)) -> pr "%s=%S\n" v dir) install_dirs; pr "\n# Coq version\n"; pr "VERSION=%s\n" coq_version; pr "# The best compiler: native (=opt) or bytecode (=byte)\n"; pr "BEST=%s\n\n" best_compiler; (* Only used in the test suite: OCAMLFIND CAMLFLAGS *) pr "# Findlib command\n"; pr "OCAMLFIND=%S\n" camlexec.find; pr "# Caml flags\n"; pr "CAMLFLAGS=%s %s\n" caml_flags coq_caml_flags; pr "# Your architecture\n"; pr "# Can be obtain by UNIX command arch\n"; pr "ARCH=%s\n" arch; pr "# executable files extension, currently:\n"; pr "# Unix systems:\n"; pr "# Win32 systems : .exe\n"; pr "EXE=%s\n" exe; pr "# the command MKDIR (try to use mkdirhier if you have problems)\n"; pr "MKDIR=mkdir -p\n\n"; pr "# CoqIDE (no/byte/opt)\n"; pr "HASCOQIDE=%s\n" coqide; pr "# Defining REVISION\n"; pr "# Option to control compilation and installation of the documentation\n"; pr "WITHDOC=%s\n\n" (if prefs.withdoc then "all" else "no"); pr "# Option to produce precompiled files for native_compute\n"; pr "NATIVECOMPUTE=%s\n" (if prefs.nativecompiler = NativeYes then "-native-compiler yes" else ""); pr "COQWARNERROR=%s\n" (if prefs.warn_error then "-w +default" else ""); pr "CONFIGURE_DPROFILE=%s\n" prefs.dune_profile; pr "COQ_INSTALL_ENABLED=%b\n" prefs.install_enabled; if dune_29 then pr "DUNE_29_PLUS=yes\n"; () let write_dune_c_flags cflags o = let pr s = fprintf o s in pr "(%s)\n" cflags let write_configpy o = let pr s = fprintf o s in pr "# DO NOT EDIT THIS FILE: automatically generated by ../configure\n"; pr "version = '%s'\n" coq_version; pr "is_a_released_version = %s\n" (if is_a_released_version then "True" else "False") (* Main configure routine *) let main () = let prefs = CmdArgs.parse_args () in Util.debug := prefs.debug; let dune_29 = check_for_dune_29 () in let coq_annot_flag = coq_annot_flag prefs in let coq_bin_annot_flag = coq_bin_annot_flag prefs in let arch = arch prefs in let arch_is_win32 = arch_is_win32 arch in let exe = resolve_binary_suffix arch in Util.exe := exe; install_precommit_hook prefs; let browser = browser prefs arch in let camlenv = resolve_caml prefs in let caml_version_nums = caml_version_nums camlenv in check_caml_version prefs camlenv.CamlConf.caml_version caml_version_nums; check_findlib_version prefs camlenv; let best_compiler = best_compiler prefs camlenv in let caml_flags = caml_flags coq_annot_flag coq_bin_annot_flag in let coq_caml_flags = coq_caml_flags prefs in let hasnatdynlink = hasnatdynlink prefs best_compiler in check_for_zarith prefs; let coqide, lablgtkdir = Coqide.coqide camlexec.find prefs best_compiler camlenv.CamlConf.camllib in let idearchdef = Coqide.idearchdef camlexec.find prefs coqide arch in (if prefs.withdoc then check_doc ()); let install_dirs = install_dirs prefs arch in let coqenv = resolve_coqenv install_dirs in let cflags, sse2_math = compute_cflags () in check_fmath sse2_math; if prefs.interactive then print_summary prefs arch camlenv best_compiler install_dirs coqide lablgtkdir hasnatdynlink idearchdef browser; write_config_file ~file:"dev/ocamldebug-coq" ~bin:true (write_dbg_wrapper camlenv); write_config_file ~file:"config/coq_config.ml" (write_configml camlenv coqenv caml_flags caml_version_nums arch arch_is_win32 
hasnatdynlink browser idearchdef prefs); write_config_file ~file:"config/Makefile" (write_makefile prefs install_dirs best_compiler caml_flags coq_caml_flags coqide arch exe dune_29); write_config_file ~file:"config/dune.c_flags" (write_dune_c_flags cflags); write_config_file ~file:"config/coq_config.py" write_configpy; () let _ = main () coq-8.15.0/tools/configure/configure.mli000066400000000000000000000012431417001151100201630ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* (false, v) let pr_ide = function No -> "no" | Byte -> "only bytecode" | Opt -> "native" exception Ide of ide (** If the user asks an impossible coqide, we abort the configuration *) let set_ide prefs ide msg = match ide, prefs.coqide with | No, Some (Byte|Opt) | Byte, Some Opt -> die (msg^":\n=> cannot build requested CoqIDE") | _ -> cprintf prefs "%s:\n=> %s CoqIDE will be built." msg (pr_ide ide); raise (Ide ide) (* XXX *) let lablgtkdir = ref "" (** Which CoqIDE is possible ? Which one is requested ? This function also sets the lablgtkdir reference in case of success. *) let check_coqide ocamlfind prefs best_compiler camllib = if prefs.coqide = Some No then set_ide prefs No "CoqIde manually disabled"; let dir, via = get_lablgtkdir ocamlfind in if dir = "" then set_ide prefs No "LablGtk3 or LablGtkSourceView3 not found" else let (ok, version) = check_lablgtk_version ocamlfind in let found = Format.sprintf "LablGtk3 and LablGtkSourceView3 found (%s)" version in if not ok then set_ide prefs No (found^", but too old (required >= 3.1.0, found " ^ version ^ ")"); (* We're now sure to produce at least one kind of coqide *) lablgtkdir := dir; if prefs.coqide = Some Byte then set_ide prefs Byte (found^", bytecode requested"); if best_compiler <> "opt" then set_ide prefs Byte (found^", but no native compiler"); if not (Sys.file_exists (camllib/"threads"/"threads.cmxa")) then set_ide prefs Byte (found^", but no native threads"); set_ide prefs Opt (found^", with native threads") let coqide ocamlfind prefs best_compiler camllib = (try check_coqide ocamlfind prefs best_compiler camllib with Ide Opt -> "opt" | Ide Byte -> "byte" | Ide No -> "no"), !lablgtkdir (** System-specific CoqIDE flags *) let idearchdef ocamlfind prefs coqide arch = match coqide, arch with | "opt", "Darwin" when prefs.macintegration -> let osxdir,_ = tryrun ocamlfind ["query";"lablgtkosx"] in if osxdir <> "" then "QUARTZ" else "X11" | "opt", "win32" -> "WIN32" | _, "win32" -> "WIN32" | _ -> "X11" coq-8.15.0/tools/configure/coqide.mli000066400000000000000000000017651417001151100174570ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* CmdArgs.Prefs.t -> string -> string -> string * string (** [idearchdef ocamlfind prefs coqide arch] returns ["QUARTZ"|"X11"|"WIN32"] *) val idearchdef : string -> CmdArgs.Prefs.t -> string -> string -> string coq-8.15.0/tools/configure/dune000066400000000000000000000002101417001151100163460ustar00rootroot00000000000000(library (name conf) (modules :standard \ configure)) (executable (name configure) (modules configure) (libraries unix str conf)) 
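(* A usage sketch (hypothetical, not part of the Coq sources): the library
   probes above (zarith, LablGtk3/LablGtkSourceView3) all follow the same
   pattern built from the helpers defined in util.ml below: tryrun swallows
   failures and returns the first output line, which generic_version_nums then
   turns into an int list for a lexicographic comparison.  The "lablgtk3"
   package name and the 3.1.0 bound are used here only as an example. *)
let _probe_package_version ocamlfind =
  let version, _ = Util.tryrun ocamlfind ["query"; "lablgtk3"; "-format"; "%v"] in
  if version = "" then None                         (* package not installed *)
  else Some (Util.generic_version_nums ~name:"lablgtk3" version >= [3; 1; 0])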
coq-8.15.0/tools/configure/util.ml000066400000000000000000000172731417001151100170200ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* fprintf oc "\n%!") oc let cprintf s = cfprintf stdout s let ceprintf s = cfprintf stderr s let die msg = ceprintf "%s%s%s\nConfiguration script failed!" red msg reset; exit 1 let warn s = kfprintf (fun oc -> cfprintf oc "%s" reset) stdout ("%sWarning: " ^^ s) yellow let i2s = string_of_int let (/) x y = x ^ "/" ^ y (** Remove the final '\r' that may exists on Win32 *) let remove_final_cr s = let n = String.length s in if n<>0 && s.[n-1] = '\r' then String.sub s 0 (n-1) else s let check_exit_code (_,code) = match code with | Unix.WEXITED 0 -> () | Unix.WEXITED 127 -> failwith "no such command" | Unix.WEXITED n -> failwith ("exit code " ^ i2s n) | Unix.WSIGNALED n -> failwith ("killed by signal " ^ i2s n) | Unix.WSTOPPED n -> failwith ("stopped by signal " ^ i2s n) (** As for Unix.close_process, our Unix.waipid will ignore all EINTR *) let rec waitpid_non_intr pid = try Unix.waitpid [] pid with Unix.Unix_error (Unix.EINTR, _, _) -> waitpid_non_intr pid (** Below, we'd better read all lines on a channel before closing it, otherwise a SIGPIPE could be encountered by the sub-process *) let read_lines_and_close cin = let lines = ref [] in begin try while true do lines := remove_final_cr (input_line cin) :: !lines done with End_of_file -> () end; close_in cin; let lines = List.rev !lines in try List.hd lines, lines with Failure _ -> "", [] let read_lines_and_close_fd fd = read_lines_and_close (Unix.in_channel_of_descr fd) (** Run some unix command and read the first line of its output. We avoid Unix.open_process and its non-fully-portable /bin/sh, especially when it comes to quoting the filenames. See open_process_pid in ide/coqide/coq.ml for more details. Error messages: - if err=StdErr, any error message goes in the stderr of our script. - if err=StdOut, we merge stderr and stdout (just as 2>&1). - if err=DevNull, we drop the error messages (same as 2>/dev/null). 
*) type err = StdErr | StdOut | DevNull let exe = ref "" (* Will be set later on, when the call to uname is done *) let run ?(fatal=true) ?(verbose=false) ?(err=StdErr) prog args = let prog = (* Ensure prog ends with exe *) if Str.string_match (Str.regexp ("^.*" ^ !exe ^ "$")) prog 0 then prog else (prog ^ !exe) in let argv = Array.of_list (prog::args) in try let out_r,out_w = Unix.pipe () in let nul_r,nul_w = Unix.pipe () in let () = Unix.set_close_on_exec out_r in let () = Unix.set_close_on_exec nul_r in let fd_err = match err with | StdErr -> Unix.stderr | StdOut -> out_w | DevNull -> nul_w in let pid = Unix.create_process prog argv Unix.stdin out_w fd_err in let () = Unix.close out_w in let () = Unix.close nul_w in let line, all = read_lines_and_close_fd out_r in let _ = read_lines_and_close_fd nul_r in let () = check_exit_code (waitpid_non_intr pid) in line, all with | _ when not fatal && not verbose -> "", [] | e -> let cmd = String.concat " " (prog::args) in let exn = match e with Failure s -> s | _ -> Printexc.to_string e in let msg = sprintf "Error while running '%s' (%s)" cmd exn in if fatal then die msg else (warn "%s" msg; "", []) let tryrun prog args = run ~fatal:false ~err:DevNull prog args (** Splitting a string at some character *) let string_split c s = let len = String.length s in let rec split n = try let pos = String.index_from s n c in let dir = String.sub s n (pos-n) in dir :: split (succ pos) with | Not_found -> [String.sub s n (len-n)] in if len = 0 then [] else split 0 (** String prefix test : does [s1] starts with [s2] ? *) let starts_with s1 s2 = let l1 = String.length s1 and l2 = String.length s2 in l2 <= l1 && s2 = String.sub s1 0 l2 (** Turn a version string such as "4.01.0+rc2" into the list ["4";"01";"1"], stopping at the first non-digit or "." *) let numeric_prefix_list s = let isnum c = (c = '.' || (c >= '0' && c <= '9')) in let max = String.length s in let i = ref 0 in while !i < max && isnum s.[!i] do incr i done; match string_split '.' (String.sub s 0 !i) with | [v] -> [v;"0";"0"] | [v1;v2] -> [v1;v2;"0"] | [v1;v2;""] -> [v1;v2;"0"] (* e.g. because it ends with ".beta" *) | v -> v let generic_version_nums ~name version_string = let version_list = numeric_prefix_list version_string in if !debug then begin let pp_sep = Format.pp_print_space in Format.(eprintf "Parsing version info for %s: @[raw: %s / split: %a@]@\n%!" name version_string (pp_print_list ~pp_sep pp_print_string) version_list) end; try List.map int_of_string version_list with _ -> "I found " ^ name ^ " but cannot read its version number!\n" ^ "Is it installed properly?" |> die (** Combined existence and directory tests *) let dir_exists f = Sys.file_exists f && Sys.is_directory f (** Does a file exist and is executable ? *) let is_executable f = try let () = Unix.access f [Unix.X_OK] in true with Unix.Unix_error _ -> false (** Equivalent of rm -f *) let safe_remove f = try Unix.chmod f 0o644; Sys.remove f with _ -> () (** The PATH list for searching programs *) let os_type_win32 = (Sys.os_type = "Win32") let os_type_cygwin = (Sys.os_type = "Cygwin") let global_path = try string_split (if os_type_win32 then ';' else ':') (Sys.getenv "PATH") with Not_found -> [] (** A "which" command. 
May raise [Not_found] *) let which prog = let rec search = function | [] -> raise Not_found | dir :: path -> let file = if os_type_win32 then dir/prog^".exe" else dir/prog in if is_executable file then file else search path in search global_path let program_in_path prog = try let _ = which prog in true with Not_found -> false (** * Architecture *) let arch_progs = [("/bin/uname",["-s"]); ("/usr/bin/uname",["-s"]); ("/bin/arch", []); ("/usr/bin/arch", []); ("/usr/ucb/arch", []) ] let query_arch () = cprintf "I can not automatically find the name of your architecture."; cprintf "Give me a name, please [win32 for Win95, Win98 or WinNT]: %!"; read_line () let rec try_archs = function | (prog,args)::rest when is_executable prog -> let arch, _ = tryrun prog args in if arch <> "" then arch else try_archs rest | _ :: rest -> try_archs rest | [] -> query_arch () let arch = function | Some a -> a | None -> let arch,_ = tryrun "uname" ["-s"] in if starts_with arch "CYGWIN" then "win32" else if starts_with arch "MINGW32" then "win32" else if arch <> "" then arch else try_archs arch_progs let write_config_file ~file ?(bin=false) action = safe_remove file; let o = if bin then open_out_bin file else open_out file in try action o; close_out o; Unix.chmod file 0o444 with _ -> close_out o; safe_remove file coq-8.15.0/tools/configure/util.mli000066400000000000000000000032141417001151100171570ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a val string_split : char -> string -> string list val starts_with : string -> string -> bool val generic_version_nums : name:string -> string -> int list val warn : ('a, out_channel, unit, unit, unit, unit) format6 -> 'a val die : string -> 'a val is_executable : string -> bool val dir_exists : string -> bool val which : string -> string val program_in_path : string -> bool val exe : string ref type err = StdErr | StdOut | DevNull val run : ?fatal:bool -> ?verbose:bool -> ?err:err -> string -> string list -> string * string list val tryrun : string -> string list -> string * string list val read_lines_and_close : in_channel -> string * string list val arch : string option -> string (* bin is used to avoid adding \r on Cygwin/Windows *) val write_config_file : file:string -> ?bin:bool -> (out_channel -> unit) -> unit (* enable debug mode *) val debug : bool ref coq-8.15.0/tools/coq_makefile.ml000066400000000000000000000362561417001151100165030ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* ) f g = fun x -> g (f x) let output_channel = ref stdout let makefile_name = ref "Makefile" let make_name = ref "" let print x = output_string !output_channel x let printf x = Printf.fprintf !output_channel x let rec print_list sep = function | [ x ] -> print x | x :: l -> print x; print sep; print_list sep l | [] -> () let rec print_prefix_list sep = function | x :: l -> print sep; print x; print_prefix_list sep l | [] -> () let usage_coq_makefile () = output_string stderr "Usage summary:\ \n\ \ncoq_makefile .... [file.v] ... [file.ml[ig]?] ... [file.ml{lib,pack}]\ \n ... [-I dir] ... [-R physicalpath logicalpath]\ \n ... [-Q physicalpath logicalpath] ... [VARIABLE = value]\ \n ... [-arg opt] ... 
[-docroot path] [-f file] [-o file]\ \n [-h] [--help]\ \n"; output_string stderr "\ \nFull list of options:\ \n\ \n[file.v]: Coq file to be compiled\ \n[file.ml[ig]?]: Objective Caml file to be compiled\ \n[file.ml{lib,pack}]: ocamlbuild-style file that describes a Objective Caml\ \n library/module\ \n[-I dir]: look for Objective Caml dependencies in \"dir\"\ \n[-R physicalpath logicalpath]: look for Coq dependencies recursively\ \n starting from \"physicalpath\". The logical path associated to the\ \n physical path is \"logicalpath\".\ \n[-Q physicalpath logicalpath]: look for Coq dependencies starting from\ \n \"physicalpath\". The logical path associated to the physical path\ \n is \"logicalpath\".\ \n[VARIABLE = value]: Add the variable definition \"VARIABLE=value\"\ \n[-arg opt]: send option \"opt\" to coqc\ \n[-docroot path]: Install the documentation in this folder, relative to\ \n \"user-contrib\".\ \n[-f file]: take the contents of file as arguments\ \n[-o file]: output should go in file file (recommended)\ \n Output file outside the current directory is forbidden.\ \n[-h]: print this usage summary\ \n[--help]: equivalent to [-h]\n"; exit 1 let is_genrule r = (* generic rule (like bar%foo: ...) *) let genrule = Str.regexp("%") in Str.string_match genrule r 0 let is_prefix dir1 dir2 = let l1 = String.length dir1 in let l2 = String.length dir2 in let sep = Filename.dir_sep in if dir1 = dir2 then true else if l1 + String.length sep <= l2 then let dir1' = String.sub dir2 0 l1 in let sep' = String.sub dir2 l1 (String.length sep) in dir1' = dir1 && sep' = sep else false let physical_dir_of_logical_dir ldir = let ldir = Bytes.of_string ldir in let le = Bytes.length ldir - 1 in let pdir = if le >= 0 && Bytes.get ldir le = '.' then Bytes.sub ldir 0 (le - 1) else Bytes.copy ldir in for i = 0 to le - 1 do if Bytes.get pdir i = '.' then Bytes.set pdir i '/'; done; Bytes.to_string pdir let read_whole_file s = let ic = open_in s in let b = Buffer.create (1 lsl 12) in try while true do let s = input_line ic in Buffer.add_string b s; Buffer.add_char b '\n'; done; assert false; with End_of_file -> close_in ic; Buffer.contents b (* Use this for quoting contents of variables which never appears as target or * pattern. *) let makefile_quote s = let out = Buffer.create 16 in Buffer.add_string out "'"; String.iter (fun c -> match c with | '$' -> Buffer.add_string out "$$" | '#' -> Buffer.add_string out "\\#" | '\'' -> Buffer.add_string out "'\\''" | _ -> Buffer.add_char out c ) s; Buffer.add_string out "'"; Buffer.contents out let quote s = if String.contains s ' ' || CString.is_empty s then "'" ^ s ^ "'" else s let generate_makefile oc conf_file local_file local_late_file dep_file args project = let env = Boot.Env.init () in (* XX coq makefile should ship files on its own dir *) let cmf_dir = Boot.Env.tool env "" in let makefile_template = Boot.Path.relative cmf_dir "CoqMakefile.in" in if not (Boot.Path.exists makefile_template) then begin let makefile_template = Boot.Path.to_string makefile_template in Format.eprintf "Error: cannot find %s" makefile_template; exit 1 end; let makefile_template = Boot.Path.to_string makefile_template in let s = read_whole_file makefile_template in let s = List.fold_left (* We use global_substitute to avoid running into backslash issues due to \1 etc. 
*) (fun s (k,v) -> Str.global_substitute (Str.regexp_string k) (fun _ -> v) s) s [ "@CONF_FILE@", conf_file; "@LOCAL_FILE@", local_file; "@LOCAL_LATE_FILE@", local_late_file; "@DEP_FILE@", dep_file; "@COQ_VERSION@", Coq_config.version; "@PROJECT_FILE@", (Option.default "" project.project_file); "@COQ_MAKEFILE_INVOCATION@",String.concat " " (List.map quote args); ] in output_string oc s let section oc s = let pad = String.make (76 - String.length s) ' ' in let sharps = String.make 79 '#' in let spaces = "#" ^ String.make 77 ' ' ^ "#" in fprintf oc "\n%s\n" sharps; fprintf oc "%s\n" spaces; fprintf oc "# %s%s#\n" s pad; fprintf oc "%s\n" spaces; fprintf oc "%s\n\n" sharps ;; let generate_conf_includes oc { ml_includes; r_includes; q_includes } = section oc "Path directives (-I, -R, -Q)."; let module S = String in let map = map_sourced_list in let dash1 opt v = sprintf "-%s %s" opt (quote v) in let dash2 opt v1 v2 = sprintf "-%s %s %s" opt (quote v1) (quote v2) in fprintf oc "COQMF_OCAMLLIBS = %s\n" (S.concat " " (map (fun { path } -> dash1 "I" path) ml_includes)); fprintf oc "COQMF_SRC_SUBDIRS = %s\n" (S.concat " " (map (fun { path } -> quote path) ml_includes)); fprintf oc "COQMF_COQLIBS = %s %s %s\n" (S.concat " " (map (fun { path } -> dash1 "I" path) ml_includes)) (S.concat " " (map (fun ({ path },l) -> dash2 "Q" path l) q_includes)) (S.concat " " (map (fun ({ path },l) -> dash2 "R" path l) r_includes)); fprintf oc "COQMF_COQLIBS_NOML = %s %s\n" (S.concat " " (map (fun ({ path },l) -> dash2 "Q" path l) q_includes)) (S.concat " " (map (fun ({ path },l) -> dash2 "R" path l) r_includes)); fprintf oc "COQMF_CMDLINE_COQLIBS = %s %s %s\n" (S.concat " " (map_cmdline (fun { path } -> dash1 "I" path) ml_includes)) (S.concat " " (map_cmdline (fun ({ path },l) -> dash2 "Q" path l) q_includes)) (S.concat " " (map_cmdline (fun ({ path },l) -> dash2 "R" path l) r_includes)); ;; let windrive s = if Coq_config.arch_is_win32 && Str.(string_match (regexp "^[a-zA-Z]:") s 0) then Str.matched_string s else "" ;; let generate_conf_coq_config oc = section oc "Coq configuration."; Envars.print_config ~prefix_var_name:"COQMF_" oc; let env = Boot.Env.init () in let coqlib = Boot.Env.(coqlib env |> Path.to_string) in (* XXX: FIXME, why does this variable needs the root lib *) fprintf oc "COQMF_WINDRIVE=%s\n" (windrive coqlib) ;; let generate_conf_files oc { v_files; mli_files; mlg_files; ml_files; mllib_files; mlpack_files; } = let module S = String in let map = map_sourced_list in section oc "Project files."; fprintf oc "COQMF_VFILES = %s\n" (S.concat " " (map quote v_files)); fprintf oc "COQMF_MLIFILES = %s\n" (S.concat " " (map quote mli_files)); fprintf oc "COQMF_MLFILES = %s\n" (S.concat " " (map quote ml_files)); fprintf oc "COQMF_MLGFILES = %s\n" (S.concat " " (map quote mlg_files)); fprintf oc "COQMF_MLPACKFILES = %s\n" (S.concat " " (map quote mlpack_files)); fprintf oc "COQMF_MLLIBFILES = %s\n" (S.concat " " (map quote mllib_files)); let cmdline_vfiles = filter_cmdline v_files in fprintf oc "COQMF_CMDLINE_VFILES = %s\n" (S.concat " " (List.map quote cmdline_vfiles)); ;; let rec all_start_with prefix = function | [] -> true | [] :: _ -> false | (x :: _) :: rest -> x = prefix && all_start_with prefix rest let rec logic_gcd acc = function | [] -> acc | [] :: _ -> acc | (hd :: tl) :: rest -> if all_start_with hd rest then logic_gcd (acc @ [hd]) (tl :: List.map List.tl rest) else acc let generate_conf_doc oc { docroot; q_includes; r_includes } = let includes = List.map (forget_source > snd) (q_includes @ 
r_includes) in let logpaths = List.map (String.split_on_char '.') includes in let gcd = logic_gcd [] logpaths in let root = match docroot with | None -> if gcd = [] then let destination = "orphan_" ^ (String.concat "_" includes) in eprintf "Warning: No common logical root.\n"; eprintf "Warning: In this case the -docroot option should be given.\n"; eprintf "Warning: Otherwise the install-doc target is going to install files\n"; eprintf "Warning: in %s\n" destination; destination else String.concat Filename.dir_sep gcd | Some p -> p in Printf.fprintf oc "COQMF_INSTALLCOQDOCROOT = %s\n" (quote root) let generate_conf_native oc native_compiler = section oc "Native compiler."; let flag = match native_compiler with | None -> "" | Some NativeYes -> "yes" | Some NativeNo -> "no" | Some NativeOndemand -> "ondemand" in Printf.fprintf oc "COQMF_COQPROJECTNATIVEFLAG = %s\n" flag let generate_conf_defs oc { defs; extra_args } = section oc "Extra variables."; List.iter (forget_source > (fun (k,v) -> Printf.fprintf oc "%s = %s\n" k v)) defs; Printf.fprintf oc "COQMF_OTHERFLAGS = %s\n" (String.concat " " (List.map (forget_source > makefile_quote) extra_args)) let generate_conf oc project args = fprintf oc "# This configuration file was generated by running:\n"; fprintf oc "# %s\n\n" (String.concat " " (List.map quote args)); generate_conf_files oc project; generate_conf_includes oc project; generate_conf_coq_config oc; generate_conf_native oc project.native_compiler; generate_conf_defs oc project; generate_conf_doc oc project; ;; let ensure_root_dir ({ ml_includes; r_includes; q_includes; v_files; ml_files; mli_files; mlg_files; mllib_files; mlpack_files } as project) = let exists f = List.exists (forget_source > f) in let here = Sys.getcwd () in let not_tops = List.for_all (fun s -> s.thing <> Filename.basename s.thing) in if exists (fun { canonical_path = x } -> x = here) ml_includes || exists (fun ({ canonical_path = x },_) -> is_prefix x here) r_includes || exists (fun ({ canonical_path = x },_) -> is_prefix x here) q_includes || (not_tops v_files && not_tops mli_files && not_tops mlg_files && not_tops ml_files && not_tops mllib_files && not_tops mlpack_files) then project else let source x = {thing=x; source=CmdLine} in let here_path = { path = "."; canonical_path = here } in { project with ml_includes = source here_path :: ml_includes; r_includes = source (here_path, "Top") :: r_includes } ;; let check_overlapping_include { q_includes; r_includes } = let pwd = Sys.getcwd () in let aux = function | [] -> () | {thing = { path; canonical_path }, _} :: l -> if not (is_prefix pwd canonical_path) then eprintf "Warning: %s (used in -R or -Q) is not a subdirectory of the current directory\n\n" path; List.iter (fun {thing={ path = p; canonical_path = cp }, _} -> if is_prefix canonical_path cp || is_prefix cp canonical_path then eprintf "Warning: %s and %s overlap (used in -R or -Q)\n\n" path p) l in aux (q_includes @ r_includes) ;; let check_native_compiler = function | None -> () | Some flag -> match Coq_config.native_compiler, flag with | Coq_config.NativeOff, (NativeYes | NativeOndemand) -> eprintf "Warning: native compilation is globally deactivated by the configure flag\n" | _ -> () let chop_prefix p f = let len_p = String.length p in let len_f = String.length f in String.sub f len_p (len_f - len_p) let destination_of { ml_includes; q_includes; r_includes; } file = let file_dir = CUnix.canonical_path_name (Filename.dirname file) in let includes = q_includes @ r_includes in let mk_destination logic 
canonical_path = Filename.concat (physical_dir_of_logical_dir logic) (chop_prefix canonical_path file_dir) in let candidates = CList.map_filter (fun {thing={ canonical_path }, logic} -> if is_prefix canonical_path file_dir then Some(mk_destination logic canonical_path) else None) includes in match candidates with | [] -> (* BACKWARD COMPATIBILITY: -I into the only logical root *) begin match r_includes, List.find (fun {thing={ canonical_path = p }} -> is_prefix p file_dir) ml_includes with | [{thing={ canonical_path }, logic}], {thing={ canonical_path = p }} -> let destination = Filename.concat (physical_dir_of_logical_dir logic) (chop_prefix p file_dir) in Printf.printf "%s" (quote destination) | _ -> () (* skip *) | exception Not_found -> () (* skip *) end | [s] -> Printf.printf "%s" (quote s) | _ -> assert false let share_prefix s1 s2 = let s1 = String.split_on_char '.' s1 in let s2 = String.split_on_char '.' s2 in match s1, s2 with | x :: _ , y :: _ -> x = y | _ -> false let _ = let _fhandle = Feedback.(add_feeder (console_feedback_listener Format.err_formatter)) in let prog, args = let args = Array.to_list Sys.argv in let prog = List.hd args in prog, List.tl args in let only_destination, args = match args with | "-destination-of" :: tgt :: rest -> Some tgt, rest | _ -> None, args in let project = let warning_fn x = Format.eprintf "%s@\n%!" x in try cmdline_args_to_project ~warning_fn ~curdir:Filename.current_dir_name args with Parsing_error s -> prerr_endline s; usage_coq_makefile () in if only_destination <> None then begin destination_of project (Option.get only_destination); exit 0 end; if project.makefile = None then eprintf "Warning: Omitting -o is deprecated\n\n"; (* We want to know the name of the Makefile (say m) in order to * generate m.conf and include m.local *) let conf_file = Option.default "CoqMakefile" project.makefile ^ ".conf" in let local_file = Option.default "CoqMakefile" project.makefile ^ ".local" in let local_late_file = Option.default "CoqMakefile" project.makefile ^ ".local-late" in let dep_file = "." ^ Option.default "CoqMakefile" project.makefile ^ ".d" in let project = ensure_root_dir project in check_overlapping_include project; check_native_compiler project.native_compiler; let ocm = Option.cata open_out stdout project.makefile in generate_makefile ocm conf_file local_file local_late_file dep_file (prog :: args) project; close_out ocm; let occ = open_out conf_file in generate_conf occ project (prog :: args); close_out occ; exit 0 coq-8.15.0/tools/coq_tex.ml000066400000000000000000000237001417001151100155140ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* (* a dummy command, just in case the last line was a comment *) output_string chan_out "Set Printing Width 78.\n"; close_in chan_in; close_out chan_out (* Second pass: insert the answers of Coq from [coq_output] into the * TeX file [texfile]. The result goes in file [result]. 
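   Concretely, judging from the [inside] and [outside] functions below: a
   [coq_example] block is echoed together with Coq's answers, a
   [coq_example*] block keeps only the input, a [coq_example#] block keeps
   only the answers, and a [coq_eval] block produces no output at all.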
*) let tex_escaped s = let delims = Str.regexp "[_{}&%#$\\^~ <>'`]" in let adapt_delim = function | "{" | "}" | "&" | "%" | "#" | "$" as c -> "\\"^c | "_" -> "{\\char`\\_}" | "\\" -> "{\\char'134}" | "^" -> "{\\char'136}" | "~" -> "{\\char'176}" | " " -> "~" | "<" -> "{<}" | ">" -> "{>}" | "'" -> "{\\textquotesingle}" | "`" -> "\\`{}" | _ -> assert false in let adapt = function | Str.Text s -> s | Str.Delim s -> adapt_delim s in String.concat "" (List.map adapt (Str.full_split delims s)) let encapsule sl c_out s = if sl then Printf.fprintf c_out "\\texttt{\\textit{%s}}\\\\\n" (tex_escaped s) else Printf.fprintf c_out "\\texttt{%s}\\\\\n" (tex_escaped s) let print_block c_out bl = List.iter (fun s -> if s="" then () else encapsule !slanted c_out s) bl let insert texfile coq_output result = let c_tex = open_in texfile in let c_coq = open_in coq_output in let c_out = open_out result in (* read lines until a prompt is found (starting from the second line), purge prompts on the first line and return their number *) let last_read = ref (input_line c_coq) in let read_output () = let first = !last_read in let nb = ref 0 in (* remove the leading prompts *) let rec skip_prompts pos = if Str.string_match any_prompt first pos then let () = incr nb in skip_prompts (Str.match_end ()) else pos in let first = let start = skip_prompts 0 in String.sub first start (String.length first - start) in (* read and return the following lines until a prompt is found *) let rec read_lines () = let s = input_line c_coq in if Str.string_match any_prompt s 0 then begin last_read := s; [] end else s :: read_lines () in (first :: read_lines (), !nb) in let unhandled_output = ref None in let read_output () = match !unhandled_output with | Some some -> unhandled_output := None; some | None -> read_output () in (* we are inside a \begin{coq_...} ... \end{coq_...} block * show_... tell what kind of block it is *) let rec inside show_answers show_questions not_first discarded = let s = input_line c_tex in if s = "" then inside show_answers show_questions not_first (discarded + 1) else if not (Str.string_match end_coq s 0) then begin let (bl,n) = read_output () in assert (n > discarded); let n = n - discarded in if not_first then output_string c_out "\\medskip\n"; if !verbose then Printf.printf "Coq < %s\n" s; if show_questions then encapsule false c_out ("Coq < " ^ s); let rec read_lines k = if k = 0 then [] else let s = input_line c_tex in if Str.string_match end_coq s 0 then [] else s :: read_lines (k - 1) in let al = read_lines (n - 1) in if !verbose then List.iter (Printf.printf " %s\n") al; if show_questions then List.iter (fun s -> encapsule false c_out (" " ^ s)) al; let la = n - 1 - List.length al in if la <> 0 then (* this happens when the block ends with a comment; the output is for the command at the beginning of the next block *) unhandled_output := Some (bl, la) else begin if !verbose then List.iter print_endline bl; if show_answers then print_block c_out bl; inside show_answers show_questions (show_answers || show_questions) 0 end end else if discarded > 0 then begin (* this happens when the block ends with an empty line *) let (bl,n) = read_output () in assert (n > discarded); unhandled_output := Some (bl, n - discarded) end in (* we are outside of a \begin{coq_...} ... 
\end{coq_...} block *) let rec outside just_after = let start_block () = if !small then output_string c_out "\\begin{small}\n"; output_string c_out "\\begin{flushleft}\n"; if !hrule then output_string c_out "\\hrulefill\\\\\n" in let end_block () = if !hrule then output_string c_out "\\hrulefill\\\\\n"; output_string c_out "\\end{flushleft}\n"; if !small then output_string c_out "\\end{small}\n" in let s = input_line c_tex in if Str.string_match begin_coq s 0 then begin let kind = Str.matched_group 1 s in if kind = "eval" then begin if just_after then end_block (); inside false false false 0; outside false end else begin let show_answers = kind <> "example*" in let show_questions = kind <> "example#" in if not just_after then start_block (); inside show_answers show_questions just_after 0; outside true end end else begin if just_after then end_block (); output_string c_out (s ^ "\n"); outside false end in try let _ = read_output () in (* to skip the Coq banner *) let _ = read_output () in (* to skip the Coq answer to Set Printing Width *) outside false with End_of_file -> begin close_in c_tex; close_in c_coq; close_out c_out end (* Process of one TeX file *) let rm f = try Sys.remove f with _ -> () let one_file texfile = let inputv = Filename.temp_file "coq_tex" ".v" in let coq_output = Filename.temp_file "coq_tex" ".coq_output"in let result = if !output_specified then !output else if Filename.check_suffix texfile ".tex" then (Filename.chop_suffix texfile ".tex") ^ ".v.tex" else texfile ^ ".v.tex" in try (* 1. extract Coq phrases *) extract texfile inputv; (* 2. run Coq on input *) let _ = Sys.command (Printf.sprintf "%s < %s > %s 2>&1" !image inputv coq_output) in (* 3. insert Coq output into original file *) insert texfile coq_output result; (* 4. clean up *) rm inputv; rm coq_output with reraise -> begin rm inputv; rm coq_output; raise reraise end (* Parsing of the command line, check of the Coq command and process * of all the files in the command line, one by one *) let files = ref [] let parse_cl () = Arg.parse [ "-o", Arg.String (fun s -> output_specified := true; output := s), "output-file Specify the resulting LaTeX file"; "-n", Arg.Int (fun n -> linelen := n), "line-width Set the line width"; "-image", Arg.String (fun s -> image := s), "coq-image Use coq-image as Coq command"; "-w", Arg.Set cut_at_blanks, " Try to cut lines at blanks"; "-v", Arg.Set verbose, " Verbose mode (show Coq answers on stdout)"; "-sl", Arg.Set slanted, " Coq answers in slanted font (only with LaTeX2e)"; "-hrule", Arg.Set hrule, " Coq parts are written between 2 horizontal lines"; "-small", Arg.Set small, " Coq parts are written in small font"; ] (fun s -> files := s :: !files) "coq-tex [options] file ..." 
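(* A minimal sketch of the escaping performed by [tex_escaped] above.  The
   expected strings on the right are assumptions read off the
   [adapt_delim] table, not authoritative output. *)
let _tex_escaped_examples : (string * string) list =
  [ tex_escaped "a_b", "a{\\char`\\_}b";   (* underscore gets boxed *)
    tex_escaped "x<y", "x{<}y" ]           (* angle bracket gets braced *)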
let find_coqtop () = let prog = Sys.executable_name in try let size = String.length prog in let i = Str.search_backward (Str.regexp_string "coq-tex") prog (size-7) in (String.sub prog 0 i)^"coqtop"^(String.sub prog (i+7) (size-i-7)) with Not_found -> begin Printf.printf "Warning: preprocessing with default image \"coqtop\"\n"; "coqtop" end let _ = parse_cl (); if !image = "" then image := Filename.quote (find_coqtop ()); if Sys.command (!image ^ " -batch -silent") <> 0 then begin Printf.printf "Error: "; let _ = Sys.command (!image ^ " -batch") in exit 1 end else begin (*Printf.printf "Your version of coqtop seems OK\n";*) flush stdout end; List.iter one_file (List.rev !files) coq-8.15.0/tools/coqdep.ml000066400000000000000000000040621417001151100153250ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* CD.add_norec_dir_import CD.add_v_known "." []); (* We don't setup any loadpath if the -boot is passed *) if not !CD.option_boot then begin let env = Boot.Env.init () in let stdlib = Boot.Env.(stdlib env |> Path.to_string) in let plugins = Boot.Env.(plugins env |> Path.to_string) in let user_contrib = Boot.Env.(user_contrib env |> Path.to_string) in CD.add_rec_dir_import CD.add_coqlib_known stdlib ["Coq"]; CD.add_rec_dir_import CD.add_coqlib_known plugins ["Coq"]; if Sys.file_exists user_contrib then CD.add_rec_dir_no_import CD.add_coqlib_known user_contrib []; List.iter (fun s -> CD.add_rec_dir_no_import CD.add_coqlib_known s []) (Envars.xdg_dirs ~warn:(fun x -> CD.coqdep_warning "%s" x)); List.iter (fun s -> CD.add_rec_dir_no_import CD.add_coqlib_known s []) Envars.coqpath; end; if !CD.option_sort then CD.sort () else CD.coq_dependencies () let _ = try coqdep () with exn -> Format.eprintf "*** Error: @[%a@]@\n%!" Pp.pp_with (CErrors.print exn); exit 1 coq-8.15.0/tools/coqdep.mli000066400000000000000000000012431417001151100154740ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* fprintf fmt "@]\n%!") err_formatter args module StrSet = Set.Make(String) type dynlink = Opt | Byte | Both | No | Variable let option_noglob = ref false let option_dynlink = ref Both let option_boot = ref false type dir = string option (** [get_extension f l] checks whether [f] has one of the extensions listed in [l]. It returns [f] without its extension, alongside with the extension. When no extension match, [(f,"")] is returned *) let rec get_extension f = function | [] -> (f, "") | s :: _ when Filename.check_suffix f s -> (Filename.chop_suffix f s, s) | _ :: l -> get_extension f l (** File comparison *) let absolute_dir dir = let current = Sys.getcwd () in Sys.chdir dir; let dir' = Sys.getcwd () in Sys.chdir current; dir' let absolute_file_name basename odir = let dir = match odir with Some dir -> dir | None -> "." 
in absolute_dir dir // basename let compare_file f1 f2 = absolute_file_name (Filename.basename f1) (Some (Filename.dirname f1)) = absolute_file_name (Filename.basename f2) (Some (Filename.dirname f2)) (** [basename_noext] removes both the directory part and the extension (if necessary) of a filename *) let basename_noext filename = let fn = Filename.basename filename in try Filename.chop_extension fn with Invalid_argument _ -> fn (** Coq files specifies on the command line: - first string is the full filename, with only its extension removed - second string is the absolute version of the previous (via getcwd) *) let vAccu = ref ([] : (string * string) list) (** Queue operations *) let addQueue q v = q := v :: !q type dirname = string type basename = string type filename = string type dirpath = string list type root = filename * dirpath type result = | ExactMatches of filename list | PartialMatchesInSameRoot of root * filename list let add_set f l = f :: CList.remove compare_file f l let insert_key root (full,f) m = (* An exact match takes precedence over non-exact matches *) match full, m with | true, ExactMatches l -> (* We add a conflict *) ExactMatches (add_set f l) | true, PartialMatchesInSameRoot _ -> (* We give priority to exact match *) ExactMatches [f] | false, ExactMatches l -> (* We keep the exact match *) m | false, PartialMatchesInSameRoot (root',l) -> PartialMatchesInSameRoot (root, if root = root' then add_set f l else [f]) let safe_add_key q root key (full,f as file) = try let l = Hashtbl.find q key in Hashtbl.add q key (insert_key root file l) with Not_found -> Hashtbl.add q key (if full then ExactMatches [f] else PartialMatchesInSameRoot (root,[f])) let safe_add q root ((from, suffixes), file) = List.iter (fun (full,suff) -> safe_add_key q root (from,suff) (full,file)) suffixes (** Files found in the loadpaths. For the ML files, the string is the basename without extension. *) let same_path_opt s s' = let nf s = (* ./foo/a.ml and foo/a.ml are the same file *) if Filename.is_implicit s then "." // s else s in let s = match s with None -> "." | Some s -> nf s in let s' = match s' with None -> "." | Some s' -> nf s' in s = s' let warning_ml_clash x s suff s' suff' = if suff = suff' && not (same_path_opt s s') then coqdep_warning "%s%s already found in %s (discarding %s%s)\n" x suff (match s with None -> "." | Some d -> d) ((match s' with None -> "." | Some d -> d) // x) suff let mkknown () = let h = (Hashtbl.create 19 : (string, dir * string) Hashtbl.t) in let add x s suff = try let s',suff' = Hashtbl.find h x in warning_ml_clash x s' suff' s suff with Not_found -> Hashtbl.add h x (s,suff) and iter f = Hashtbl.iter (fun x (s,_) -> f x s) h and search x = try Some (fst (Hashtbl.find h x)) with Not_found -> None in add, iter, search let add_mllib_known, _, search_mllib_known = mkknown () let add_mlpack_known, _, search_mlpack_known = mkknown () let vKnown = (Hashtbl.create 19 : (dirpath * dirpath, result) Hashtbl.t) (* The associated boolean is true if this is a root path. 
*) let coqlibKnown = (Hashtbl.create 19 : (dirpath * dirpath, result) Hashtbl.t) let search_table table ?(from=[]) s = Hashtbl.find table (from, s) let search_v_known ?from s = try Some (search_table vKnown ?from s) with Not_found -> None let is_in_coqlib ?from s = try let _ = search_table coqlibKnown ?from s in true with Not_found -> false let error_cannot_parse s (i,j) = Printf.eprintf "File \"%s\", characters %i-%i: Syntax error\n" s i j; exit 1 let error_cannot_open_project_file msg = Printf.eprintf "%s\n" msg; exit 1 let error_cannot_parse_project_file file msg = Printf.eprintf "Project file \"%s\": Syntax error: %s\n" file msg; exit 1 let error_cannot_stat s unix_error = Printf.eprintf "%s\n" (error_message unix_error); exit 1 let error_cannot_stat_in f s unix_error = Printf.eprintf "In file \"%s\": %s\n" f (error_message unix_error); exit 1 let error_cannot_open s msg = (* Print an arbitrary line number, such that the message matches common error message pattern. *) Printf.eprintf "%s: %s\n" s msg; exit 1 let warning_module_notfound from f s = match from with | None -> coqdep_warning "in file %s, library %s is required and has not been found in the loadpath!" f (String.concat "." s) | Some pth -> coqdep_warning "in file %s, library %s is required from root %s and has not been found in the loadpath!" f (String.concat "." s) (String.concat "." pth) let warning_declare f s = coqdep_warning "in file %s, declared ML module %s has not been found!" f s let warning_clash exact file dir f1 = function | f2::fl -> let f = Filename.basename f1 in let d1 = Filename.dirname f1 in let d2 = Filename.dirname f2 in let dl = List.rev_map Filename.dirname fl in if exact then begin eprintf "*** Warning: in file %s, \n required library %s exactly matches several files in path\n (found %s.v in " file (String.concat "." dir) f; List.iter (fun s -> eprintf "%s, " s) dl; eprintf "%s and %s; used the latter).\n" d2 d1 end else begin eprintf "*** Warning: in file %s, \n required library %s matches several files in path\n (found %s.v in " file (String.concat "." dir) f; List.iter (fun s -> eprintf "%s, " s) dl; eprintf "%s and %s; Require will fail).\n" d2 d1 end | [] -> assert false let warning_cannot_open_dir dir = coqdep_warning "cannot open %s" dir let safe_assoc from verbose file k = match search_v_known ?from k with | None -> None | Some (ExactMatches (f :: l)) -> if verbose && not (CList.is_empty l) then warning_clash true file k f l; Some [f] | Some (PartialMatchesInSameRoot (root, l)) -> (match List.sort String.compare l with [] -> assert false | f :: l as all -> (* If several files match, it will fail at Require; To be "fair", in coqdep, we add dependencies on all matching files *) if verbose && not (CList.is_empty l) then warning_clash false file k f l; Some all) | Some (ExactMatches []) -> assert false (** [find_dir_logpath dir] Return the logical path of directory [dir] if it has been given one. Raise [Not_found] otherwise. In particular we can check if "." has been attributed a logical path after processing all options and silently give the default one if it hasn't. We may also use this to warn if a physical path is met twice. 
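    For instance (a hypothetical illustration): once a directory
    theories/Lists has been registered under the logical path
    [Foo; Lists], applying [find_dir_logpath] to that same physical
    directory returns [Foo; Lists], while applying it to an unregistered
    directory raises [Not_found].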
*) let register_dir_logpath,find_dir_logpath = let tbl: (string, string list) Hashtbl.t = Hashtbl.create 19 in let reg physdir logpath = Hashtbl.add tbl (absolute_dir physdir) logpath in let fnd physdir = Hashtbl.find tbl (absolute_dir physdir) in reg,fnd let file_name s = function | None -> s | Some d -> d // s (* Makefile's escaping rules are awful: $ is escaped by doubling and other special characters are escaped by backslash prefixing while backslashes themselves must be escaped only if part of a sequence followed by a special character (i.e. in case of ambiguity with a use of it as escaping character). Moreover (even if not crucial) it is apparently not possible to directly escape ';' and leading '\t'. *) let escape = let s' = Buffer.create 10 in fun s -> Buffer.clear s'; for i = 0 to String.length s - 1 do let c = s.[i] in if c = ' ' || c = '#' || c = ':' (* separators and comments *) || c = '%' (* pattern *) || c = '?' || c = '[' || c = ']' || c = '*' (* expansion in filenames *) || i=0 && c = '~' && (String.length s = 1 || s.[1] = '/' || 'A' <= s.[1] && s.[1] <= 'Z' || 'a' <= s.[1] && s.[1] <= 'z') (* homedir expansion *) then begin let j = ref (i-1) in while !j >= 0 && s.[!j] = '\\' do Buffer.add_char s' '\\'; decr j (* escape all preceding '\' *) done; Buffer.add_char s' '\\'; end; if c = '$' then Buffer.add_char s' '$'; Buffer.add_char s' c done; Buffer.contents s' let canonize f = let f' = absolute_dir (Filename.dirname f) // Filename.basename f in match List.filter (fun (_,full) -> f' = full) !vAccu with | (f,_) :: _ -> escape f | _ -> escape f module VData = struct type t = string list option * string list let compare = compare end module VCache = Set.Make(VData) (** To avoid reading .v files several times for computing dependencies, once for .vo, once for .vio, and once for .vos extensions, the following code performs a single pass and produces a structured list of dependencies, separating dependencies on compiled Coq files (those loaded by [Require]) from other dependencies, e.g. dependencies on ".v" files (for [Load]) or ".cmx", ".cmo", etc... (for [Declare]). *) type dependency = | DepRequire of string (* one basename, to which we later append .vo or .vio or .vos *) | DepOther of string (* filenames of dependencies, separated by spaces *) let string_of_dependency_list suffix_for_require deps = let string_of_dep = function | DepRequire basename -> basename ^ suffix_for_require | DepOther s -> s in String.concat " " (List.map string_of_dep deps) let rec find_dependencies basename = let verbose = true in (* for past/future use? 
*) try (* Visited marks *) let visited_ml = ref StrSet.empty in let visited_v = ref VCache.empty in let should_visit_v_and_mark from str = if not (VCache.mem (from, str) !visited_v) then begin visited_v := VCache.add (from, str) !visited_v; true end else false in (* Output: dependencies found *) let dependencies = ref [] in let add_dep dep = dependencies := dep::!dependencies in let add_dep_other s = add_dep (DepOther s) in (* Reading file contents *) let f = basename ^ ".v" in let chan = open_in f in let buf = Lexing.from_channel chan in try while true do let tok = coq_action buf in match tok with | Require (from, strl) -> List.iter (fun str -> if should_visit_v_and_mark from str then begin match safe_assoc from verbose f str with | Some files -> List.iter (fun file_str -> add_dep (DepRequire (canonize file_str))) files | None -> if verbose && not (is_in_coqlib ?from str) then warning_module_notfound from f str end) strl | Declare sl -> let declare suff dir s = let base = escape (file_name s dir) in match !option_dynlink with | No -> () | Byte -> add_dep_other (sprintf "%s%s" base suff) | Opt -> add_dep_other (sprintf "%s.cmxs" base) | Both -> add_dep_other (sprintf "%s%s" base suff); add_dep_other (sprintf "%s.cmxs" base) | Variable -> add_dep_other (sprintf "%s%s" base (if suff=".cmo" then "$(DYNOBJ)" else "$(DYNLIB)")) in let decl str = let s = basename_noext str in if not (StrSet.mem s !visited_ml) then begin visited_ml := StrSet.add s !visited_ml; match search_mllib_known s with | Some mldir -> declare ".cma" mldir s | None -> match search_mlpack_known s with | Some mldir -> declare ".cmo" mldir s | None -> warning_declare f str end in List.iter decl sl | Load file -> let canon = match file with | Logical str -> if should_visit_v_and_mark None [str] then safe_assoc None verbose f [str] else None | Physical str -> if String.equal (Filename.basename str) str then if should_visit_v_and_mark None [str] then safe_assoc None verbose f [str] else None else Some [canonize str] in (match canon with | None -> () | Some l -> List.iter (fun canon -> add_dep_other (sprintf "%s.v" canon); let deps = find_dependencies canon in List.iter add_dep deps) l) | AddLoadPath _ | AddRecLoadPath _ -> (* TODO: will this be handled? *) () done; List.rev !dependencies with | Fin_fichier -> close_in chan; List.rev !dependencies | Syntax_error (i,j) -> close_in chan; error_cannot_parse f (i,j) with Sys_error msg -> error_cannot_open (basename ^ ".v") msg let write_vos = ref false let coq_dependencies () = List.iter (fun (name,_) -> let ename = escape name in let glob = if !option_noglob then "" else ename^".glob " in let deps = find_dependencies name in printf "%s.vo %s%s.v.beautified %s.required_vo: %s.v %s\n" ename glob ename ename ename (string_of_dependency_list ".vo" deps); printf "%s.vio: %s.v %s\n" ename ename (string_of_dependency_list ".vio" deps); if !write_vos then printf "%s.vos %s.vok %s.required_vos: %s.v %s\n" ename ename ename ename (string_of_dependency_list ".vos" deps); printf "%!") (List.rev !vAccu) (** Compute the suffixes of a logical path together with the length of the missing part *) let rec suffixes full = function | [] -> assert false | [name] -> [full,[name]] | dir::suffix as l -> (full,l)::suffixes false suffix (** Compute all the pairs [(from,suffs] such that a logical path decomposes into [from @ ... @ suff] for some [suff] in [suffs], i.e. 
such that once [from] is fixed, [From from Require suff] refers (in the absence of ambiguity) to this logical path for exactly the [suff] in [suffs] *) let rec cuts recur = function | [] -> [] | [dir] -> [[],[true,[dir]]] | dir::tail as l -> ([],if recur then suffixes true l else [true,l]) :: List.map (fun (fromtail,suffixes) -> (dir::fromtail,suffixes)) (cuts true tail) let add_caml_known _ phys_dir _ f = let basename,suff = get_extension f [".mllib"; ".mlpack"] in match suff with | ".mllib" -> add_mllib_known basename (Some phys_dir) suff | ".mlpack" -> add_mlpack_known basename (Some phys_dir) suff | _ -> () let add_paths recur root table phys_dir log_dir basename = let name = log_dir@[basename] in let file = phys_dir//basename in let paths = cuts recur name in let iter n = safe_add table root (n, file) in List.iter iter paths let add_coqlib_known recur root phys_dir log_dir f = let root = (phys_dir, log_dir) in match get_extension f [".vo"; ".vio"; ".vos"] with | (basename, (".vo" | ".vio" | ".vos")) -> add_paths recur root coqlibKnown phys_dir log_dir basename | _ -> () let add_v_known recur root phys_dir log_dir f = match get_extension f [".v"; ".vo"; ".vio"; ".vos"] with | (basename,".v") -> add_paths recur root vKnown phys_dir log_dir basename | (basename, (".vo" | ".vio" | ".vos")) when not(!option_boot) -> add_paths recur root vKnown phys_dir log_dir basename | _ -> () (** Visit all the directories under [dir], including [dir], in the same order as for [coqc]/[coqtop] in [System.all_subdirs], that is, assuming Sys.readdir to have this structure: ├── B │ └── E.v │ └── C1 │ │ └── E.v │ │ └── D1 │ │ └── E.v │ │ └── F.v │ │ └── D2 │ │ └── E.v │ │ └── G.v │ └── F.v │ └── C2 │ │ └── E.v │ │ └── D1 │ │ └── E.v │ │ └── F.v │ │ └── D2 │ │ └── E.v │ │ └── G.v │ └── G.v it goes in this (reverse) order: B.C2.D1.E, B.C2.D2.E, B.C2.E, B.C2.F, B.C2.G B.C1.D1.E, B.C1.D2.E, B.C1.E, B.C1.F, B.C1.G, B.E, B.F, B.G, (see discussion at PR #14718) *) let add_directory recur add_file phys_dir log_dir = let root = (phys_dir, log_dir) in let stack = ref [] in let curdirfiles = ref [] in let subdirfiles = ref [] in let rec aux phys_dir log_dir = if exists_dir phys_dir then begin register_dir_logpath phys_dir log_dir; let f = function | FileDir (phys_f,f) -> if recur then begin stack := (!curdirfiles, !subdirfiles) :: !stack; curdirfiles := []; subdirfiles := []; aux phys_f (log_dir @ [f]); let curdirfiles', subdirfiles' = List.hd !stack in subdirfiles := subdirfiles' @ !subdirfiles @ !curdirfiles; curdirfiles := curdirfiles'; stack := List.tl !stack end | FileRegular f -> curdirfiles := (phys_dir, log_dir, f) :: !curdirfiles in process_directory f phys_dir end else warning_cannot_open_dir phys_dir in aux phys_dir log_dir; List.iter (fun (phys_dir, log_dir, f) -> add_file root phys_dir log_dir f) !subdirfiles; List.iter (fun (phys_dir, log_dir, f) -> add_file root phys_dir log_dir f) !curdirfiles (** Simply add this directory and imports it, no subdirs. This is used by the implicit adding of the current path (which is not recursive). *) let add_norec_dir_import add_file phys_dir log_dir = add_directory false (add_file true) phys_dir log_dir (** -Q semantic: go in subdirs but only full logical paths are known. *) let add_rec_dir_no_import add_file phys_dir log_dir = add_directory true (add_file false) phys_dir log_dir (** -R semantic: go in subdirs and suffixes of logical paths are known. 
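    For instance (hypothetical paths): with -R theories Lib, the file
    theories/Data/List.v is known under Lib.Data.List but also under the
    suffixes Data.List and List, whereas -Q would only make the fully
    qualified Lib.Data.List known.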
*) let add_rec_dir_import add_file phys_dir log_dir = add_directory true (add_file true) phys_dir log_dir (** -I semantic: do not go in subdirs. *) let add_caml_dir phys_dir = add_directory false add_caml_known phys_dir [] exception Cannot_stat_file of string * Unix.error let rec treat_file old_dirname old_name = let name = Filename.basename old_name and new_dirname = Filename.dirname old_name in let dirname = match (old_dirname,new_dirname) with | (d, ".") -> d | (None,d) -> Some d | (Some d1,d2) -> Some (d1//d2) in let complete_name = file_name name dirname in let stat_res = try stat complete_name with Unix_error(error, _, _) -> raise (Cannot_stat_file (complete_name, error)) in match stat_res.st_kind with | S_DIR -> (if name.[0] <> '.' then let newdirname = match dirname with | None -> name | Some d -> d//name in Array.iter (treat_file (Some newdirname)) (Sys.readdir complete_name)) | S_REG -> (match get_extension name [".v"] with | base,".v" -> let name = file_name base dirname and absname = absolute_file_name base dirname in addQueue vAccu (name, absname) | _ -> ()) | _ -> () let treat_file_command_line old_name = try treat_file None old_name with Cannot_stat_file (f, msg) -> error_cannot_stat f msg let treat_file_coq_project where old_name = try treat_file None old_name with Cannot_stat_file (f, msg) -> error_cannot_stat_in where f msg (* "[sort]" outputs `.v` files required by others *) let sort () = let seen = Hashtbl.create 97 in let rec loop file = let file = canonize file in if not (Hashtbl.mem seen file) then begin Hashtbl.add seen file (); let cin = open_in (file ^ ".v") in let lb = Lexing.from_channel cin in try while true do match coq_action lb with | Require (from, sl) -> List.iter (fun s -> match safe_assoc from false file s with | None -> () | Some l -> List.iter loop l) sl | _ -> () done with Fin_fichier -> close_in cin; printf "%s.v " file end in List.iter (fun (name,_) -> loop name) !vAccu let usage () = eprintf " usage: coqdep [options] +\n"; eprintf " options:\n"; eprintf " -boot : For coq developers, prints dependencies over coq library files (omitted by default).\n"; eprintf " -sort : output the given file name ordered by dependencies\n"; eprintf " -noglob | -no-glob : \n"; eprintf " -noinit : currently no effect\n"; eprintf " -f file : read -I, -Q, -R and filenames from _CoqProject-formatted FILE.\n"; eprintf " -I dir : add (non recursively) dir to ocaml path\n"; eprintf " -R dir logname : add and import dir recursively to coq load path under logical name logname\n"; eprintf " -Q dir logname : add (recursively) and open (non recursively) dir to coq load path under logical name logname\n"; eprintf " -vos : also output dependencies about .vos files\n"; eprintf " -exclude-dir dir : skip subdirectories named 'dir' during -R/-Q search\n"; eprintf " -coqlib dir : set the coq standard library directory\n"; eprintf " -dyndep (opt|byte|both|no|var) : set how dependencies over ML modules are printed\n"; exit 1 let option_sort = ref false let split_period = Str.split (Str.regexp (Str.quote ".")) let add_q_include path l = add_rec_dir_no_import add_v_known path (split_period l) let add_r_include path l = add_rec_dir_import add_v_known path (split_period l) let treat_coqproject f = let open CoqProject_file in let iter_sourced f = List.iter (fun {thing} -> f thing) in let warning_fn x = coqdep_warning "%s" x in let project = try read_project_file ~warning_fn f with | Parsing_error msg -> error_cannot_parse_project_file f msg | UnableToOpenProjectFile msg -> 
error_cannot_open_project_file msg in iter_sourced (fun { path } -> add_caml_dir path) project.ml_includes; iter_sourced (fun ({ path }, l) -> add_q_include path l) project.q_includes; iter_sourced (fun ({ path }, l) -> add_r_include path l) project.r_includes; iter_sourced (fun f' -> treat_file_coq_project f f') (all_files project) let rec parse = function | "-boot" :: ll -> option_boot := true; parse ll | "-sort" :: ll -> option_sort := true; parse ll | "-vos" :: ll -> write_vos := true; parse ll | ("-noglob" | "-no-glob") :: ll -> option_noglob := true; parse ll | "-noinit" :: ll -> (* do nothing *) parse ll | "-f" :: f :: ll -> treat_coqproject f; parse ll | "-I" :: r :: ll -> add_caml_dir r; parse ll | "-I" :: [] -> usage () | "-R" :: r :: ln :: ll -> add_r_include r ln; parse ll | "-Q" :: r :: ln :: ll -> add_q_include r ln; parse ll | "-R" :: ([] | [_]) -> usage () | "-exclude-dir" :: r :: ll -> System.exclude_directory r; parse ll | "-exclude-dir" :: [] -> usage () | "-coqlib" :: r :: ll -> Boot.Env.set_coqlib r; parse ll | "-coqlib" :: [] -> usage () | "-dyndep" :: "no" :: ll -> option_dynlink := No; parse ll | "-dyndep" :: "opt" :: ll -> option_dynlink := Opt; parse ll | "-dyndep" :: "byte" :: ll -> option_dynlink := Byte; parse ll | "-dyndep" :: "both" :: ll -> option_dynlink := Both; parse ll | "-dyndep" :: "var" :: ll -> option_dynlink := Variable; parse ll | ("-h"|"--help"|"-help") :: _ -> usage () | opt :: ll when String.length opt > 0 && opt.[0] = '-' -> coqdep_warning "unknown option %s" opt; parse ll | f :: ll -> treat_file_command_line f; parse ll | [] -> () let init () = if Array.length Sys.argv < 2 then usage (); if not Coq_config.has_natdynlink then option_dynlink := No; parse (List.tl (Array.to_list Sys.argv)) coq-8.15.0/tools/coqdep_common.mli000066400000000000000000000044221417001151100170460ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a type dirname = string type basename = string type filename = string type dirpath = string list type root = filename * dirpath (** [find_dir_logpath dir] Return the logical path of directory [dir] if it has been given one. Raise [Not_found] otherwise. In particular we can check if "." has been attributed a logical path after processing all options and silently give the default one if it hasn't. We may also use this to warn if ap hysical path is met twice.*) val find_dir_logpath: dirname -> dirpath (** Options *) val option_sort : bool ref val option_boot : bool ref (** ML-related manipulation *) val coq_dependencies : unit -> unit val add_v_known : bool -> root -> dirname -> dirpath -> basename -> unit val add_coqlib_known : bool -> root -> dirname -> dirpath -> basename -> unit (** Simply add this directory and imports it, no subdirs. This is used by the implicit adding of the current path. *) val add_norec_dir_import : (bool -> root -> dirname -> dirpath -> basename -> unit) -> dirname -> dirpath -> unit (** -Q semantic: go in subdirs but only full logical paths are known. *) val add_rec_dir_no_import : (bool -> root -> dirname -> dirpath -> basename -> unit) -> dirname -> dirpath -> unit (** -R semantic: go in subdirs and suffixes of logical paths are known. 
*) val add_rec_dir_import : (bool -> root -> dirname -> dirpath -> basename -> unit) -> dirname -> dirpath -> unit val sort : unit -> unit (** Init coqdep, including argument parsing *) val init : unit -> unit coq-8.15.0/tools/coqdep_lexer.mli000066400000000000000000000020031417001151100166660ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* coq_token coq-8.15.0/tools/coqdep_lexer.mll000066400000000000000000000156701417001151100167070ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* s | Some _ -> syntax_error lexbuf let get_ident lexbuf = let s = Lexing.lexeme lexbuf in check_valid lexbuf s let get_field_name lexbuf = let s = Lexing.lexeme lexbuf in check_valid lexbuf (String.sub s 1 (String.length s - 1)) } let space = [' ' '\t' '\n' '\r'] let lowercase = ['a'-'z'] let uppercase = ['A'-'Z'] let identchar = ['A'-'Z' 'a'-'z' '_' '\'' '0'-'9'] let caml_up_ident = uppercase identchar* let caml_low_ident = lowercase identchar* (* This is an overapproximation, we check correctness afterwards *) let coq_ident = ['A'-'Z' 'a'-'z' '_' '\128'-'\255'] ['A'-'Z' 'a'-'z' '_' '\'' '0'-'9' '\128'-'\255']* let coq_field = '.' coq_ident let dot = '.' ( space+ | eof) rule coq_action = parse | "Require" space+ { require_modifiers None lexbuf } | "Local" space+ "Declare" space+ "ML" space+ "Module" space+ { modules [] lexbuf } | "Declare" space+ "ML" space+ "Module" space+ { modules [] lexbuf } | "Load" space+ { load_file lexbuf } | "Add" space+ "LoadPath" space+ { add_loadpath lexbuf } | "Time" space+ { coq_action lexbuf } | "Timeout" space+ ['0'-'9']+ space+ { coq_action lexbuf } | "From" space+ { from_rule lexbuf } | space+ { coq_action lexbuf } | "(*" { comment lexbuf; coq_action lexbuf } | eof { raise Fin_fichier} | _ { skip_to_dot lexbuf; coq_action lexbuf } and from_rule = parse | "(*" { comment lexbuf; from_rule lexbuf } | space+ { from_rule lexbuf } | coq_ident { let from = coq_qual_id_tail [get_ident lexbuf] lexbuf in consume_require (Some from) lexbuf } | eof { syntax_error lexbuf } | _ { syntax_error lexbuf } and require_modifiers from = parse | "(*" { comment lexbuf; require_modifiers from lexbuf } | "Import" space+ { require_file from lexbuf } | "Export" space+ { require_file from lexbuf } | space+ { require_modifiers from lexbuf } | eof { syntax_error lexbuf } | _ { backtrack lexbuf ; require_file from lexbuf } and consume_require from = parse | "(*" { comment lexbuf; consume_require from lexbuf } | space+ { consume_require from lexbuf } | "Require" space+ { require_modifiers from lexbuf } | _ { syntax_error lexbuf } and add_loadpath = parse | "(*" { comment lexbuf; add_loadpath lexbuf } | space+ { add_loadpath lexbuf } | eof { syntax_error lexbuf } | '"' [^ '"']* '"' (*'"'*) { add_loadpath_as (unquote_string (lexeme lexbuf)) lexbuf } and add_loadpath_as path = parse | "(*" { comment lexbuf; add_loadpath_as path lexbuf } | space+ { add_loadpath_as path lexbuf } | "as" { let qid = coq_qual_id lexbuf in skip_to_dot lexbuf; AddRecLoadPath (path, qid) } | dot { AddLoadPath path } and comment = parse | "(*" { comment lexbuf; comment lexbuf } | "*)" { () } | "'" [^ '\\' '\''] "'" { comment lexbuf } | "'" '\\' ['\\' '\'' 'n' 't' 'b' 'r'] "'" { comment lexbuf } | "'" '\\' 
['0'-'9'] ['0'-'9'] ['0'-'9'] "'" { comment lexbuf } | eof { raise Fin_fichier } | _ { comment lexbuf } and load_file = parse | '"' [^ '"']* '"' (*'"'*) { let s = lexeme lexbuf in parse_dot lexbuf; Load (Physical (unquote_vfile_string s)) } | coq_ident { let s = get_ident lexbuf in skip_to_dot lexbuf; Load (Logical s) } | eof { syntax_error lexbuf } | _ { syntax_error lexbuf } and require_file from = parse | "(*" { comment lexbuf; require_file from lexbuf } | space+ { require_file from lexbuf } | coq_ident { let name = coq_qual_id_tail [get_ident lexbuf] lexbuf in let qid = coq_qual_id_list [name] lexbuf in parse_dot lexbuf; Require (from, qid) } | eof { syntax_error lexbuf } | _ { syntax_error lexbuf } and skip_to_dot = parse | "(*" { comment lexbuf; skip_to_dot lexbuf } | dot { () } | eof { syntax_error lexbuf } | _ { skip_to_dot lexbuf } and parse_dot = parse | dot { () } | eof { syntax_error lexbuf } | _ { syntax_error lexbuf } and coq_qual_id = parse | "(*" { comment lexbuf; coq_qual_id lexbuf } | space+ { coq_qual_id lexbuf } | coq_ident { coq_qual_id_tail [get_ident lexbuf] lexbuf } | _ { syntax_error lexbuf } and coq_qual_id_tail module_name = parse | "(*" { comment lexbuf; coq_qual_id_tail module_name lexbuf } | space+ { coq_qual_id_tail module_name lexbuf } | coq_field { coq_qual_id_tail (get_field_name lexbuf :: module_name) lexbuf } | eof { syntax_error lexbuf } | _ { backtrack lexbuf; List.rev module_name } and coq_qual_id_list module_names = parse | "(*" { comment lexbuf; coq_qual_id_list module_names lexbuf } | space+ { coq_qual_id_list module_names lexbuf } | coq_ident { let name = coq_qual_id_tail [get_ident lexbuf] lexbuf in coq_qual_id_list (name :: module_names) lexbuf } | eof { syntax_error lexbuf } | _ { backtrack lexbuf; List.rev module_names } and modules mllist = parse | space+ { modules mllist lexbuf } | "(*" { comment lexbuf; modules mllist lexbuf } | '"' [^'"']* '"' { let lex = (Lexing.lexeme lexbuf) in let str = String.sub lex 1 (String.length lex - 2) in modules (str :: mllist) lexbuf} | eof { syntax_error lexbuf } | _ { Declare (List.rev mllist) } coq-8.15.0/tools/coqdoc/000077500000000000000000000000001417001151100147665ustar00rootroot00000000000000coq-8.15.0/tools/coqdoc/alpha.ml000066400000000000000000000031661417001151100164130ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'A' | '\199' -> 'C' | '\200'..'\203' -> 'E' | '\204'..'\207' -> 'I' | '\209' -> 'N' | '\210'..'\214' -> 'O' | '\217'..'\220' -> 'U' | '\221' -> 'Y' | c -> c let norm_char_utf8 c = Char.uppercase_ascii c let norm_char c = if !utf8 then norm_char_utf8 c else if !latin1 then norm_char_latin1 c else Char.uppercase_ascii c let norm_string = String.map (fun s -> norm_char s) let compare_char c1 c2 = match norm_char c1, norm_char c2 with | ('A'..'Z' as c1), ('A'..'Z' as c2) -> compare c1 c2 | 'A'..'Z', _ -> -1 | _, 'A'..'Z' -> 1 | '_', _ -> -1 | _, '_' -> 1 | c1, c2 -> compare c1 c2 let compare_string s1 s2 = let n1 = String.length s1 in let n2 = String.length s2 in let rec cmp i = if i == n1 || i == n2 then n1 - n2 else let c = compare_char s1.[i] s2.[i] in if c == 0 then cmp (succ i) else c in cmp 0 coq-8.15.0/tools/coqdoc/alpha.mli000066400000000000000000000015631417001151100165630ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq 
Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* char -> int val compare_string : string -> string -> int (* Alphabetic normalization. *) val norm_char : char -> char val norm_string : string -> string coq-8.15.0/tools/coqdoc/cdglobals.ml000066400000000000000000000057461417001151100172660ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* "" && Filename.is_relative f then if not (Sys.file_exists !output_dir) then (Printf.eprintf "No such directory: %s\n" !output_dir; exit 1) else !output_dir / f else f let open_out_file f = out_channel := try open_out (coqdoc_out f) with Sys_error s -> Printf.eprintf "%s\n" s; exit 1 let close_out_file () = close_out !out_channel (*s Manipulations of paths and path aliases *) let normalize_path p = (* We use the Unix subsystem to normalize a physical path (relative or absolute) and get rid of symbolic links, relative links (like ./ or ../ in the middle of the path; it's tricky but it works... *) (* Rq: Sys.getcwd () returns paths without '/' at the end *) let orig = Sys.getcwd () in Sys.chdir p; let res = Sys.getcwd () in Sys.chdir orig; res let normalize_filename f = let basename = Filename.basename f in let dirname = Filename.dirname f in normalize_path dirname, basename let header_trailer = ref true let header_file = ref "" let header_file_spec = ref false let footer_file = ref "" let footer_file_spec = ref false let quiet = ref true let light = ref false let gallina = ref false let short = ref false let index = ref true let multi_index = ref false let index_name = ref "index" let toc = ref false let page_title = ref "" let title = ref "" let externals = ref true let coqlib_url = ref Coq_config.wwwstdlib let raw_comments = ref false let parse_comments = ref false let plain_comments = ref false let toc_depth = (ref None : int option ref) let lib_name = ref "Library" let lib_subtitles = ref false let interpolate = ref false let inline_notmono = ref false let charset = ref "iso-8859-1" let inputenc = ref "" let latin1 = ref false let utf8 = ref false let set_latin1 () = charset := "iso-8859-1"; inputenc := "latin1"; latin1 := true let set_utf8 () = charset := "utf-8"; inputenc := "utf8x"; utf8 := true (* Parsing options *) type coq_module = string type file = | Vernac_file of string * coq_module | Latex_file of string coq-8.15.0/tools/coqdoc/cdglobals.mli000066400000000000000000000040071417001151100174240ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string -> string val coqdoc_out : string -> string val open_out_file : string -> unit val close_out_file : unit -> unit val normalize_path : string -> string val normalize_filename : string -> string * string val header_trailer : bool ref val header_file : string ref val header_file_spec : bool ref val footer_file : string ref val footer_file_spec : bool ref val quiet : bool ref val light : bool ref val gallina : bool ref val short : bool ref val index : bool ref val multi_index : bool ref val index_name : string ref val toc : bool ref val page_title : string ref val title : string ref val externals : bool ref val coqlib_url : string ref val raw_comments : bool ref val parse_comments : bool ref val plain_comments : bool ref val toc_depth : int option ref val 
lib_name : string ref val lib_subtitles : bool ref val interpolate : bool ref val inline_notmono : bool ref val charset : string ref val inputenc : string ref val latin1 : bool ref val utf8 : bool ref val set_latin1 : unit -> unit val set_utf8 : unit -> unit type coq_module = string type file = Vernac_file of string * coq_module | Latex_file of string coq-8.15.0/tools/coqdoc/coqdoc.css000066400000000000000000000137731417001151100167630ustar00rootroot00000000000000body { padding: 0px 0px; margin: 0px 0px; background-color: white } #page { display: block; padding: 0px; margin: 0px; padding-bottom: 10px; } #header { display: block; position: relative; padding: 0; margin: 0; vertical-align: middle; border-bottom-style: solid; border-width: thin } #header h1 { padding: 0; margin: 0;} /* Contents */ #main{ display: block; padding: 10px; font-family: sans-serif; font-size: 100%; line-height: 100% } #main h1 { line-height: 95% } /* allow for multi-line headers */ #main a.idref:visited {color : #416DFF; text-decoration : none; } #main a.idref:link {color : #416DFF; text-decoration : none; } #main a.idref:hover {text-decoration : none; } #main a.idref:active {text-decoration : none; } #main a.modref:visited {color : #416DFF; text-decoration : none; } #main a.modref:link {color : #416DFF; text-decoration : none; } #main a.modref:hover {text-decoration : none; } #main a.modref:active {text-decoration : none; } #main .keyword { color : #cf1d1d } #main { color: black } .section { background-color: rgb(60%,60%,100%); padding-top: 13px; padding-bottom: 13px; padding-left: 3px; margin-top: 5px; margin-bottom: 5px; font-size : 175% } h2.section { background-color: rgb(80%,80%,100%); padding-left: 3px; padding-top: 12px; padding-bottom: 10px; font-size : 130% } h3.section { background-color: rgb(90%,90%,100%); padding-left: 3px; padding-top: 7px; padding-bottom: 7px; font-size : 115% } h4.section { /* background-color: rgb(80%,80%,80%); max-width: 20em; padding-left: 5px; padding-top: 5px; padding-bottom: 5px; */ background-color: white; padding-left: 0px; padding-top: 0px; padding-bottom: 0px; font-size : 100%; font-weight : bold; text-decoration : underline; } #main .doc { margin: 0px; font-family: sans-serif; font-size: 100%; line-height: 125%; max-width: 40em; color: black; padding: 10px; background-color: #90bdff } .inlinecode { display: inline; /* font-size: 125%; */ color: #666666; font-family: monospace } .doc .inlinecode { display: inline; font-size: 120%; color: rgb(30%,30%,70%); font-family: monospace } .doc .inlinecode .id { color: rgb(30%,30%,70%); } .inlinecodenm { display: inline; color: #444444; } .doc .code { display: inline; font-size: 120%; color: rgb(30%,30%,70%); font-family: monospace } .comment { display: inline; font-family: monospace; color: rgb(50%,50%,80%); } .code { display: block; /* padding-left: 15px; */ font-size: 110%; font-family: monospace; } table.infrule { border: 0px; margin-left: 50px; margin-top: 10px; margin-bottom: 10px; } td.infrule { font-family: monospace; text-align: center; /* color: rgb(35%,35%,70%); */ padding: 0px; line-height: 100%; } tr.infrulemiddle hr { margin: 1px 0 1px 0; } .infrulenamecol { color: rgb(60%,60%,60%); font-size: 80%; padding-left: 1em; padding-bottom: 0.1em } /* Pied de page */ #footer { font-size: 65%; font-family: sans-serif; } /* Identifiers: ) */ .id { display: inline; } .id[title="constructor"] { color: rgb(60%,0%,0%); } .id[title="var"] { color: rgb(40%,0%,40%); } .id[title="variable"] { color: rgb(40%,0%,40%); } 
.id[title="definition"] { color: rgb(0%,40%,0%); } .id[title="abbreviation"] { color: rgb(0%,40%,0%); } .id[title="lemma"] { color: rgb(0%,40%,0%); } .id[title="instance"] { color: rgb(0%,40%,0%); } .id[title="projection"] { color: rgb(0%,40%,0%); } .id[title="method"] { color: rgb(0%,40%,0%); } .id[title="inductive"] { color: rgb(0%,0%,80%); } .id[title="record"] { color: rgb(0%,0%,80%); } .id[title="class"] { color: rgb(0%,0%,80%); } .id[title="keyword"] { color : #cf1d1d; /* color: black; */ } /* Deprecated rules using the 'type' attribute of (not xhtml valid) */ .id[type="constructor"] { color: rgb(60%,0%,0%); } .id[type="var"] { color: rgb(40%,0%,40%); } .id[type="variable"] { color: rgb(40%,0%,40%); } .id[title="binder"] { color: rgb(40%,0%,40%); } .id[type="definition"] { color: rgb(0%,40%,0%); } .id[type="abbreviation"] { color: rgb(0%,40%,0%); } .id[type="lemma"] { color: rgb(0%,40%,0%); } .id[type="instance"] { color: rgb(0%,40%,0%); } .id[type="projection"] { color: rgb(0%,40%,0%); } .id[type="method"] { color: rgb(0%,40%,0%); } .id[type="inductive"] { color: rgb(0%,0%,80%); } .id[type="record"] { color: rgb(0%,0%,80%); } .id[type="class"] { color: rgb(0%,0%,80%); } .id[type="keyword"] { color : #cf1d1d; /* color: black; */ } .inlinecode .id { color: rgb(0%,0%,0%); } /* TOC */ #toc h2 { padding: 10px; background-color: rgb(60%,60%,100%); } #toc li { padding-bottom: 8px; } /* Index */ #index { margin: 0; padding: 0; width: 100%; } #index #frontispiece { margin: 1em auto; padding: 1em; width: 60%; } .booktitle { font-size : 140% } .authors { font-size : 90%; line-height: 115%; } .moreauthors { font-size : 60% } #index #entrance { text-align: center; } #index #entrance .spacer { margin: 0 30px 0 30px; } #index #footer { position: absolute; bottom: 0; } .paragraph { height: 0.75em; } ul.doclist { margin-top: 0em; margin-bottom: 0em; } .code :target { border: 2px solid #D4D4D4; background-color: #e5eecc; } coq-8.15.0/tools/coqdoc/coqdoc.sty000066400000000000000000000126441417001151100170060ustar00rootroot00000000000000 % This is coqdoc.sty, by Jean-Christophe Filliâtre % This LaTeX package is used by coqdoc (http://www.lri.fr/~filliatr/coqdoc) % % You can modify the following macros to customize the appearance % of the document. 
\NeedsTeXFormat{LaTeX2e} \ProvidesPackage{coqdoc}[2002/02/11] % % Headings % \usepackage{fancyhdr} % \newcommand{\coqdocleftpageheader}{\thepage\ -- \today} % \newcommand{\coqdocrightpageheader}{\today\ -- \thepage} % \pagestyle{fancyplain} % %BEGIN LATEX % \headsep 8mm % \renewcommand{\plainheadrulewidth}{0.4pt} % \renewcommand{\plainfootrulewidth}{0pt} % \lhead[\coqdocleftpageheader]{\leftmark} % \rhead[\leftmark]{\coqdocrightpageheader} % \cfoot{} % %END LATEX % Hevea puts to much space with \medskip and \bigskip %HEVEA\renewcommand{\medskip}{} %HEVEA\renewcommand{\bigskip}{} %HEVEA\newcommand{\lnot}{\coqwkw{not}} %HEVEA\newcommand{\lor}{\coqwkw{or}} %HEVEA\newcommand{\land}{\&} % own name \newcommand{\coqdoc}{\textsf{coqdoc}} % pretty underscores (the package fontenc causes ugly underscores) %BEGIN LATEX \def\_{\kern.08em\vbox{\hrule width.35em height.6pt}\kern.08em} %END LATEX % macro for typesetting keywords \newcommand{\coqdockw}[1]{\texttt{#1}} % macro for typesetting variable identifiers \newcommand{\coqdocvar}[1]{\textit{#1}} % macro for typesetting constant identifiers \newcommand{\coqdoccst}[1]{\textsf{#1}} % macro for typesetting module identifiers \newcommand{\coqdocmod}[1]{\textsc{\textsf{#1}}} % macro for typesetting module constant identifiers (e.g. Parameters in % module types) \newcommand{\coqdocax}[1]{\textsl{\textsf{#1}}} % macro for typesetting inductive type identifiers \newcommand{\coqdocind}[1]{\textbf{\textsf{#1}}} % macro for typesetting constructor identifiers \newcommand{\coqdocconstr}[1]{\textsf{#1}} % macro for typesetting tactic identifiers \newcommand{\coqdoctac}[1]{\texttt{#1}} % These are the real macros used by coqdoc, their typesetting is % based on the above macros by default. \newcommand{\coqdoclibrary}[1]{\coqdoccst{#1}} \newcommand{\coqdocinductive}[1]{\coqdocind{#1}} \newcommand{\coqdocdefinition}[1]{\coqdoccst{#1}} \newcommand{\coqdocvariable}[1]{\coqdocvar{#1}} \newcommand{\coqdocbinder}[1]{\coqdocvar{#1}} \newcommand{\coqdocconstructor}[1]{\coqdocconstr{#1}} \newcommand{\coqdoclemma}[1]{\coqdoccst{#1}} \newcommand{\coqdocclass}[1]{\coqdocind{#1}} \newcommand{\coqdocinstance}[1]{\coqdoccst{#1}} \newcommand{\coqdocmethod}[1]{\coqdoccst{#1}} \newcommand{\coqdocabbreviation}[1]{\coqdoccst{#1}} \newcommand{\coqdocrecord}[1]{\coqdocind{#1}} \newcommand{\coqdocprojection}[1]{\coqdoccst{#1}} \newcommand{\coqdocnotation}[1]{\coqdockw{#1}} \newcommand{\coqdocsection}[1]{\coqdoccst{#1}} \newcommand{\coqdocaxiom}[1]{\coqdocax{#1}} \newcommand{\coqdocmodule}[1]{\coqdocmod{#1}} % Environment encompassing code fragments % !!! 
CAUTION: This environment may have empty contents \newenvironment{coqdoccode}{}{} % Environment for comments \newenvironment{coqdoccomment}{\tt(*}{*)} % newline and indentation %BEGIN LATEX % Base indentation length \newlength{\coqdocbaseindent} \setlength{\coqdocbaseindent}{0em} % Beginning of a line without any Coq indentation \newcommand{\coqdocnoindent}{\noindent\kern\coqdocbaseindent} % Beginning of a line with a given Coq indentation \newcommand{\coqdocindent}[1]{\noindent\kern\coqdocbaseindent\noindent\kern#1} % End-of-the-line \newcommand{\coqdoceol}{\hspace*{\fill}\setlength\parskip{0pt}\par} % Empty lines (in code only) \newcommand{\coqdocemptyline}{\vskip 0.4em plus 0.1em minus 0.1em} \usepackage{ifpdf} \ifpdf \RequirePackage{hyperref} \hypersetup{raiselinks=true,colorlinks=true,linkcolor=black} % To do indexing, use something like: % \usepackage{multind} % \newcommand{\coqdef}[3]{\hypertarget{coq:#1}{\index{coq}{#1@#2|hyperpage}#3}} \newcommand{\coqdef}[3]{\phantomsection\hypertarget{coq:#1}{#3}} \newcommand{\coqref}[2]{\hyperlink{coq:#1}{#2}} \newcommand{\coqexternalref}[3]{\href{#1.html\##2}{#3}} \newcommand{\identref}[2]{\hyperlink{coq:#1}{\textsf {#2}}} \newcommand{\coqlibrary}[3]{\cleardoublepage\phantomsection \hypertarget{coq:#1}{\chapter{#2\texorpdfstring{\coqdoccst}{}{#3}}}} \else \newcommand{\coqdef}[3]{#3} \newcommand{\coqref}[2]{#2} \newcommand{\coqexternalref}[3]{#3} \newcommand{\texorpdfstring}[2]{#1} \newcommand{\identref}[2]{\textsf{#2}} \newcommand{\coqlibrary}[3]{\cleardoublepage\chapter{#2\coqdoccst{#3}}} \fi \usepackage{xr} \newif\if@coqdoccolors \@coqdoccolorsfalse \DeclareOption{color}{\@coqdoccolorstrue} \ProcessOptions \if@coqdoccolors \RequirePackage{xcolor} \definecolor{varpurple}{rgb}{0.4,0,0.4} \definecolor{constrmaroon}{rgb}{0.6,0,0} \definecolor{defgreen}{rgb}{0,0.4,0} \definecolor{indblue}{rgb}{0,0,0.8} \definecolor{kwred}{rgb}{0.8,0.1,0.1} \def\coqdocvarcolor{varpurple} \def\coqdockwcolor{kwred} \def\coqdoccstcolor{defgreen} \def\coqdocindcolor{indblue} \def\coqdocconstrcolor{constrmaroon} \def\coqdocmodcolor{defgreen} \def\coqdocaxcolor{varpurple} \def\coqdoctaccolor{black} \def\coqdockw#1{{\color{\coqdockwcolor}{\texttt{#1}}}} \def\coqdocvar#1{{\color{\coqdocvarcolor}{\textit{#1}}}} \def\coqdoccst#1{{\color{\coqdoccstcolor}{\textrm{#1}}}} \def\coqdocind#1{{\color{\coqdocindcolor}{\textsf{#1}}}} \def\coqdocconstr#1{{\color{\coqdocconstrcolor}{\textsf{#1}}}} \def\coqdocmod#1{{{\color{\coqdocmodcolor}{\textsc{\textsf{#1}}}}}} \def\coqdocax#1{{{\color{\coqdocaxcolor}{\textsl{\textrm{#1}}}}}} \def\coqdoctac#1{{\color{\coqdoctaccolor}{\texttt{#1}}}} \fi \endinput coq-8.15.0/tools/coqdoc/cpretty.mli000066400000000000000000000014401417001151100171620ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Cdglobals.coq_module -> unit val detect_subtitle : string -> Cdglobals.coq_module -> string option coq-8.15.0/tools/coqdoc/cpretty.mll000066400000000000000000001324401417001151100171720ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* [] | (l :: ls) -> l :: (take (n-1) ls) (* count the number of spaces at the beginning of a string *) let count_spaces s = let n = String.length s in let rec count c i = if i == n then c,i else match 
s.[i] with | '\t' -> count (c + (8 - (c mod 8))) (i + 1) | ' ' -> count (c + 1) (i + 1) | _ -> c,i in count 0 0 let count_newlines s = let len = String.length s in let n = ref 0 in String.iteri (fun i c -> match c with (* skip "\r\n" *) | '\r' when i + 1 = len || s.[i+1] = '\n' -> incr n | '\n' -> incr n | _ -> ()) s; !n (* Whether a string starts with a newline (used on strings that might match the [nl] regexp) *) let is_nl s = String.length s = 0 || let c = s.[0] in c = '\n' || c = '\r' let remove_newline s = let n = String.length s in let rec count i = if i == n || s.[i] <> '\n' then i else count (i + 1) in let i = count 0 in i, String.sub s i (n - i) let count_dashes s = let c = ref 0 in for i = 0 to String.length s - 1 do if s.[i] = '-' then incr c done; !c let cut_head_tail_spaces s = let n = String.length s in let rec look_up i = if i == n || s.[i] <> ' ' then i else look_up (i+1) in let rec look_dn i = if i == -1 || s.[i] <> ' ' then i else look_dn (i-1) in let l = look_up 0 in let r = look_dn (n-1) in if l <= r then String.sub s l (r-l+1) else s let sec_title s = let rec count lev i = if s.[i] = '*' then count (succ lev) (succ i) else let t = String.sub s i (String.length s - i) in lev, cut_head_tail_spaces t in count 0 (String.index s '*') let strip_eol s = let eol = s.[String.length s - 1] = '\n' in (eol, if eol then String.sub s 1 (String.length s - 1) else s) let is_none x = match x with | None -> true | Some _ -> false let formatted : position option ref = ref None let brackets = ref 0 let comment_level = ref 0 let in_proof = ref None let in_env start stop = let r = ref false in let start_env () = r := true; start () in let stop_env () = if !r then stop (); r := false in (fun x -> !r), start_env, stop_env let _, start_emph, stop_emph = in_env Output.start_emph Output.stop_emph let in_quote, start_quote, stop_quote = in_env Output.start_quote Output.stop_quote let url_buffer = Buffer.create 40 let url_name_buffer = Buffer.create 40 let backtrack lexbuf = lexbuf.lex_curr_pos <- lexbuf.lex_start_pos; lexbuf.lex_curr_p <- lexbuf.lex_start_p let backtrack_past_newline lexbuf = let buf = lexeme lexbuf in let splits = Str.bounded_split_delim (Str.regexp "['\n']") buf 2 in match splits with | [] -> () | (_ :: []) -> () | (s1 :: rest :: _) -> let length_skip = 1 + String.length s1 in lexbuf.lex_curr_pos <- lexbuf.lex_start_pos + length_skip (* saving/restoring the PP state *) type state = { st_gallina : bool; st_light : bool } let state_stack = Stack.create () let save_state () = Stack.push { st_gallina = !Cdglobals.gallina; st_light = !Cdglobals.light } state_stack let restore_state () = let s = Stack.pop state_stack in Cdglobals.gallina := s.st_gallina; Cdglobals.light := s.st_light let begin_show () = save_state (); Cdglobals.gallina := false; Cdglobals.light := false let end_show () = restore_state () let begin_details s = save_state (); Cdglobals.gallina := false; Cdglobals.light := false; Output.start_details s let end_details () = Output.stop_details (); restore_state () (* Reset the globals *) let reset () = formatted := None; brackets := 0; comment_level := 0 (* erasing of Section/End *) let section_re = Str.regexp "[ \t]*Section" let end_re = Str.regexp "[ \t]*End" let is_section s = Str.string_match section_re s 0 let is_end s = Str.string_match end_re s 0 let sections_to_close = ref 0 let section_or_end s = if is_section s then begin incr sections_to_close; true end else if is_end s then begin if !sections_to_close > 0 then begin decr sections_to_close; true end else false 
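(* Illustrative note on [sec_title] above: applied to a heading body such as
   ** Typing rules, it returns level 2 and the title Typing rules, i.e. the
   number of leading asterisks and the text with surrounding blanks stripped. *)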
end else true (* for item lists *) type list_compare = | Before | StartLevel of int | InLevel of int * bool (* Before : we're before any levels StartLevel : at the same column as the dash in a level InLevel : after the dash of this level, but before any deeper dashes. bool is true if this is the last level *) let find_level levels cur_indent = match levels with | [] -> Before | (l::ls) -> if cur_indent < l then Before else (* cur_indent will never be less than the head of the list *) let rec findind ls n = match ls with | [] -> InLevel (n,true) | (l :: []) -> if cur_indent = l then StartLevel n else InLevel (n,true) | (l1 :: l2 :: ls) -> if cur_indent = l1 then StartLevel n else if cur_indent < l2 then InLevel (n,false) else findind (l2 :: ls) (n+1) in findind (l::ls) 1 type is_start_list = | Rule | List of int | Neither let check_start_list str = let n_dashes = count_dashes str in let (n_spaces,_) = count_spaces str in if n_dashes >= 4 && not !Cdglobals.plain_comments then Rule else if n_dashes = 1 && not !Cdglobals.plain_comments then List n_spaces else Neither (* examine a string for subtitleness *) let subtitle m s = match Str.split_delim (Str.regexp ":") s with | [] -> false | (name::_) -> if (cut_head_tail_spaces name) = m then true else false (* tokens pretty-print *) let token_buffer = Buffer.create 1024 let token_re = Str.regexp "[ \t]*(\\*\\*[ \t]+printing[ \t]+\\([^ \t]+\\)" let printing_token_re = Str.regexp "[ \t]*\\(\\(%\\([^%]*\\)%\\)\\|\\(\\$[^$]*\\$\\)\\)?[ \t]*\\(#\\(\\(&#\\|[^#]\\)*\\)#\\)?" let add_printing_token toks pps = try if Str.string_match token_re toks 0 then let tok = Str.matched_group 1 toks in if Str.string_match printing_token_re pps 0 then let pp = (try Some (Str.matched_group 3 pps) with _ -> try Some (Str.matched_group 4 pps) with _ -> None), (try Some (Str.matched_group 6 pps) with _ -> None) in Output.add_printing_token tok pp with _ -> () let remove_token_re = Str.regexp "[ \t]*(\\*\\*[ \t]+remove[ \t]+printing[ \t]+\\([^ \t]+\\)[ \t]*\\*)" let remove_printing_token toks = try if Str.string_match remove_token_re toks 0 then let tok = Str.matched_group 1 toks in Output.remove_printing_token tok with _ -> () let output_indented_keyword s lexbuf = let nbsp,isp = count_spaces s in Output.indentation nbsp; let s = String.sub s isp (String.length s - isp) in Output.keyword s (lexeme_start lexbuf + isp) let only_gallina () = !Cdglobals.gallina && !in_proof <> None let parse_comments () = !Cdglobals.parse_comments && not (only_gallina ()) (* Advance lexbuf by n lines. 
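(* The printing directives recognised by [add_printing_token] above have the
   shape (** printing tok %latex% #html# *); for instance, a hypothetical
   (** printing -> %\ensuremath{\rightarrow}% #&rarr;# *) makes every
   occurrence of the token -> print as an arrow in the LaTeX and HTML
   backends, and either replacement part may be omitted. *)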
Equivalent to calling [Lexing.new_line lexbuf] n times *) let new_lines n lexbuf = let lcp = lexbuf.lex_curr_p in if lcp != dummy_pos then lexbuf.lex_curr_p <- { lcp with pos_lnum = lcp.pos_lnum + n; pos_bol = lcp.pos_cnum } let print_position_p chan p = Printf.fprintf chan "%s%d, character %d" (if p.pos_fname = "" then "Line " else "File \"" ^ p.pos_fname ^ "\", line ") p.pos_lnum (p.pos_cnum - p.pos_bol) let print_position chan {lex_start_p = p} = print_position_p chan p let warn msg lexbuf = eprintf "%a, warning: %s\n" print_position lexbuf msg; flush stderr exception MismatchPreformatted of position (* let debug lexbuf msg = Printf.printf "%a %s\n" print_position lexbuf.lex_start_p msg *) } (*s Regular expressions *) let space = [' ' '\t'] let nl = "\r\n" | '\n' | '\r' let space_nl = space | nl let firstchar = ['A'-'Z' 'a'-'z' '_'] | (* superscript 1 *) '\194' '\185' | (* utf-8 latin 1 supplement *) '\195' ['\128'-'\150'] | '\195' ['\152'-'\182'] | '\195' ['\184'-'\191'] | (* utf-8 letterlike symbols *) '\206' (['\145'-'\161'] | ['\163'-'\191']) | '\207' (['\145'-'\191']) | '\226' ('\130' [ '\128'-'\137' ] (* subscripts *) | '\129' [ '\176'-'\187' ] (* superscripts *) | '\132' ['\128'-'\191'] | '\133' ['\128'-'\143']) let identchar = firstchar | ['\'' '0'-'9' '@' ] let id = firstchar identchar* let pfx_id = (id '.')* let identifier = id | pfx_id id (* This misses unicode stuff, and it adds "[" and "]". It's only an approximation of idents - used for detecting whether an underscore is part of an identifier or meant to indicate emphasis *) let nonidentchar = [^ 'A'-'Z' 'a'-'z' '_' '[' ']' '\'' '0'-'9' '@' '\"' '\'' '`'] let printing_token = [^ ' ' '\t']* let thm_token = "Theorem" | "Lemma" | "Fact" | "Remark" | "Corollary" | "Proposition" | "Property" | "Goal" let prf_token = "Next" space+ "Obligation" | "Proof" (space* "." | space+ "with" | space+ "using") let immediate_prf_token = (* Approximation of a proof term, if not in the prf_token case *) (* To be checked after prf_token *) "Proof" space* [^ '.' 'w' 'u'] let def_token = "Definition" | "Let" | "Let" space* "Fixpoint" | "Let" space* "CoFixpoint" | "Class" | "SubClass" | "Example" | "Fixpoint" | "Function" | "Boxed" | "CoFixpoint" | "Record" | "Variant" | "Structure" | "Scheme" | "Combined" space+ "Scheme" | "Inductive" | "CoInductive" | "Equations" | "Instance" | "Existing" space+ ("Instance" | "Instances" | "Class") | "Declare" space+ "Instance" | "Global" space+ "Instance" | "Functional" space+ "Scheme" let decl_token = "Hypothesis" | "Hypotheses" | "Parameter" 's'? | "Axiom" 's'? | "Conjecture" | "Primitive" | "Constraint" | "Universe" | "Universes" | "Register" let gallina_ext = "Module" | "Include" space+ "Type" | "Include" | "Declare" space+ "Module" | "Transparent" | "Opaque" | "Typeclasses" space+ "Transparent" | "Typeclasses" space+ "Opaque" | "Canonical" | "Coercion" | "Identity" | "Implicit" | "Tactic" space+ "Notation" | "Section" | "Context" | "Variable" 's'? | ("Hypothesis" | "Hypotheses") | "End" let notation_kw = "Notation" | "Infix" | "Reserved" space+ "Notation" | "Reserved" space+ "Infix" let commands = "Pwd" | "Cd" | "Drop" | "ProtectedLoop" | "Quit" | "Restart" | "Load" | "Add" | "Remove" space+ "Loadpath" | "Print" | "Inspect" | "About" | "SearchAbout" | "SearchRewrite" | "Search" | "Locate" | "Eval" | "Reset" | "Check" | "Type" | "Section" | "Chapter" | "Variable" 's'? 
| ("Hypothesis" | "Hypotheses") | "End" let end_kw = immediate_prf_token | "Qed" | "Defined" | "Save" | "Admitted" | "Abort" let extraction = "Extraction" | "Recursive" space+ "Extraction" | "Extract" let gallina_kw = thm_token | def_token | decl_token | gallina_ext | commands | extraction let prog_kw = "Program" space+ gallina_kw | "Obligation" | "Obligations" | "Solve" let hint_kw = "Extern" | "Rewrite" | "Resolve" | "Immediate" | "Transparent" | "Opaque" | "Unfold" | "Constructors" let set_kw = "Printing" space+ ("Coercions" | "Universes" | "All") | "Implicit" space+ "Arguments" let gallina_kw_to_hide = "Implicit" space+ "Arguments" | "Arguments" | ("Local" space+)? "Ltac" | "From" | "Require" | "Import" | "Export" | "Load" | "Hint" space+ hint_kw | "Create" space+ "HintDb" | "Removed" space+ "Hints" | "Open" | "Close" | "Delimit" | "Undelimit" | "Declare" space+ "Scope" | "Bind" space+ "Scope" | "Format" | "Transparent" | "Opaque" | "Strategy" | "Derive" | "Generalizable" space+ ("All" space+ "Variables" | "No" space+ "Variables" | "Variable" | "Variables") | ("Declare" space+ ("Morphism" | "Step") ) | ("Set" | "Unset") space+ set_kw | "Declare" space+ ("Left" | "Right") space+ "Step" | "Debug" space+ ("On" | "Off") | "Collection" let section = "*" | "**" | "***" | "****" let item_space = " " let begin_hide = "(*" space* "begin" space+ "hide" space* "*)" space* let end_hide = "(*" space* "end" space+ "hide" space* "*)" space* let begin_show = "(*" space* "begin" space+ "show" space* "*)" space* let end_show = "(*" space* "end" space+ "show" space* "*)" space* let begin_details = "(*" space* "begin" space+ "details" space* let end_details = "(*" space* "end" space+ "details" space* "*)" space* (* let begin_verb = "(*" space* "begin" space+ "verb" space* "*)" let end_verb = "(*" space* "end" space+ "verb" space* "*)" *) (*s Scanning Coq, at beginning of line *) rule coq_bol = parse | space* (nl+ as s) { new_lines (String.length s) lexbuf; if not (!in_proof <> None && (!Cdglobals.gallina || !Cdglobals.light)) then Output.empty_line_of_code (); coq_bol lexbuf } | space* "(**" (space_nl as s) { if is_nl s then new_lines 1 lexbuf; Output.end_coq (); Output.start_doc (); let eol = doc_bol lexbuf in Output.end_doc (); Output.start_coq (); if eol then coq_bol lexbuf else coq lexbuf } | space* "Comments" (space_nl as s) { if is_nl s then new_lines 1 lexbuf; Output.end_coq (); Output.start_doc (); comments lexbuf; Output.end_doc (); Output.start_coq (); coq lexbuf } | space* begin_hide nl { new_lines 1 lexbuf; skip_hide lexbuf; coq_bol lexbuf } | space* begin_show nl { new_lines 1 lexbuf; begin_show (); coq_bol lexbuf } | space* end_show nl { new_lines 1 lexbuf; end_show (); coq_bol lexbuf } | space* begin_details (* At this point, the comment remains open, and will be closed by [details_body] *) { let s = details_body lexbuf in Output.end_coq (); begin_details s; Output.start_coq (); coq_bol lexbuf } | space* end_details nl { new_lines 1 lexbuf; Output.end_coq (); end_details (); Output.start_coq (); coq_bol lexbuf } | space* (("Local"|"Global") space+)? 
gallina_kw_to_hide { let s = lexeme lexbuf in if !Cdglobals.light && section_or_end s then let eol = skip_to_dot lexbuf in if eol then (coq_bol lexbuf) else coq lexbuf else begin output_indented_keyword s lexbuf; let eol = body lexbuf in if eol then coq_bol lexbuf else coq lexbuf end } | space* thm_token { let s = lexeme lexbuf in output_indented_keyword s lexbuf; let eol = body lexbuf in in_proof := Some eol; if eol then coq_bol lexbuf else coq lexbuf } | space* prf_token { in_proof := Some true; let eol = if not !Cdglobals.gallina then begin backtrack lexbuf; body_bol lexbuf end else let s = lexeme lexbuf in if s.[String.length s - 1] = '.' then false else skip_to_dot lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | space* end_kw { let eol = if not (only_gallina ()) then begin backtrack lexbuf; body_bol lexbuf end else skip_to_dot lexbuf in in_proof := None; if eol then coq_bol lexbuf else coq lexbuf } | space* gallina_kw { in_proof := None; let s = lexeme lexbuf in output_indented_keyword s lexbuf; let eol= body lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | space* prog_kw { in_proof := None; let s = lexeme lexbuf in output_indented_keyword s lexbuf; let eol= body lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | space* notation_kw { let s = lexeme lexbuf in output_indented_keyword s lexbuf; let eol= start_notation_string lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | space* "(**" space+ "printing" space+ printing_token space+ { let tok = lexeme lexbuf in let s = printing_token_body lexbuf in add_printing_token tok s; coq_bol lexbuf } | space* "(**" space+ "printing" space+ { warn "bad 'printing' command" lexbuf; comment_level := 1; ignore (comment lexbuf); coq_bol lexbuf } | space* "(**" space+ "remove" space+ "printing" space+ printing_token space* "*)" { remove_printing_token (lexeme lexbuf); coq_bol lexbuf } | space* "(**" space+ "remove" space+ "printing" space+ { warn "bad 'remove printing' command" lexbuf; comment_level := 1; ignore (comment lexbuf); coq_bol lexbuf } | space* "(*" { comment_level := 1; let eol = if parse_comments () then begin let s = lexeme lexbuf in let nbsp, isp = count_spaces s in Output.indentation nbsp; Output.start_comment (); comment lexbuf end else skipped_comment lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | space* "#[" { let eol = begin backtrack lexbuf; body_bol lexbuf end in if eol then coq_bol lexbuf else coq lexbuf } | eof { () } | _ { let eol = if not !Cdglobals.gallina then begin backtrack lexbuf; body_bol lexbuf end else skip_to_dot_or_brace lexbuf in if eol then coq_bol lexbuf else coq lexbuf } (*s Scanning Coq elsewhere *) and coq = parse | nl { new_lines 1 lexbuf; if not (only_gallina ()) then Output.line_break(); coq_bol lexbuf } | "(**" (space_nl as s) { if is_nl s then new_lines 1 lexbuf; Output.end_coq (); Output.start_doc (); let eol = doc_bol lexbuf in Output.end_doc (); Output.start_coq (); if eol then coq_bol lexbuf else coq lexbuf } | "(*" { comment_level := 1; let eol = if parse_comments () then begin Output.start_comment (); comment lexbuf end else skipped_comment lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | (nl+ as s) space* "]]" { new_lines (count_newlines s) lexbuf; if is_none !formatted then begin (* Isn't this an anomaly *) let s = lexeme lexbuf in let nlsp,s = remove_newline s in let nbsp,isp = count_spaces s in Output.indentation nbsp; let loc = lexeme_start lexbuf + isp + nlsp in Output.sublexer ']' loc; Output.sublexer ']' (loc+1); coq lexbuf end } | eof { () } | 
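(* Reminder (illustrative): inside documentation, Coq code can be quoted in a
   preformatted block opened by [[ at the end of a line and closed by ]] on a
   line of its own; the stray ]] case just above and the [formatted] flag are
   part of that machinery, and a [[ left unclosed at end of file raises
   MismatchPreformatted. *)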
(("Local"|"Global") space+)? gallina_kw_to_hide { let s = lexeme lexbuf in if !Cdglobals.light && section_or_end s then begin let eol = skip_to_dot lexbuf in if eol then coq_bol lexbuf else coq lexbuf end else begin Output.ident s None; let eol=body lexbuf in if eol then coq_bol lexbuf else coq lexbuf end } | prf_token { let eol = if not !Cdglobals.gallina then begin backtrack lexbuf; body lexbuf end else let s = lexeme lexbuf in let eol = if s.[String.length s - 1] = '.' then false else skip_to_dot lexbuf in eol in if eol then coq_bol lexbuf else coq lexbuf } | end_kw { let eol = if not !Cdglobals.gallina then begin backtrack lexbuf; body lexbuf end else let eol = skip_to_dot lexbuf in if !in_proof <> Some true && eol then Output.line_break (); eol in in_proof := None; if eol then coq_bol lexbuf else coq lexbuf } | gallina_kw { let s = lexeme lexbuf in Output.ident s None; let eol = body lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | notation_kw { let s = lexeme lexbuf in Output.ident s None; let eol= start_notation_string lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | prog_kw { let s = lexeme lexbuf in Output.ident s None; let eol = body lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | "#[" { ignore(lexeme lexbuf); Output.char '#'; Output.char '['; let eol = body lexbuf in if eol then coq_bol lexbuf else coq lexbuf } | space+ { Output.char ' '; coq lexbuf } | eof { () } | _ { let eol = if not !Cdglobals.gallina then begin backtrack lexbuf; body lexbuf end else skip_to_dot_or_brace lexbuf in if eol then coq_bol lexbuf else coq lexbuf} (*s Scanning documentation, at beginning of line *) and doc_bol = parse | space* section space+ ([^'\n' '\r' '*'] | '*'+ [^'\n' '\r' ')' '*'])* ('*'+ (nl as s))? { if not (is_none s) then new_lines 1 lexbuf; let eol, lex = strip_eol (lexeme lexbuf) in let lev, s = sec_title lex in if (!Cdglobals.lib_subtitles) && (subtitle (Output.get_module false) s) then () else Output.section lev (fun () -> ignore (doc None (from_string s))); if eol then doc_bol lexbuf else doc None lexbuf } | ((space_nl* nl)? 
as s) (space* '-'+ as line) { let nl_count = count_newlines s in match check_start_list line with | Neither -> backtrack_past_newline lexbuf; new_lines 1 lexbuf; doc None lexbuf | List n -> new_lines nl_count lexbuf; if nl_count > 0 then Output.paragraph (); Output.item 1; doc (Some [n]) lexbuf | Rule -> new_lines nl_count lexbuf; Output.rule (); doc None lexbuf } | (space_nl* nl) as s { new_lines (count_newlines s) lexbuf; Output.paragraph (); doc_bol lexbuf } | "<<" space* nl { new_lines 1 lexbuf; Output.start_verbatim false; verbatim_block lexbuf; doc_bol lexbuf } | "<<" { Output.start_verbatim true; verbatim_inline lexbuf; doc None lexbuf } | eof { true } | '_' { if !Cdglobals.plain_comments then Output.char '_' else start_emph (); doc None lexbuf } | "" { doc None lexbuf } (*s Scanning lists - using whitespace *) and doc_list_bol indents = parse | space* '-' { let (n_spaces,_) = count_spaces (lexeme lexbuf) in match find_level indents n_spaces with | Before -> backtrack lexbuf; doc_bol lexbuf | StartLevel n -> Output.item n; doc (Some (take n indents)) lexbuf | InLevel (n,true) -> let items = List.length indents in Output.item (items+1); doc (Some (List.append indents [n_spaces])) lexbuf | InLevel (_,false) -> backtrack lexbuf; doc_bol lexbuf } | "<<" space* nl { new_lines 1 lexbuf; Output.start_verbatim false; verbatim_block lexbuf; doc_list_bol indents lexbuf } | "<<" space* { Output.start_verbatim true; verbatim_inline lexbuf; doc (Some indents) lexbuf } | "[[" nl { new_lines 1 lexbuf; formatted := Some lexbuf.lex_start_p; Output.start_inline_coq_block (); ignore(body_bol lexbuf); Output.end_inline_coq_block (); formatted := None; doc_list_bol indents lexbuf } | "[[[" nl { new_lines 1 lexbuf; inf_rules (Some indents) lexbuf } | space* nl space* '-' { (* Like in the doc_bol production, these two productions exist only to deal properly with whitespace *) new_lines 1 lexbuf; Output.paragraph (); backtrack_past_newline lexbuf; doc_list_bol indents lexbuf } | space* nl space* _ { new_lines 1 lexbuf; let buf' = lexeme lexbuf in let buf = let bufs = Str.split_delim (Str.regexp "['\n']") buf' in match bufs with | (_ :: s :: []) -> s | (_ :: _ :: s :: _) -> s | _ -> eprintf "Internal error bad_split2 - please report\n"; exit 1 in let (n_spaces,_) = count_spaces buf in match find_level indents n_spaces with | StartLevel 1 | Before -> (* Here we were at the beginning of a line, and it was blank. The next line started before any list items. So: insert a paragraph for the empty line, rewind to whatever's just after the newline, then toss over to doc_bol for whatever comes next. 
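(* Worked example for [find_level] (illustrative): with indents = [2; 6],
   i.e. dashes already seen at columns 2 and 6, text starting at column 2 is
   StartLevel 1, at column 4 it is InLevel (1, false), at column 6 it is
   StartLevel 2, at column 8 or beyond it is InLevel (2, true), and anything
   left of column 2 is Before. *)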
*) Output.stop_item (); Output.paragraph (); backtrack_past_newline lexbuf; doc_bol lexbuf | StartLevel _ | InLevel _ -> Output.paragraph (); backtrack_past_newline lexbuf; doc_list_bol indents lexbuf } | space* _ { let (n_spaces,_) = count_spaces (lexeme lexbuf) in match find_level indents n_spaces with | Before -> Output.stop_item (); backtrack lexbuf; doc_bol lexbuf | StartLevel n -> Output.reach_item_level (n-1); backtrack lexbuf; doc (Some (take (n-1) indents)) lexbuf | InLevel (n,_) -> Output.reach_item_level n; backtrack lexbuf; doc (Some (take n indents)) lexbuf } (*s Scanning documentation elsewhere *) and doc indents = parse | nl { new_lines 1 lexbuf; Output.char '\n'; match indents with | Some ls -> doc_list_bol ls lexbuf | None -> doc_bol lexbuf } | "[[" nl { new_lines 1 lexbuf; if !Cdglobals.plain_comments then (Output.char '['; Output.char '['; doc indents lexbuf) else (formatted := Some lexbuf.lex_start_p; Output.start_inline_coq_block (); let eol = body_bol lexbuf in Output.end_inline_coq_block (); formatted := None; if eol then match indents with | Some ls -> doc_list_bol ls lexbuf | None -> doc_bol lexbuf else doc indents lexbuf)} | "[[[" nl { new_lines 1 lexbuf; inf_rules indents lexbuf } | "[]" { Output.proofbox (); doc indents lexbuf } | "{{" { url lexbuf; doc indents lexbuf } | "[" { if !Cdglobals.plain_comments then Output.char '[' else (brackets := 1; Output.start_inline_coq (); escaped_coq lexbuf; Output.end_inline_coq ()); doc indents lexbuf } | "(*" { backtrack lexbuf ; let bol_parse = match indents with | Some is -> doc_list_bol is | None -> doc_bol in let eol = if !Cdglobals.parse_comments then comment lexbuf else skipped_comment lexbuf in if eol then bol_parse lexbuf else doc indents lexbuf } | '*'* "*)" (space_nl* as s) "(**" { let nl_count = count_newlines s in new_lines nl_count lexbuf; (match indents with | Some _ -> Output.stop_item () | None -> ()); (* this says - if there is a blank line between the two comments, insert one in the output too *) if nl_count > 1 then Output.paragraph (); doc_bol lexbuf } | '*'* "*)" space* nl { new_lines 1 lexbuf; Output.char '\n'; true } | '*'* "*)" { false } | "$" { if !Cdglobals.plain_comments then Output.char '$' else (Output.start_latex_math (); escaped_math_latex lexbuf); doc indents lexbuf } | "$$" { if !Cdglobals.plain_comments then Output.char '$'; Output.char '$'; doc indents lexbuf } | "%" { if !Cdglobals.plain_comments then Output.char '%' else escaped_latex lexbuf; doc indents lexbuf } | "%%" { if !Cdglobals.plain_comments then Output.char '%'; Output.char '%'; doc indents lexbuf } | "#" { if !Cdglobals.plain_comments then Output.char '#' else escaped_html lexbuf; doc indents lexbuf } | "##" { if !Cdglobals.plain_comments then Output.char '#'; Output.char '#'; doc indents lexbuf } | nonidentchar '_' nonidentchar { List.iter (fun x -> Output.char (lexeme_char lexbuf x)) [0;1;2]; doc indents lexbuf} | nonidentchar '_' { Output.char (lexeme_char lexbuf 0); if !Cdglobals.plain_comments then Output.char '_' else start_emph () ; doc indents lexbuf } | '_' nonidentchar { if !Cdglobals.plain_comments then Output.char '_' else stop_emph () ; Output.char (lexeme_char lexbuf 1); doc indents lexbuf } | "<<" space* { Output.start_verbatim true; verbatim_inline lexbuf; doc indents lexbuf } | '"' { if !Cdglobals.plain_comments then Output.char '"' else if in_quote () then stop_quote () else start_quote (); doc indents lexbuf } | eof { false } | _ { Output.char (lexeme_char lexbuf 0); doc indents lexbuf } (*s Various 
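(* Inline markup recognised by the [doc] rule above (illustrative summary):
   $ ... $ typesets LaTeX math, % ... % inserts raw LaTeX, # ... # inserts
   raw HTML, [ ... ] typesets a Coq term, << ... >> is verbatim text, and a
   word wrapped in underscores is emphasised; doubling a delimiter ($$, %%,
   ##) escapes it, and everything is echoed literally under --plain-comments. *)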
escapings *) and escaped_math_latex = parse | "$" { Output.stop_latex_math () } | eof { Output.stop_latex_math () } | "*)" { Output.stop_latex_math (); backtrack lexbuf } | _ { Output.latex_char (lexeme_char lexbuf 0); escaped_math_latex lexbuf } and escaped_latex = parse | "%" { () } | eof { () } | "*)" { backtrack lexbuf } | _ { Output.latex_char (lexeme_char lexbuf 0); escaped_latex lexbuf } and escaped_html = parse | "#" { () } | "&#" { Output.html_char '&'; Output.html_char '#'; escaped_html lexbuf } | "##" { Output.html_char '#'; escaped_html lexbuf } | eof { () } | "*)" { backtrack lexbuf } | _ { Output.html_char (lexeme_char lexbuf 0); escaped_html lexbuf } and verbatim_block = parse | nl ">>" space* nl { new_lines 2 lexbuf; Output.verbatim_char false '\n'; Output.stop_verbatim false } | nl ">>" { new_lines 1 lexbuf; warn "missing newline after \">>\" block" lexbuf; Output.verbatim_char false '\n'; Output.stop_verbatim false } | eof { warn "unterminated \">>\" block" lexbuf; Output.stop_verbatim false } | nl { new_lines 1 lexbuf; Output.verbatim_char false (lexeme_char lexbuf 0); verbatim_block lexbuf } | _ { Output.verbatim_char false (lexeme_char lexbuf 0); verbatim_block lexbuf } and verbatim_inline = parse | nl { new_lines 1 lexbuf; warn "unterminated inline \">>\"" lexbuf; Output.char '\n'; Output.stop_verbatim true } | ">>" { Output.stop_verbatim true } | eof { warn "unterminated inline \">>\"" lexbuf; Output.stop_verbatim true } | _ { Output.verbatim_char true (lexeme_char lexbuf 0); verbatim_inline lexbuf } and url = parse | "}}" { Output.url (Buffer.contents url_buffer) None; Buffer.clear url_buffer } | "}" { url_name lexbuf } | _ { Buffer.add_char url_buffer (lexeme_char lexbuf 0); url lexbuf } and url_name = parse | "}" { Output.url (Buffer.contents url_buffer) (Some (Buffer.contents url_name_buffer)); Buffer.clear url_buffer; Buffer.clear url_name_buffer } | _ { Buffer.add_char url_name_buffer (lexeme_char lexbuf 0); url_name lexbuf } (*s Coq, inside quotations *) and escaped_coq = parse | "]" { decr brackets; if !brackets > 0 then (Output.sublexer_in_doc ']'; escaped_coq lexbuf) else Tokens.flush_sublexer () } | "[" { incr brackets; Output.sublexer_in_doc '['; escaped_coq lexbuf } | "(*" { Tokens.flush_sublexer (); comment_level := 1; ignore (if !Cdglobals.parse_comments then comment lexbuf else skipped_comment lexbuf); escaped_coq lexbuf } | "*)" { (* likely to be a syntax error *) warn "unterminated \"]\"" lexbuf; backtrack lexbuf } | eof { Tokens.flush_sublexer () } | identifier { Tokens.flush_sublexer(); Output.ident (lexeme lexbuf) None; escaped_coq lexbuf } | space_nl* { let str = lexeme lexbuf in Tokens.flush_sublexer(); (if !Cdglobals.inline_notmono then () else Output.end_inline_coq ()); String.iter Output.char str; (if !Cdglobals.inline_notmono then () else Output.start_inline_coq ()); escaped_coq lexbuf } | _ { Output.sublexer_in_doc (lexeme_char lexbuf 0); escaped_coq lexbuf } (*s Coq "Comments" command. *) and comments = parse | space_nl+ { Output.char ' '; comments lexbuf } | '"' [^ '"']* '"' { let s = lexeme lexbuf in let s = String.sub s 1 (String.length s - 2) in ignore (doc None (from_string s)); comments lexbuf } | ([^ '.' '"'] | '.' [^ ' ' '\t' '\n'])+ { escaped_coq (from_string (lexeme lexbuf)); comments lexbuf } | "." 
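(* Hyperlink syntax collected by the [url] and [url_name] rules above
   (illustrative): {{https://coq.inria.fr/}the Coq web site} links the given
   text to the address before the first closing brace, while {{URL}} alone
   uses the bare address as the link. *)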
(space_nl | eof) { () } | eof { () } | _ { Output.char (lexeme_char lexbuf 0); comments lexbuf } and skipped_comment = parse | "(*" { incr comment_level; skipped_comment lexbuf } | "*)" space* nl { new_lines 1 lexbuf; decr comment_level; if !comment_level > 0 then skipped_comment lexbuf else true } | "*)" { decr comment_level; if !comment_level > 0 then skipped_comment lexbuf else false } | eof { false } | _ { skipped_comment lexbuf } and comment = parse | "(*" { incr comment_level; Output.start_comment (); comment lexbuf } | "*)" space* nl { new_lines 1 lexbuf; Output.end_comment (); Output.line_break (); decr comment_level; if !comment_level > 0 then comment lexbuf else true } | "*)" { Output.end_comment (); decr comment_level; if !comment_level > 0 then comment lexbuf else false } | "[" { if !Cdglobals.plain_comments then Output.char '[' else (brackets := 1; Output.start_inline_coq (); escaped_coq lexbuf; Output.end_inline_coq ()); comment lexbuf } | "[[" nl { new_lines 1 lexbuf; if !Cdglobals.plain_comments then (Output.char '['; Output.char '[') else (formatted := Some lexbuf.lex_start_p; Output.start_inline_coq_block (); let _ = body_bol lexbuf in Output.end_inline_coq_block (); formatted := None); comment lexbuf } | "$" { if !Cdglobals.plain_comments then Output.char '$' else (Output.start_latex_math (); escaped_math_latex lexbuf); comment lexbuf } | "$$" { if !Cdglobals.plain_comments then Output.char '$'; Output.char '$'; comment lexbuf } | "%" { if !Cdglobals.plain_comments then Output.char '%' else escaped_latex lexbuf; comment lexbuf } | "%%" { if !Cdglobals.plain_comments then Output.char '%'; Output.char '%'; comment lexbuf } | "#" { if !Cdglobals.plain_comments then Output.char '#' else escaped_html lexbuf; comment lexbuf } | "##" { if !Cdglobals.plain_comments then Output.char '#'; Output.char '#'; comment lexbuf } | eof { false } | space+ { Output.indentation (fst (count_spaces (lexeme lexbuf))); comment lexbuf } | nl { new_lines 1 lexbuf; Output.line_break (); comment lexbuf } | _ { Output.char (lexeme_char lexbuf 0); comment lexbuf } and skip_to_dot = parse | '.' space* nl { new_lines 1 lexbuf; true } | eof | '.' space+ { false } | "(*" { comment_level := 1; ignore (skipped_comment lexbuf); skip_to_dot lexbuf } | _ { skip_to_dot lexbuf } and skip_to_dot_or_brace = parse | '.' space* nl { new_lines 1 lexbuf; true } | eof | '.' 
space+ { false } | "(*" { comment_level := 1; ignore (skipped_comment lexbuf); skip_to_dot_or_brace lexbuf } | "}" space* nl { new_lines 1 lexbuf; true } | "}" { false } | space* { skip_to_dot_or_brace lexbuf } | _ { skip_to_dot lexbuf } and body_bol = parse | space+ { Output.indentation (fst (count_spaces (lexeme lexbuf))); body lexbuf } | "" { Output.indentation 0; body lexbuf } and body = parse | nl { Tokens.flush_sublexer(); Output.line_break(); new_lines 1 lexbuf; body_bol lexbuf} | (nl+ as s) space* "]]" space* nl { new_lines (count_newlines s + 1) lexbuf; Tokens.flush_sublexer(); if is_none !formatted then begin let s = lexeme lexbuf in let nlsp,s = remove_newline s in let _,isp = count_spaces s in let loc = lexeme_start lexbuf + nlsp + isp in Output.sublexer ']' loc; Output.sublexer ']' (loc+1); Tokens.flush_sublexer(); body lexbuf end else begin Output.paragraph (); true end } | "]]" space* nl { Tokens.flush_sublexer(); new_lines 1 lexbuf; if is_none !formatted then begin let loc = lexeme_start lexbuf in Output.sublexer ']' loc; Output.sublexer ']' (loc+1); Tokens.flush_sublexer(); Output.line_break(); body lexbuf end else begin Output.paragraph (); true end } | eof { Tokens.flush_sublexer(); match !formatted with | None -> false | Some p -> raise (MismatchPreformatted p) } | '.' space* (nl as s | eof) { if not (is_none s) then new_line lexbuf; Tokens.flush_sublexer(); Output.char '.'; Output.line_break(); if is_none !formatted then true else body_bol lexbuf } | '.' space* nl "]]" space* nl { new_lines 2 lexbuf; Tokens.flush_sublexer(); Output.char '.'; if is_none !formatted then begin eprintf "Error: stray ]] at %d\n" (lexeme_start lexbuf); flush stderr; exit 1 end else begin Output.paragraph (); true end } | '.' space+ { Tokens.flush_sublexer(); Output.char '.'; Output.char ' '; if is_none !formatted then false else body lexbuf } | "(**" (space_nl as s) { if is_nl s then new_line lexbuf; Tokens.flush_sublexer(); Output.end_coq (); Output.start_doc (); let eol = doc_bol lexbuf in Output.end_doc (); Output.start_coq (); if eol then body_bol lexbuf else body lexbuf } | "(*" { Tokens.flush_sublexer(); comment_level := 1; let eol = if parse_comments () then begin Output.start_comment (); comment lexbuf end else begin let eol = skipped_comment lexbuf in if eol then Output.line_break(); eol end in if eol then body_bol lexbuf else body lexbuf } | "where" { Tokens.flush_sublexer(); Output.ident (lexeme lexbuf) None; start_notation_string lexbuf } | identifier { Tokens.flush_sublexer(); Output.ident (lexeme lexbuf) (Some (lexeme_start lexbuf)); body lexbuf } | ".." { Tokens.flush_sublexer(); Output.char '.'; Output.char '.'; body lexbuf } | '"' { Tokens.flush_sublexer(); Output.char '"'; string lexbuf; body lexbuf } | space { Tokens.flush_sublexer(); Output.char (lexeme_char lexbuf 0); body lexbuf } | _ { let c = lexeme_char lexbuf 0 in Output.sublexer c (lexeme_start lexbuf); body lexbuf } and start_notation_string = parse | space { Tokens.flush_sublexer(); Output.char (lexeme_char lexbuf 0); start_notation_string lexbuf } | '"' (* a true notation *) { Output.sublexer '"' (lexeme_start lexbuf); notation_string lexbuf; body lexbuf } | _ (* an abbreviation *) { backtrack lexbuf; body lexbuf } and notation_string = parse | "\"\"" { Output.char '"'; Output.char '"'; (* Unlikely! 
*) notation_string lexbuf } | '"' { Tokens.flush_sublexer(); Output.char '"' } | _ { let c = lexeme_char lexbuf 0 in Output.sublexer c (lexeme_start lexbuf); notation_string lexbuf } and string = parse | "\"\"" { Output.char '"'; Output.char '"'; string lexbuf } | '"' { Output.char '"' } | _ { let c = lexeme_char lexbuf 0 in Output.char c; string lexbuf } and skip_hide = parse | eof | end_hide nl { new_lines 1 lexbuf; () } | _ { skip_hide lexbuf } (*s Reading token pretty-print *) and printing_token_body = parse | "*)" (nl as s)? | eof { if not (is_none s) then new_lines 1 lexbuf; let s = Buffer.contents token_buffer in Buffer.clear token_buffer; s } | (nl | _) as s { if is_nl s then new_lines 1 lexbuf; Buffer.add_string token_buffer (lexeme lexbuf); printing_token_body lexbuf } and details_body = parse | "*)" space* (nl as s)? | eof { if not (is_none s) then new_lines 1 lexbuf; None } | ":" space* { details_body_rec lexbuf } and details_body_rec = parse | "*)" space* (nl as s)? | eof { if not (is_none s) then new_lines 1 lexbuf; let s = Buffer.contents token_buffer in Buffer.clear token_buffer; Some s } | _ { Buffer.add_string token_buffer (lexeme lexbuf); details_body_rec lexbuf } (*s These handle inference rules, parsing the body segments of things enclosed in [[[ ]]] brackets *) and inf_rules indents = parse | space* nl (* blank line, before or between definitions *) { new_lines 1 lexbuf; inf_rules indents lexbuf } | "]]]" nl (* end of the inference rules block *) { new_lines 1 lexbuf; match indents with | Some ls -> doc_list_bol ls lexbuf | None -> doc_bol lexbuf } | _ { backtrack lexbuf; (* anything else must be the first line in a rule *) inf_rules_assumptions indents [] lexbuf} (* The inference rule parsing just collects the inference rule and then calls the output function once, instead of doing things incrementally like the rest of the lexer. If only there were a real parsing phase... *) and inf_rules_assumptions indents assumptions = parse | space* "---" '-'* [^ '\n']* nl (* hit the horizontal line *) { new_lines 1 lexbuf; let line = lexeme lexbuf in let (spaces,_) = count_spaces line in let dashes_and_name = cut_head_tail_spaces (String.sub line 0 (String.length line - 1)) in let ldn = String.length dashes_and_name in let (dashes,name) = try (let i = String.index dashes_and_name ' ' in let d = String.sub dashes_and_name 0 i in let n = cut_head_tail_spaces (String.sub dashes_and_name (i+1) (ldn-i-1)) in (d, Some n)) with _ -> (dashes_and_name, None) in inf_rules_conclusion indents (List.rev assumptions) (spaces, dashes, name) [] lexbuf } | [^ '\n']* nl (* if it's not the horizontal line, it's an assumption *) { new_lines 1 lexbuf; let line = lexeme lexbuf in let (spaces,_) = count_spaces line in let assumption = cut_head_tail_spaces (String.sub line 0 (String.length line - 1)) in inf_rules_assumptions indents ((spaces,assumption)::assumptions) lexbuf } (*s The conclusion is required to come immediately after the horizontal bar. It is allowed to contain multiple lines of text, like the assumptions. The conclusion ends when we spot a blank line or a ']]]'. *) and inf_rules_conclusion indents assumptions middle conclusions = parse | space* nl | space* "]]]" nl (* end of conclusions. 
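(* An inference-rule block in a documentation comment looks like this
   (illustrative):
   [[[
     A -> B
     A
     ----------------- modus_ponens
     B
   ]]]
   Assumption lines come first, then a line of dashes with an optional rule
   name, then the conclusion lines; the conclusion ends at a blank line or at
   the closing ]]]. *)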
*) { new_lines 2 lexbuf; backtrack lexbuf; Output.inf_rule assumptions middle (List.rev conclusions); inf_rules indents lexbuf } | space* [^ '\n']+ nl (* this is a line in the conclusion *) { new_lines 1 lexbuf; let line = lexeme lexbuf in let (spaces,_) = count_spaces line in let conc = cut_head_tail_spaces (String.sub line 0 (String.length line - 1)) in inf_rules_conclusion indents assumptions middle ((spaces,conc) :: conclusions) lexbuf } (*s A small scanner to support the chapter subtitle feature *) and st_start m = parse | "(*" "*"+ space+ "*" space+ { st_modname m lexbuf } | _ { None } and st_modname m = parse | identifier space* ":" space* { if subtitle m (lexeme lexbuf) then st_subtitle lexbuf else None } | _ { None } and st_subtitle = parse | [^ '\n']* '\n' { let st = lexeme lexbuf in let i = try Str.search_forward (Str.regexp "\\**)") st 0 with Not_found -> (eprintf "unterminated comment at beginning of file\n"; exit 1) in Some (cut_head_tail_spaces (String.sub st 0 i)) } | _ { None } (*s Applying the scanners to files *) { (* coq_bol with error handling *) let coq_bol' f lb = try coq_bol lb with | MismatchPreformatted p -> Printf.eprintf "%a: mismatched \"[[\"\n" print_position_p p; exit 1 let coq_file f m = reset (); let c = open_in f in let lb = from_channel c in let lb = { lb with lex_curr_p = { lb.lex_curr_p with pos_fname = f }; lex_start_p = { lb.lex_start_p with pos_fname = f } } in (Index.current_library := m; Output.initialize (); Output.start_module (); Output.start_coq (); coq_bol' f lb; Output.end_coq (); close_in c) let detect_subtitle f m = let c = open_in f in let lb = from_channel c in let sub = st_start m lb in close_in c; sub } coq-8.15.0/tools/coqdoc/dune000066400000000000000000000006551417001151100156520ustar00rootroot00000000000000(install (section lib) (package coq-core) (files (coqdoc.css as tools/coqdoc/coqdoc.css) (coqdoc.sty as tools/coqdoc/coqdoc.sty))) ; File needs to be here too. (install (section share_root) (package coq-core) (files (coqdoc.sty as texmf/tex/latex/misc/coqdoc.sty))) (executable (name main) (public_name coqdoc) (package coq-core) (libraries str coq-core.boot coq-core.config coq-core.clib)) (ocamllex cpretty) coq-8.15.0/tools/coqdoc/fileUtil.ml000066400000000000000000000035141417001151100171000ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* close_out cout; close_in cin with Sys_error e -> Printf.eprintf "%s\n" e; exit 1 let check_if_file_exists f = if not (Sys.file_exists f) then begin Printf.eprintf "coqdoc: %s: no such file\n" f; exit 1 end (* [files_from_file f] returns the list of file names contained in the file named [f]. These file names must be separated by spaces, tabulations or newlines. 
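   For instance (illustrative), a file whose contents are the two names
   a.v and b.v, on one line or on separate lines, yields the list
   ["a.v"; "b.v"], in the order in which the names appear.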
*) let files_from_file f = let files_from_channel ch = let buf = Buffer.create 80 in let l = ref [] in try while true do match input_char ch with | ' ' | '\t' | '\n' -> if Buffer.length buf > 0 then l := Buffer.contents buf :: !l; Buffer.clear buf | c -> Buffer.add_char buf c done; [] with End_of_file -> List.rev !l in try check_if_file_exists f; let ch = open_in f in let l = files_from_channel ch in close_in ch; l with Sys_error s -> Printf.eprintf "coqdoc: cannot read from file %s (%s)\n" f s; exit 1 coq-8.15.0/tools/coqdoc/fileUtil.mli000066400000000000000000000020031417001151100172410ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string -> unit (** [files_from_file f] returns the list of file names contained in the file named [f]. These file names must be separated by spaces, tabulations or newlines. *) val files_from_file : string -> string list (** Version of [Sys.file_exists] but will exit on error *) val check_if_file_exists : string -> unit coq-8.15.0/tools/coqdoc/glob_file.ml000066400000000000000000000071721417001151100172510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Definition | "prf" | "thm" -> Lemma | "ind" | "variant" | "coind" -> Inductive | "constr" -> Constructor | "indrec" | "rec" | "corec" -> Record | "proj" -> Projection | "class" -> Class | "meth" -> Method | "inst" -> Instance | "var" -> Variable | "defax" | "prfax" | "ax" -> Axiom | "syndef" -> Abbreviation | "not" -> Notation | "lib" -> Library | "mod" | "modtype" -> Module | "tac" -> TacticDefinition | "sec" -> Section | "binder" -> Binder | s -> invalid_arg ("type_of_string:" ^ s) let ill_formed_glob_file f = eprintf "Warning: ill-formed file %s (links will not be available)\n" f let outdated_glob_file f = eprintf "Warning: %s not consistent with corresponding .v file (links will not be \ available)\n" f let correct_file vfile f c = let s = input_line c in if String.length s < 7 || String.sub s 0 7 <> "DIGEST " then ( ill_formed_glob_file f; false ) else let s = String.sub s 7 (String.length s - 7) in match (vfile, s) with | None, "NO" -> true | Some _, "NO" -> ill_formed_glob_file f; false | None, _ -> ill_formed_glob_file f; false | Some vfile, s -> s = Digest.to_hex (Digest.file vfile) || (outdated_glob_file f; false) let read_glob vfile f = let c = open_in f in if correct_file vfile f c then let cur_mod = ref "" in try while true do let s = input_line c in let n = String.length s in if n > 0 then match s.[0] with | 'F' -> cur_mod := String.sub s 1 (n - 1); current_library := !cur_mod | 'R' -> ( try Scanf.sscanf s "R%d:%d %s %s %s %s" (fun loc1 loc2 lib_dp sp id ty -> for loc = loc1 to loc2 do add_ref !cur_mod loc lib_dp sp id (type_of_string ty); (* Also add an entry for each module mentioned in [lib_dp], * to use in interpolation. *) ignore (List.fold_right (fun thisPiece priorPieces -> let newPieces = match priorPieces with | "" -> thisPiece | _ -> thisPiece ^ "." 
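(* Shape of the .glob lines consumed here, reconstructed from the Scanf
   patterns above and below (illustrative, not normative):
     FMyLib.Foo                               sets the current module
     R120:122 Coq.Init.Datatypes <> nat ind   a reference to nat at characters 120 to 122
     ind 64:66 <> bool                        a definition of bool
   where <> marks an empty module-path or identifier component and the file
   begins with the DIGEST line checked by [correct_file]. *)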
^ priorPieces in add_ref !cur_mod loc "" "" newPieces Library; newPieces) (Str.split (Str.regexp_string ".") lib_dp) "") done) with _ -> () ) | _ -> ( try Scanf.sscanf s "%s %d:%d %s %s" (fun ty loc1 loc2 sp id -> add_def loc1 loc2 (type_of_string ty) sp id) with Scanf.Scan_failure _ -> () ) done; assert false with End_of_file -> close_in c coq-8.15.0/tools/coqdoc/glob_file.mli000066400000000000000000000015041417001151100174130ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string -> unit coq-8.15.0/tools/coqdoc/index.ml000066400000000000000000000202041417001151100164250ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* "<>" then if id <> "<>" then sp ^ "." ^ id else sp else if id <> "<>" then id else "" let add_def loc1 loc2 ty sp id = let fullid = full_ident sp id in let def = Def (fullid, ty) in for loc = loc1 to loc2 do Hashtbl.add reftable (!current_library, loc) def done; Hashtbl.add deftable !current_library (fullid, ty); Hashtbl.add byidtable id (!current_library, fullid, ty) let add_ref m loc m' sp id ty = let fullid = full_ident sp id in if Hashtbl.mem reftable (m, loc) then () else Hashtbl.add reftable (m, loc) (Ref (m', fullid, ty)); let idx = if id = "<>" then m' else id in if Hashtbl.mem byidtable idx then () else Hashtbl.add byidtable idx (m', fullid, ty) let find m l = Hashtbl.find reftable (m, l) let find_string s = let (m,s,t) = Hashtbl.find byidtable s in Ref (m,s,t) (* Coq modules *) let split_sp s = try let i = String.rindex s '.' in String.sub s 0 i, String.sub s (i + 1) (String.length s - i - 1) with Not_found -> "", s let modules = Hashtbl.create 97 let local_modules = Hashtbl.create 97 let add_module m = let _,id = split_sp m in Hashtbl.add modules id m; Hashtbl.add local_modules m () type module_kind = Local | External of string | Unknown let external_libraries = ref [] let add_external_library logicalpath url = external_libraries := (logicalpath,url) :: !external_libraries let find_external_library logicalpath = let rec aux = function | [] -> raise Not_found | (l,u)::rest -> if String.length logicalpath > String.length l && String.sub logicalpath 0 (String.length l + 1) = l ^"." 
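(* Illustrative resolution, with a hypothetical mapping: after the option
   --external https://example.org/mylib MyLib, a reference living in module
   MyLib.Foo.Bar matches the MyLib prefix here and is linked against
   https://example.org/mylib/MyLib.Foo.Bar, to which the backends then append
   .html (compare coqexternalref in coqdoc.sty). *)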
then u else aux rest in aux !external_libraries let init_coqlib_library () = add_external_library "Coq" !coqlib_url let find_module m = if Hashtbl.mem local_modules m then Local else try External (find_external_library m ^ "/" ^ m) with Not_found -> Unknown (* Building indexes *) type 'a index = { idx_name : string; idx_entries : (char * (string * 'a) list) list; idx_size : int } let map f i = { i with idx_entries = List.map (fun (c,l) -> (c, List.map (fun (s,x) -> (s,f s x)) l)) i.idx_entries } let compare_entries (s1,_) (s2,_) = Alpha.compare_string s1 s2 let sort_entries el = let t = Hashtbl.create 97 in List.iter (fun c -> Hashtbl.add t c []) ['A'; 'B'; 'C'; 'D'; 'E'; 'F'; 'G'; 'H'; 'I'; 'J'; 'K'; 'L'; 'M'; 'N'; 'O'; 'P'; 'Q'; 'R'; 'S'; 'T'; 'U'; 'V'; 'W'; 'X'; 'Y'; 'Z'; '_'; '*']; List.iter (fun ((s,_) as e) -> let c = Alpha.norm_char s.[0] in let c,l = try c,Hashtbl.find t c with Not_found -> '*',Hashtbl.find t '*' in Hashtbl.replace t c (e :: l)) el; let res = ref [] in Hashtbl.iter (fun c l -> res := (c, List.sort compare_entries l) :: !res) t; List.sort (fun (c1,_) (c2,_) -> Alpha.compare_char c1 c2) !res let display_letter c = if c = '*' then "other" else String.make 1 c let type_name = function | Library -> let ln = !lib_name in if ln <> "" then String.lowercase_ascii ln else "library" | Module -> "module" | Definition -> "definition" | Inductive -> "inductive" | Constructor -> "constructor" | Lemma -> "lemma" | Record -> "record" | Projection -> "projection" | Instance -> "instance" | Class -> "class" | Method -> "method" | Variable -> "variable" | Axiom -> "axiom" | TacticDefinition -> "tactic" | Abbreviation -> "abbreviation" | Notation -> "notation" | Section -> "section" | Binder -> "binder" let prepare_entry s = function | Notation -> (* We decode the encoding done in Dumpglob.cook_notation of coqtop *) (* Encoded notations have the form section:entry:sc:x_'++'_x *) (* where: *) (* - the section, if any, ends with a "." 
*) (* - the scope can be empty *) (* - tokens are separated with "_" *) (* - non-terminal symbols are conventionally represented by "x" *) (* - terminals are enclosed within simple quotes *) (* - existing simple quotes (that necessarily are parts of *) (* terminals) are doubled *) (* (as a consequence, when a terminal contains "_" or "x", these *) (* necessarily appear enclosed within non-doubled simple quotes) *) (* - non-printable characters < 32 are left encoded so that they *) (* are human-readable in index files *) (* Example: "x ' %x _% y %'x %'_' z" is encoded as *) (* "x_''''_'%x'_'_%'_x_'%''x'_'%''_'''_x" *) let err () = eprintf "Invalid notation in globalization file\n"; exit 1 in let h = try String.index_from s 0 ':' with _ -> err () in let i = try String.index_from s (h+1) ':' with _ -> err () in let m = try String.index_from s (i+1) ':' with _ -> err () in let entry = String.sub s (h+1) (i-h-1) in let sc = String.sub s (i+1) (m-i-1) in let ntn = Bytes.make (String.length s - m) ' ' in let k = ref 0 in let j = ref (m+1) in let quoted = ref false in let l = String.length s - 1 in while !j <= l do if not !quoted then begin (match s.[!j] with | '_' -> Bytes.set ntn !k ' '; incr k | 'x' -> Bytes.set ntn !k '_'; incr k | '\'' -> quoted := true | _ -> assert false) end else if s.[!j] = '\'' then if (!j = l || s.[!j+1] = '_') then quoted := false else (incr j; Bytes.set ntn !k s.[!j]; incr k) else begin Bytes.set ntn !k s.[!j]; incr k end; incr j done; let ntn = Bytes.sub_string ntn 0 !k in let ntn = if sc = "" then ntn else ntn ^ " (" ^ sc ^ ")" in if entry = "" then ntn else entry ^ ":" ^ ntn | _ -> s let all_entries () = let gl = ref [] in let add_g s m t = gl := (s,(m,t)) :: !gl in let bt = Hashtbl.create 11 in let add_bt t s m = let l = try Hashtbl.find bt t with Not_found -> [] in Hashtbl.replace bt t ((s,m) :: l) in let classify m (s,t) = (add_g s m t; add_bt t s m) in Hashtbl.iter classify deftable; Hashtbl.iter (fun id m -> add_g id m Library; add_bt Library id m) modules; { idx_name = "global"; idx_entries = sort_entries !gl; idx_size = List.length !gl }, Hashtbl.fold (fun t e l -> (t, { idx_name = type_name t; idx_entries = sort_entries e; idx_size = List.length e }) :: l) bt [] coq-8.15.0/tools/coqdoc/index.mli000066400000000000000000000043221417001151100166010ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string type index_entry = | Def of string * entry_type | Ref of coq_module * string * entry_type (* Find what symbol coqtop said is located at loc in the source file *) val find : coq_module -> loc -> index_entry (* Find what data is referred to by some string in some coq module *) val find_string : string -> index_entry (** [add_ref cur_mod loc lib_dp sp id entry_type] *) val add_ref : string -> int -> coq_module -> coq_module -> string -> entry_type -> unit (** [add_def loc1 loc2 entry_type sp id] *) val add_def : int -> int -> entry_type -> coq_module -> string -> unit (* Add a Coq module *) val add_module : coq_module -> unit type module_kind = Local | External of coq_module | Unknown val find_module : coq_module -> module_kind val init_coqlib_library : unit -> unit val add_external_library : string -> coq_module -> unit (*s Indexes *) type 'a index = { idx_name : string; idx_entries : (char * (string * 'a) list) list; idx_size : int } val current_library : string ref val display_letter : char -> 
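(* Illustrative note for [prepare_entry] below: a hypothetical encoded
   Notation entry such as ::nat_scope:x_'+'_x (empty section and entry,
   scope nat_scope) is decoded to the display string _ + _ (nat_scope)
   by the loop in index.ml. *)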
string val prepare_entry : string -> entry_type -> string val all_entries : unit -> (coq_module * entry_type) index * (entry_type * coq_module index) list val map : (string -> 'a -> 'b) -> 'a index -> 'b index coq-8.15.0/tools/coqdoc/latexCompiler.ml000066400000000000000000000060351417001151100201340ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Sys.chdir cwd; raise e let clean_temp_files basefile = let remove f = try Sys.remove f with _ -> () in remove (basefile ^ ".tex"); remove (basefile ^ ".log"); remove (basefile ^ ".aux"); remove (basefile ^ ".toc"); remove (basefile ^ ".dvi"); remove (basefile ^ ".ps"); remove (basefile ^ ".pdf"); remove (basefile ^ ".haux"); remove (basefile ^ ".html") let clean_and_exit file res = clean_temp_files file; exit res let cat file = let c = open_in file in try while true do print_char (input_char c) done with End_of_file -> close_in c type otype = Dvi | Ps | Pdf let compile ~otypes ~produce_document fl = let texfile = Filename.temp_file "coqdoc" ".tex" in let basefile = Filename.chop_suffix texfile ".tex" in let final_out_to = !out_to in out_to := File texfile; output_dir := Filename.dirname texfile; produce_document fl; let latexexe = if List.mem Pdf otypes then "pdflatex" else "latex" in let latexcmd = let file = Filename.basename texfile in let file = if !quiet then sprintf "'\\nonstopmode\\input{%s}'" file else file in sprintf "%s %s && %s %s 1>&2 %s" latexexe file latexexe file (if !quiet then "> /dev/null" else "") in let res = locally (Filename.dirname texfile) Sys.command latexcmd in if res <> 0 then begin eprintf "Couldn't run LaTeX successfully\n"; clean_and_exit basefile res end; let dvifile = basefile ^ ".dvi" in ( if List.mem Dvi otypes then match final_out_to with | MultFiles | StdOut -> cat dvifile | File f -> FileUtil.copy dvifile f ); let pdffile = basefile ^ ".pdf" in ( if List.mem Pdf otypes then match final_out_to with | MultFiles | StdOut -> cat pdffile | File f -> FileUtil.copy pdffile f ); if List.mem Ps otypes then begin let psfile = basefile ^ ".ps" in let command = sprintf "dvips %s -o %s %s" dvifile psfile (if !quiet then "> /dev/null 2>&1" else "") in let res = Sys.command command in if res <> 0 then begin eprintf "Couldn't run dvips successfully\n"; clean_and_exit basefile res end; match final_out_to with | MultFiles | StdOut -> cat psfile | File f -> FileUtil.copy psfile f end; clean_temp_files basefile coq-8.15.0/tools/coqdoc/latexCompiler.mli000066400000000000000000000014751417001151100203100ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* produce_document:(Cdglobals.file list -> unit) -> Cdglobals.file list -> unit coq-8.15.0/tools/coqdoc/main.ml000066400000000000000000000434221417001151100162510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* "; prerr_endline " --html produce a HTML document (default)"; prerr_endline " --latex produce a LaTeX document"; prerr_endline " --texmacs produce a TeXmacs document"; prerr_endline " --raw produce a text document"; prerr_endline " --dvi output the DVI"; prerr_endline " --ps output the 
PostScript"; prerr_endline " --pdf output the Pdf"; prerr_endline " --stdout write output to stdout"; prerr_endline " -o write output in file "; prerr_endline " -d output files into directory "; prerr_endline " -g (gallina) skip proofs"; prerr_endline " -s (short) no titles for files"; prerr_endline " -l light mode (only defs and statements)"; prerr_endline " -t give a title to the document"; prerr_endline " --body-only suppress LaTeX/HTML header and trailer"; prerr_endline " --with-header prepend as html reader"; prerr_endline " --with-footer append as html footer"; prerr_endline " --no-index do not output the index"; prerr_endline " --multi-index index split in multiple files"; prerr_endline " --index set index name (default is index)"; prerr_endline " --toc output a table of contents"; prerr_endline " --vernac consider as a .v file"; prerr_endline " --tex consider as a .tex file"; prerr_endline " -p insert in LaTeX preamble"; prerr_endline " --files-from read file names to process in "; prerr_endline " --glob-from read globalization information from "; prerr_endline " --no-glob don't use any globalization information (no links will be inserted at identifiers)"; prerr_endline " --quiet quiet mode (default)"; prerr_endline " --verbose verbose mode"; prerr_endline " --no-externals no links to Coq standard library"; prerr_endline " --external set URL for external library d"; prerr_endline " --coqlib_url set URL for Coq standard library"; prerr_endline (" (default is " ^ Coq_config.wwwstdlib ^ ")"); prerr_endline " --boot run in boot mode (no-op)"; prerr_endline " --coqlib set the path where Coq files are installed"; prerr_endline " -R map physical dir to Coq dir"; prerr_endline " -Q map physical dir to Coq dir"; prerr_endline " --latin1 set ISO-8859-1 mode"; prerr_endline " --utf8 set UTF-8 mode"; prerr_endline " --charset set HTML charset"; prerr_endline " --inputenc set LaTeX input encoding"; prerr_endline " --interpolate try to typeset identifiers in comments using definitions in the same module"; prerr_endline " --parse-comments parse regular comments"; prerr_endline " --plain-comments consider comments as non-literate text"; prerr_endline " --toc-depth don't include TOC entries for sections below level "; prerr_endline " --no-lib-name don't display \"Library\" before library names in the toc"; prerr_endline " --lib-name call top level toc entries instead of \"Library\""; prerr_endline " --lib-subtitles first line comments of the form (** * ModuleName : text *) will be interpreted as subtitles"; prerr_endline " --inline-notmono use a proportional width font for inline code (possibly with a different color)"; prerr_endline ""; exit 1 (*s \textbf{Banner.} Always printed. Notice that it is printed on error output, so that when the output of [coqdoc] is redirected this header is not (unless both standard and error outputs are redirected, of course). *) let banner () = eprintf "This is coqdoc version %s\n" Coq_config.version; flush stderr let target_full_name f = match !Cdglobals.target_language with | HTML -> f ^ ".html" | Raw -> f ^ ".txt" | _ -> f ^ ".tex" (*s \textbf{Separation of files.} Files given on the command line are separated according to their type, which is determined by their suffix. Coq files have suffixe \verb!.v! or \verb!.g! and \LaTeX\ files have suffix \verb!.tex!. 
*) (* [paths] maps a physical path to a name *) let paths = ref [] let add_path dir name = let p = normalize_path dir in paths := (p,name) :: !paths (* turn A/B/C into A.B.C *) let rec name_of_path p name dirname suffix = if p = dirname then String.concat "." (if name = "" then suffix else (name::suffix)) else let subdir = Filename.dirname dirname in if subdir = dirname then raise Not_found else name_of_path p name subdir (Filename.basename dirname::suffix) let coq_module filename = let bfname = Filename.chop_extension filename in let dirname, fname = normalize_filename bfname in let _ = match Unicode.ident_refutation fname with | Some err -> eprintf "\ncoqdoc: not a valid filename %s.v\n" fname; exit 1 | None -> () in let rec change_prefix = function (* Follow coqc: if in scope of -R, substitute logical name *) (* otherwise, keep only base name *) | [] -> fname | (p, name) :: rem -> try name_of_path p name dirname [fname] with Not_found -> change_prefix rem in change_prefix !paths let what_file f = FileUtil.check_if_file_exists f; if Filename.check_suffix f ".v" || Filename.check_suffix f ".g" then Vernac_file (f, coq_module f) else if Filename.check_suffix f ".tex" then Latex_file f else (eprintf "\ncoqdoc: don't know what to do with %s\n" f; exit 1) (*s \textbf{Parsing of the command line.} *) let compile_targets = ref [] type glob_source_t = NoGlob | DotGlob | GlobFile of string let glob_source = ref DotGlob let warn_on_option_renaming_for_url d = try let https_prefix = String.sub d 0 4 in if String.equal https_prefix "http" then Format.eprintf "warning: use --coqlib_url to specify stdlib's URL starting with Coq 8.14@\n%!" else () with Invalid_argument _ -> () let parse () = let files = ref [] in let add_file f = files := f :: !files in let rec parse_rec = function | [] -> () | ("-nopreamble" | "--nopreamble" | "--no-preamble" | "-bodyonly" | "--bodyonly" | "--body-only") :: rem -> header_trailer := false; parse_rec rem | ("-with-header" | "--with-header") :: f ::rem -> header_trailer := true; header_file_spec := true; header_file := f; parse_rec rem | ("-with-header" | "--with-header") :: [] -> usage () | ("-with-footer" | "--with-footer") :: f ::rem -> header_trailer := true; footer_file_spec := true; footer_file := f; parse_rec rem | ("-with-footer" | "--with-footer") :: [] -> usage () | ("-p" | "--preamble") :: s :: rem -> Output.push_in_preamble s; parse_rec rem | ("-p" | "--preamble") :: [] -> usage () | ("-noindex" | "--noindex" | "--no-index") :: rem -> index := false; parse_rec rem | ("-multi-index" | "--multi-index") :: rem -> multi_index := true; parse_rec rem | ("-index" | "--index") :: s :: rem -> Cdglobals.index_name := s; parse_rec rem | ("-index" | "--index") :: [] -> usage () | ("-toc" | "--toc" | "--table-of-contents") :: rem -> toc := true; parse_rec rem | ("-stdout" | "--stdout") :: rem -> out_to := StdOut; parse_rec rem | ("-o" | "--output") :: f :: rem -> out_to := File (Filename.basename f); output_dir := Filename.dirname f; parse_rec rem | ("-o" | "--output") :: [] -> usage () | ("-d" | "--directory") :: dir :: rem -> output_dir := dir; parse_rec rem | ("-d" | "--directory") :: [] -> usage () | ("-s" | "--short") :: rem -> short := true; parse_rec rem | ("-l" | "-light" | "--light") :: rem -> gallina := true; light := true; parse_rec rem | ("-g" | "-gallina" | "--gallina") :: rem -> gallina := true; parse_rec rem | ("-t" | "-title" | "--title") :: s :: rem -> title := s; parse_rec rem | ("-t" | "-title" | "--title") :: [] -> usage () | ("-latex" | "--latex") :: 
rem -> Cdglobals.target_language := LaTeX; parse_rec rem | ("-pdf" | "--pdf") :: rem -> Cdglobals.target_language := LaTeX; compile_targets := LatexCompiler.Pdf :: !compile_targets; parse_rec rem | ("-dvi" | "--dvi") :: rem -> Cdglobals.target_language := LaTeX; compile_targets := LatexCompiler.Dvi :: !compile_targets; parse_rec rem | ("-ps" | "--ps") :: rem -> Cdglobals.target_language := LaTeX; compile_targets := LatexCompiler.Ps :: !compile_targets; parse_rec rem | ("-html" | "--html") :: rem -> Cdglobals.target_language := HTML; parse_rec rem | ("-texmacs" | "--texmacs") :: rem -> Cdglobals.target_language := TeXmacs; parse_rec rem | ("-raw" | "--raw") :: rem -> Cdglobals.target_language := Raw; parse_rec rem | ("-charset" | "--charset") :: s :: rem -> Cdglobals.charset := s; parse_rec rem | ("-charset" | "--charset") :: [] -> usage () | ("-inputenc" | "--inputenc") :: s :: rem -> Cdglobals.inputenc := s; parse_rec rem | ("-inputenc" | "--inputenc") :: [] -> usage () | ("-raw-comments" | "--raw-comments") :: rem -> Cdglobals.raw_comments := true; parse_rec rem | ("-parse-comments" | "--parse-comments") :: rem -> Cdglobals.parse_comments := true; parse_rec rem | ("-plain-comments" | "--plain-comments") :: rem -> Cdglobals.plain_comments := true; parse_rec rem | ("-interpolate" | "--interpolate") :: rem -> Cdglobals.interpolate := true; parse_rec rem | ("-toc-depth" | "--toc-depth") :: [] -> usage () | ("-toc-depth" | "--toc-depth") :: ds :: rem -> let d = try int_of_string ds with Failure _ -> (eprintf "--toc-depth must be followed by an integer\n"; exit 1) in Cdglobals.toc_depth := Some d; parse_rec rem | ("-no-lib-name" | "--no-lib-name") :: rem -> Cdglobals.lib_name := ""; parse_rec rem | ("-lib-name" | "--lib-name") :: ds :: rem -> Cdglobals.lib_name := ds; parse_rec rem | ("-lib-subtitles" | "--lib-subtitles") :: rem -> Cdglobals.lib_subtitles := true; parse_rec rem | ("-inline-notmono" | "--inline-notmono") :: rem -> Cdglobals.inline_notmono := true; parse_rec rem | ("-latin1" | "--latin1") :: rem -> Cdglobals.set_latin1 (); parse_rec rem | ("-utf8" | "--utf8") :: rem -> Cdglobals.set_utf8 (); parse_rec rem | ("-q" | "-quiet" | "--quiet") :: rem -> quiet := true; parse_rec rem | ("-v" | "-verbose" | "--verbose") :: rem -> quiet := false; parse_rec rem | ("-h" | "-help" | "-?" 
| "--help") :: rem -> banner (); usage () | ("-V" | "-version" | "--version") :: _ -> banner (); exit 0 | ("-vernac-file" | "--vernac-file") :: f :: rem -> FileUtil.check_if_file_exists f; add_file (Vernac_file (f, coq_module f)); parse_rec rem | ("-vernac-file" | "--vernac-file") :: [] -> usage () | ("-tex-file" | "--tex-file") :: f :: rem -> add_file (Latex_file f); parse_rec rem | ("-tex-file" | "--tex-file") :: [] -> usage () | ("-files" | "--files" | "--files-from") :: f :: rem -> List.iter (fun f -> add_file (what_file f)) (FileUtil.files_from_file f); parse_rec rem | ("-files" | "--files") :: [] -> usage () | "-R" :: path :: log :: rem -> add_path path log; parse_rec rem | "-R" :: ([] | [_]) -> usage () | "-Q" :: path :: log :: rem -> add_path path log; parse_rec rem | "-Q" :: ([] | [_]) -> usage () | ("-glob-from" | "--glob-from") :: f :: rem -> glob_source := GlobFile f; parse_rec rem | ("-glob-from" | "--glob-from") :: [] -> usage () | ("-no-glob" | "--no-glob") :: rem -> glob_source := NoGlob; parse_rec rem | ("--no-externals" | "-no-externals" | "-noexternals") :: rem -> Cdglobals.externals := false; parse_rec rem | ("--external" | "-external") :: u :: logicalpath :: rem -> Index.add_external_library logicalpath u; parse_rec rem | ("--coqlib_url" | "-coqlib_url") :: u :: rem -> Cdglobals.coqlib_url := u; parse_rec rem | ("--coqlib_url" | "-coqlib_url") :: [] -> usage () | ("--boot" | "-boot") :: rem -> (* XXX: This is useless it seems *) parse_rec rem | ("--coqlib" | "-coqlib") :: d :: rem -> warn_on_option_renaming_for_url d; Boot.Env.set_coqlib d; parse_rec rem | ("--coqlib" | "-coqlib") :: [] -> usage () | f :: rem -> add_file (what_file f); parse_rec rem in parse_rec (List.tl (Array.to_list Sys.argv)); List.rev !files (*s The following function produces the output. The default output is the \LaTeX\ document: in that case, we just call [Web.produce_document]. If option \verb!-dvi!, \verb!-ps! or \verb!-html! is invoked, then we make calls to \verb!latex! or \verb!dvips! or \verb!pdflatex! accordingly. *) (*s Functions for generating output files *) let gen_one_file l = let file = function | Vernac_file (f,m) -> let sub = if !lib_subtitles then Cpretty.detect_subtitle f m else None in Output.set_module m sub; Cpretty.coq_file f m | Latex_file _ -> () in if (!header_trailer) then Output.header (); if !toc then Output.make_toc (); List.iter file l; if !index then Output.make_index(); if (!header_trailer) then Output.trailer () let gen_mult_files l = let file = function | Vernac_file (f,m) -> let sub = if !lib_subtitles then Cpretty.detect_subtitle f m else None in let hf = target_full_name m in Output.set_module m sub; open_out_file hf; if (!header_trailer) then Output.header (); Cpretty.coq_file f m; if (!header_trailer) then Output.trailer (); close_out_file() | Latex_file _ -> () in List.iter file l; if (!index && !target_language=HTML) then begin if (!multi_index) then Output.make_multi_index (); open_out_file (!index_name^".html"); page_title := (if !title <> "" then !title else "Index"); if (!header_trailer) then Output.header (); Output.make_index (); if (!header_trailer) then Output.trailer (); close_out_file() end; if (!toc && !target_language=HTML) then begin open_out_file "toc.html"; page_title := (if !title <> "" then !title else "Table of contents"); if (!header_trailer) then Output.header (); if !title <> "" then printf "

<h1>%s</h1>
\n" !title; Output.make_toc (); if (!header_trailer) then Output.trailer (); close_out_file() end (* NB: for latex and texmacs, a separated toc or index is meaningless... *) let read_glob_file vfile f = try Glob_file.read_glob vfile f with Sys_error s -> eprintf "Warning: %s (links will not be available)\n" s let read_glob_file_of = function | Vernac_file (f,_) -> read_glob_file (Some f) (Filename.chop_extension f ^ ".glob") | Latex_file _ -> () let index_module = function | Vernac_file (f,m) -> Index.add_module m | Latex_file _ -> () module E = Boot.Env let copy_style_file file = (* We give preference to coqlib in case it is overriden *) let env = E.init () in let coqdoc = E.tool env "coqdoc" in let sty_file = E.Path.relative coqdoc file in if not (E.Path.exists sty_file) then begin let sty_file = E.Path.to_string sty_file in eprintf "coqdoc: cannot find coqdoc style file: %s\n" sty_file; exit 1 end; let sty_file_s = E.Path.to_string sty_file in let dst = coqdoc_out file in FileUtil.copy sty_file_s dst let produce_document l = if !target_language=HTML then copy_style_file "coqdoc.css"; if !target_language=LaTeX then copy_style_file "coqdoc.sty"; (match !glob_source with | NoGlob -> () | DotGlob -> List.iter read_glob_file_of l | GlobFile f -> read_glob_file None f); List.iter index_module l; match !out_to with | StdOut -> Cdglobals.out_channel := stdout; gen_one_file l | File f -> open_out_file f; gen_one_file l; close_out_file() | MultFiles -> gen_mult_files l let produce_output fl = if List.length !compile_targets = 0 then produce_document fl else let otypes = !compile_targets in LatexCompiler.compile ~otypes ~produce_document fl (*s \textbf{Main program.} Print the banner, parse the command line, read the files and then call [produce_document] from module [Web]. 
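End to end, an invocation such as "coqdoc --pdf -o Foo.pdf Foo.v" (file names
   hypothetical) therefore first produces the LaTeX document, after which
   [LatexCompiler.compile] runs pdflatex twice on a temporary copy, copies the
   resulting PDF to the requested output and removes its temporary files.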
*) let _ = let files = parse () in Index.init_coqlib_library (); if not !quiet then banner (); if files <> [] then produce_output files coq-8.15.0/tools/coqdoc/main.mli000066400000000000000000000012431417001151100164150ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Hashtbl.add h key ()) l; function s -> try Hashtbl.find h s; true with Not_found -> false let is_keyword = build_table [ "About"; "Axiom"; "Abort"; "Chapter"; "Check"; "Coercion"; "Compute"; "CoFixpoint"; "CoInductive"; "Corollary"; "Defined"; "Definition"; "End"; "Eval"; "Example"; "Export"; "Fact"; "Fix"; "Fixpoint"; "From"; "Function"; "Generalizable"; "Global"; "Grammar"; "Guarded"; "Goal"; "Hint"; "Debug"; "On"; "Hypothesis"; "Hypotheses"; "Resolve"; "Unfold"; "Immediate"; "Extern"; "Constructors"; "Rewrite"; "Implicit"; "Import"; "Inductive"; "Infix"; "Lemma"; "Let"; "Load"; "Local"; "Locate"; "Ltac"; "Module"; "Module Type"; "Declare Module"; "Include"; "Mutual"; "Parameter"; "Parameters"; "Print"; "Printing"; "All"; "Proof"; "Proof with"; "Qed"; "Record"; "Recursive"; "Remark"; "Require"; "Save"; "Scheme"; "Assumptions"; "Axioms"; "Universes"; "Induction"; "for"; "Sort"; "Section"; "Show"; "Structure"; "Syntactic"; "Syntax"; "Tactic"; "Theorem"; "Search"; "SearchPattern"; "SearchRewrite"; "Set"; "Types"; "Undo"; "Unset"; "Variable"; "Variables"; "Context"; "Notation"; "Reserved Notation"; "Tactic Notation"; "Delimit"; "Bind"; "Open"; "Scope"; "Inline"; "Implicit Arguments"; "Add"; "Strict"; "Typeclasses"; "Instance"; "Global Instance"; "Class"; "Instantiation"; "goal"; "goals"; "vm_compute"; "Opaque"; "Transparent"; "Time"; "Extraction"; "Extract"; "Variant"; (* Program *) "Program Definition"; "Program Example"; "Program Fixpoint"; "Program Lemma"; "Obligation"; "Obligations"; "Solve"; "using"; "Next Obligation"; "Next"; "Program Instance"; "Equations"; "Equations_nocomp"; (*i (* coq terms *) *) "forall"; "match"; "as"; "in"; "return"; "with"; "end"; "let"; "fun"; "if"; "then"; "else"; "Prop"; "Set"; "Type"; ":="; "where"; "struct"; "wf"; "measure"; "fix"; "cofix"; "is"; (* Ltac *) "before"; "after"; "constr"; "ltac"; "goal"; "context"; "beta"; "delta"; "iota"; "zeta"; "lazymatch"; "type"; "of"; "rec"; (* Notations *) "level"; "associativity"; "no" ] let is_tactic = build_table [ "intro"; "intros"; "apply"; "rewrite"; "refine"; "case"; "clear"; "injection"; "elimtype"; "progress"; "setoid_rewrite"; "left"; "right"; "constructor"; "econstructor"; "decide equality"; "abstract"; "exists"; "cbv"; "simple destruct"; "info"; "field"; "specialize"; "evar"; "solve"; "instantiate"; "info_auto"; "info_eauto"; "quote"; "eexact"; "autorewrite"; "destruct"; "destruction"; "destruct_call"; "dependent"; "elim"; "extensionality"; "f_equal"; "generalize"; "generalize_eqs"; "generalize_eqs_vars"; "induction"; "rename"; "move"; "set"; "assert"; "do"; "repeat"; "cut"; "assumption"; "exact"; "split"; "subst"; "try"; "discriminate"; "simpl"; "unfold"; "red"; "compute"; "at"; "in"; "by"; "reflexivity"; "symmetry"; "transitivity"; "replace"; "setoid_replace"; "inversion"; "inversion_clear"; "pattern"; "intuition"; "congruence"; "fail"; "fresh"; "trivial"; "tauto"; "firstorder"; "ring"; "clapply"; "program_simpl"; "program_simplify"; "eapply"; "auto"; "eauto"; "change"; "fold"; "hnf"; "lazy"; "simple"; "eexists"; "debug"; "idtac"; "first"; "type of"; "pose"; "eval"; 
"instantiate"; "until" ] (*s Current Coq module *) let current_module : (string * string option) ref = ref ("",None) let get_module withsub = let (m,sub) = !current_module in if withsub then match sub with | None -> m | Some sub -> m ^ ": " ^ sub else m let set_module m sub = current_module := (m,sub); page_title := get_module true (*s Common to both LaTeX and HTML *) let item_level = ref 0 let in_doc = ref false (*s Customized and predefined pretty-print *) let initialize_texmacs () = let ensuremath x = sprintf ">" x in List.fold_right (fun (s,t) tt -> Tokens.ttree_add tt s t) [ "*", ensuremath "times"; "->", ensuremath "rightarrow"; "<-", ensuremath "leftarrow"; "<->", ensuremath "leftrightarrow"; "=>", ensuremath "Rightarrow"; "<=", ensuremath "le"; ">=", ensuremath "ge"; "<>", ensuremath "noteq"; "~", ensuremath "lnot"; "/\\", ensuremath "land"; "\\/", ensuremath "lor"; "|-", ensuremath "vdash" ] Tokens.empty_ttree let token_tree_texmacs = ref (initialize_texmacs ()) let token_tree_latex = ref Tokens.empty_ttree let token_tree_html = ref Tokens.empty_ttree let initialize_tex_html () = let if_utf8 = if !Cdglobals.utf8 then fun x -> Some x else fun _ -> None in let (tree_latex, tree_html) = List.fold_right (fun (s,l,l') (tt,tt') -> (Tokens.ttree_add tt s l, match l' with None -> tt' | Some l' -> Tokens.ttree_add tt' s l')) [ "*" , "\\ensuremath{\\times}", if_utf8 "×"; "|", "\\ensuremath{|}", None; "->", "\\ensuremath{\\rightarrow}", if_utf8 "→"; "->~", "\\ensuremath{\\rightarrow\\lnot}", None; "->~~", "\\ensuremath{\\rightarrow\\lnot\\lnot}", None; "<-", "\\ensuremath{\\leftarrow}", None; "<->", "\\ensuremath{\\leftrightarrow}", if_utf8 "↔"; "=>", "\\ensuremath{\\Rightarrow}", if_utf8 "⇒"; "<=", "\\ensuremath{\\le}", if_utf8 "≤"; ">=", "\\ensuremath{\\ge}", if_utf8 "≥"; "<>", "\\ensuremath{\\not=}", if_utf8 "≠"; "~", "\\ensuremath{\\lnot}", if_utf8 "¬"; "/\\", "\\ensuremath{\\land}", if_utf8 "∧"; "\\/", "\\ensuremath{\\lor}", if_utf8 "∨"; "|-", "\\ensuremath{\\vdash}", None; "forall", "\\ensuremath{\\forall}", if_utf8 "∀"; "exists", "\\ensuremath{\\exists}", if_utf8 "∃"; "Π", "\\ensuremath{\\Pi}", if_utf8 "Π"; "λ", "\\ensuremath{\\lambda}", if_utf8 "λ"; (* "fun", "\\ensuremath{\\lambda}" ? 
*) ] (Tokens.empty_ttree,Tokens.empty_ttree) in token_tree_latex := tree_latex; token_tree_html := tree_html let add_printing_token s (t1,t2) = (match t1 with None -> () | Some t1 -> token_tree_latex := Tokens.ttree_add !token_tree_latex s t1); (match t2 with None -> () | Some t2 -> token_tree_html := Tokens.ttree_add !token_tree_html s t2) let remove_printing_token s = token_tree_latex := Tokens.ttree_remove !token_tree_latex s; token_tree_html := Tokens.ttree_remove !token_tree_html s (*s Table of contents *) type toc_entry = | Toc_library of string * string option | Toc_section of int * (unit -> unit) * string let (toc_q : toc_entry Queue.t) = Queue.create () let add_toc_entry e = Queue.add e toc_q let new_label = let r = ref 0 in fun () -> incr r; "lab" ^ string_of_int !r (*s LaTeX output *) module Latex = struct let in_title = ref false (*s Latex preamble *) let (preamble : string Queue.t) = Queue.create () let push_in_preamble s = Queue.add s preamble let utf8x_extra_support () = printf "\n"; printf "%%Warning: tipa declares many non-standard macros used by utf8x to\n"; printf "%%interpret utf8 characters but extra packages might have to be added\n"; printf "%%such as \"textgreek\" for Greek letters not already in tipa\n"; printf "%%or \"stmaryrd\" for mathematical symbols.\n"; printf "%%Utf8 codes missing a LaTeX interpretation can be defined by using\n"; printf "%%\\DeclareUnicodeCharacter{code}{interpretation}.\n"; printf "%%Use coqdoc's option -p to add new packages or declarations.\n"; printf "\\usepackage{tipa}\n"; printf "\n" let header () = if !header_trailer then begin printf "\\documentclass[12pt]{report}\n"; if !inputenc != "" then printf "\\usepackage[%s]{inputenc}\n" !inputenc; if !inputenc = "utf8x" then utf8x_extra_support (); printf "\\usepackage[T1]{fontenc}\n"; printf "\\usepackage{fullpage}\n"; printf "\\usepackage{coqdoc}\n"; printf "\\usepackage{amsmath,amssymb}\n"; printf "\\usepackage{url}\n"; (match !toc_depth with | None -> () | Some n -> printf "\\setcounter{tocdepth}{%i}\n" n); Queue.iter (fun s -> printf "%s\n" s) preamble; printf "\\begin{document}\n" end; output_string "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n"; output_string "%% This file has been automatically generated with the command\n"; output_string "%% "; Array.iter (fun s -> printf "%s " s) Sys.argv; printf "\n"; output_string "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n" let trailer () = if !header_trailer then begin printf "\\end{document}\n" end (*s Latex low-level translation *) let nbsp () = output_char '~' let char c = match c with | '\\' -> printf "\\symbol{92}" | '$' | '#' | '%' | '&' | '{' | '}' | '_' -> output_char '\\'; output_char c | '^' | '~' -> output_char '\\'; output_char c; printf "{}" | _ -> output_char c let label_char c = match c with | '_' -> output_char ' ' | '\\' | '$' | '#' | '%' | '&' | '{' | '}' | '^' | '~' -> printf "x%X" (Char.code c) | _ -> if c >= '\x80' then printf "x%X" (Char.code c) else output_char c let label_ident s = for i = 0 to String.length s - 1 do label_char s.[i] done let latex_char = output_char let latex_string = output_string let html_char _ = () let html_string _ = () (*s Latex char escaping *) let escaped = let buff = Buffer.create 5 in fun s -> Buffer.clear buff; for i = 0 to String.length s - 1 do match s.[i] with | '\\' -> Buffer.add_string buff "\\symbol{92}" | '$' | '#' | '%' | '&' | '{' | '}' | '_' as c -> Buffer.add_char buff '\\'; Buffer.add_char buff c | '^' | '~' as c -> Buffer.add_char 
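(* A small usage sketch (illustrative, not part of the original code): extra
   rendering rules can be registered through [add_printing_token], defined
   above, whose second argument is the pair (optional LaTeX translation,
   optional HTML translation); [None] on one side leaves that backend
   unchanged, and [remove_printing_token] undoes the registration.  The token
   "|->" and both renderings below are made up for the example.

     let _register_mapsto () =
       add_printing_token "|->" (Some "\\ensuremath{\\mapsto}", Some "&#8614;");
       remove_printing_token "|->"
*)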
buff '\\'; Buffer.add_char buff c; Buffer.add_string buff "{}" | '\'' -> if i < String.length s - 1 && s.[i+1] = '\'' then begin Buffer.add_char buff '\''; Buffer.add_char buff '{'; Buffer.add_char buff '}' end else Buffer.add_char buff '\'' | c -> Buffer.add_char buff c done; Buffer.contents buff (*s Latex reference and symbol translation *) let start_module () = let ln = !lib_name in if not !short then begin printf "\\coqlibrary{"; label_ident (get_module false); printf "}{"; if ln <> "" then printf "%s " ln; printf "}{%s}\n\n" (escaped (get_module true)) end let start_latex_math () = output_char '$' let stop_latex_math () = output_char '$' let start_quote () = output_char '`'; output_char '`' let stop_quote () = output_char '\''; output_char '\'' let start_verbatim inline = if inline then printf "\\texttt{" else printf "\\begin{verbatim}\n" let stop_verbatim inline = if inline then printf "}" else printf "\\end{verbatim}\n" let url addr name = printf "%s\\footnote{\\url{%s}}" (match name with | None -> "" | Some n -> n) addr let indentation n = if n == 0 then printf "\\coqdocnoindent\n" else let space = 0.5 *. (float n) in printf "\\coqdocindent{%2.2fem}\n" space let ident_ref m fid typ s = let id = if fid <> "" then (m ^ "." ^ fid) else m in match find_module m with | Local -> printf "\\coqref{"; label_ident id; printf "}{\\coqdoc%s{%s}}" (type_name typ) s | External m when !externals -> printf "\\coqexternalref{"; label_ident fid; printf "}{%s}{\\coqdoc%s{%s}}" (escaped m) (type_name typ) s | External _ | Unknown -> printf "\\coqdoc%s{%s}" (type_name typ) s let defref m id ty s = if ty <> Notation then (printf "\\coqdef{"; label_ident (m ^ "." ^ id); printf "}{%s}{\\coqdoc%s{%s}}" s (type_name ty) s) else (* Glob file still not able to say the exact extent of the definition *) (* so we currently renounce to highlight the notation location *) (printf "\\coqdef{"; label_ident (m ^ "." 
^ id); printf "}{%s}{%s}" s s) let reference s = function | Def (fullid,typ) -> defref (get_module false) fullid typ s | Ref (m,fullid,typ) -> ident_ref m fullid typ s (*s The sublexer buffers symbol characters and attached uninterpreted ident and try to apply special translation such as, predefined, translation "->" to "\ensuremath{\rightarrow}" or, virtually, a user-level translation from "=_h" to "\ensuremath{=_{h}}" *) let output_sublexer_string doescape issymbchar tag s = let s = if doescape then escaped s else s in match tag with | Some ref -> reference s ref | None -> if issymbchar then output_string s else printf "\\coqdocvar{%s}" s let last_was_in = ref false let sublexer c loc = if c = '*' && !last_was_in then begin Tokens.flush_sublexer (); output_char '*' end else begin let tag = try Some (Index.find (get_module false) loc) with Not_found -> None in Tokens.output_tagged_symbol_char tag c end; last_was_in := false let sublexer_in_doc c = if c = '*' && !last_was_in then begin Tokens.flush_sublexer (); output_char '*' end else Tokens.output_tagged_symbol_char None c; last_was_in := false let initialize () = initialize_tex_html (); Tokens.token_tree := token_tree_latex; Tokens.outfun := output_sublexer_string (*s Interpreting ident with fallback on sublexer if unknown ident *) let translate s = match Tokens.translate s with Some s -> s | None -> escaped s let keyword s loc = printf "\\coqdockw{%s}" (translate s) let ident s loc = last_was_in := s = "in"; try match loc with | None -> raise Not_found | Some loc -> let tag = Index.find (get_module false) loc in reference (translate s) tag with Not_found -> if is_tactic s then printf "\\coqdoctac{%s}" (translate s) else if is_keyword s then printf "\\coqdockw{%s}" (translate s) else if !Cdglobals.interpolate && !in_doc (* always a var otherwise *) then try let tag = Index.find_string s in reference (translate s) tag with _ -> Tokens.output_tagged_ident_string s else Tokens.output_tagged_ident_string s let ident s l = if !in_title then ( printf "\\texorpdfstring{\\protect"; ident s l; printf "}{%s}" (translate s)) else ident s l (*s Translating structure *) let proofbox () = printf "\\ensuremath{\\Box}" let rec reach_item_level n = if !item_level < n then begin printf "\n\\begin{itemize}\n\\item "; incr item_level; reach_item_level n end else if !item_level > n then begin printf "\n\\end{itemize}\n"; decr item_level; reach_item_level n end let item n = let old_level = !item_level in reach_item_level n; if n <= old_level then printf "\n\\item " let stop_item () = reach_item_level 0 let start_doc () = in_doc := true let end_doc () = in_doc := false; stop_item () (* This is broken if we are in math mode, but coqdoc currently isn't tracking that *) let start_emph () = printf "\\textit{" let stop_emph () = printf "}" let start_details _ = () let stop_details () = () let start_comment () = printf "\\begin{coqdoccomment}\n" let end_comment () = printf "\\end{coqdoccomment}\n" let start_coq () = printf "\\begin{coqdoccode}\n" let end_coq () = printf "\\end{coqdoccode}\n" let section_kind = function | 1 -> "\\section{" | 2 -> "\\subsection{" | 3 -> "\\subsubsection{" | 4 -> "\\paragraph{" | _ -> assert false let section lev f = stop_item (); output_string (section_kind lev); in_title := true; f (); in_title := false; printf "}\n\n" let rule () = printf "\\par\n\\noindent\\hrulefill\\par\n\\noindent{}" let paragraph () = printf "\n\n" let line_break () = printf "\\coqdoceol\n" let empty_line_of_code () = printf "\\coqdocemptyline\n" let 
start_inline_coq_block () = line_break (); empty_line_of_code () let end_inline_coq_block () = empty_line_of_code () let start_inline_coq () = () let end_inline_coq () = () let make_multi_index () = () let make_index () = () let make_toc () = printf "\\tableofcontents\n" end (*s HTML output *) module Html = struct let header () = if !header_trailer then if !header_file_spec then let cin = open_in !header_file in try while true do let s = input_line cin in printf "%s\n" s done with End_of_file -> close_in cin else begin printf "\n"; printf "\n\n"; printf "\n" !charset; printf "\n"; printf "%s\n\n\n" !page_title; printf "\n\n
\n\n\n" end let start_module () = let ln = !lib_name in if not !short then begin let (m,sub) = !current_module in add_toc_entry (Toc_library (m,sub)); if ln = "" then printf "

<h1 class=\"libtitle\">%s</h1>
\n\n" (get_module true) else printf "

<h1 class=\"libtitle\">%s %s</h1>
\n\n" ln (get_module true) end let indentation n = for _i = 1 to n do printf " " done let line_break () = printf "
<br/>\n" let empty_line_of_code () = printf "\n<br/>
\n" let nbsp () = printf " " let char = function | '<' -> printf "<" | '>' -> printf ">" | '&' -> printf "&" | c -> output_char c let escaped = let buff = Buffer.create 5 in fun s -> Buffer.clear buff; for i = 0 to String.length s - 1 do match s.[i] with | '<' -> Buffer.add_string buff "<" | '>' -> Buffer.add_string buff ">" | '&' -> Buffer.add_string buff "&" | '\"' -> Buffer.add_string buff """ | c -> Buffer.add_char buff c done; Buffer.contents buff let sanitize_name s = let rec loop esc i = if i < 0 then if esc then escaped s else s else match s.[i] with | 'a'..'z' | 'A'..'Z' | '0'..'9' | '.' | '_' -> loop esc (i-1) | '<' | '>' | '&' | '\'' | '\"' -> loop true (i-1) | '-' | ':' -> loop esc (i-1) (* should be safe in HTML5 attribute name syntax *) | _ -> (* This name contains complex characters: this is probably a notation string, we simply hash it. *) Digest.to_hex (Digest.string s) in loop false (String.length s - 1) let latex_char _ = () let latex_string _ = () let html_char = output_char let html_string = output_string let start_latex_math () = () let stop_latex_math () = () let start_quote () = char '"' let stop_quote () = start_quote () let start_verbatim inline = if inline then printf "" else printf "
\n"

  let stop_verbatim inline =
    if inline then printf ""
    else printf "
\n" let url addr name = printf "%s" addr (match name with | Some n -> n | None -> addr) let ident_ref m fid typ s = match find_module m with | Local -> printf "" m (sanitize_name fid); printf "%s" typ s | External m when !externals -> printf "" m (sanitize_name fid); printf "%s" typ s | External _ | Unknown -> printf "%s" typ s let reference s r = match r with | Def (fullid,ty) -> let s' = sanitize_name fullid in printf "" s' s'; printf "%s" (type_name ty) s | Ref (m,fullid,ty) -> ident_ref m fullid (type_name ty) s let output_sublexer_string doescape issymbchar tag s = let s = if doescape then escaped s else s in match tag with | Some ref -> reference s ref | None -> if issymbchar then output_string s else printf "%s" s let sublexer c loc = let tag = try Some (Index.find (get_module false) loc) with Not_found -> None in Tokens.output_tagged_symbol_char tag c let sublexer_in_doc c = Tokens.output_tagged_symbol_char None c let initialize () = initialize_tex_html(); Tokens.token_tree := token_tree_html; Tokens.outfun := output_sublexer_string let translate s = match Tokens.translate s with Some s -> s | None -> escaped s let keyword s loc = printf "%s" (translate s) let ident s loc = try match loc with | None -> raise Not_found | Some loc -> reference (translate s) (Index.find (get_module false) loc) with Not_found -> if is_tactic s then printf "%s" (translate s) else if is_keyword s then printf "%s" (translate s) else if !Cdglobals.interpolate && !in_doc (* always a var otherwise *) then try reference (translate s) (Index.find_string s) with Not_found -> Tokens.output_tagged_ident_string s else Tokens.output_tagged_ident_string s let proofbox () = printf "" let rec reach_item_level n = if !item_level < n then begin printf "
    \n
  • "; incr item_level; reach_item_level n end else if !item_level > n then begin printf "\n
  • \n
\n"; decr item_level; reach_item_level n end let item n = let old_level = !item_level in reach_item_level n; if n <= old_level then printf "\n\n
  • " let stop_item () = reach_item_level 0 let start_coq () = if not !raw_comments then printf "
<div class=\"code\">\n" let end_coq () = if not !raw_comments then printf "</div>
\n" let start_doc () = in_doc := true; if not !raw_comments then printf "\n<div class=\"doc\">
\n" let end_doc () = in_doc := false; stop_item (); if not !raw_comments then printf "</div>
    \n" let start_emph () = printf "" let stop_emph () = printf "" let start_details = function | Some s -> printf "
    %s" s | _ -> printf "
    " let stop_details () = printf "
    " let start_comment () = printf "(*" let end_comment () = printf "*)" let start_inline_coq () = if !inline_notmono then printf "" else printf "" let end_inline_coq () = printf "" let start_inline_coq_block () = line_break (); start_inline_coq () let end_inline_coq_block () = end_inline_coq () let paragraph () = printf "\n
    \n\n" (* inference rules *) let inf_rule assumptions (_,_,midnm) conclusions = (* this first function replaces any occurrence of 3 or more spaces in a row with " "s. We do this to the assumptions so that people can put multiple rules on a line with nice formatting *) let replace_spaces str = let rec copy a n = match n with 0 -> [] | n -> (a :: copy a (n - 1)) in let results = Str.full_split (Str.regexp "[' '][' '][' ']+") str in let strs = List.map (fun r -> match r with | Str.Text s -> [s] | Str.Delim s -> copy " " (String.length s)) results in String.concat "" (List.concat strs) in let start_assumption line = (printf "\n"; printf " %s\n" (replace_spaces line)) in let end_assumption () = (printf " \n"; printf "\n") in let rec print_assumptions hyps = match hyps with | [] -> start_assumption "  " | [(_,hyp)] -> start_assumption hyp | ((_,hyp) :: hyps') -> (start_assumption hyp; end_assumption (); print_assumptions hyps') in printf "
    \n"; print_assumptions assumptions; printf " " | Some s -> printf " %s  \n " s); printf "\n"; printf "\n"; printf " \n"; printf "\n"; print_assumptions conclusions; end_assumption (); printf "
    \n"; (match midnm with | None -> printf "  \n

    " let section lev f = let lab = new_label () in let r = sprintf "%s.html#%s" (get_module false) lab in (match !toc_depth with | None -> add_toc_entry (Toc_section (lev, f, r)) | Some n -> if lev <= n then add_toc_entry (Toc_section (lev, f, r)) else ()); stop_item (); printf "" lab lev; f (); printf "\n" lev let rule () = printf "
    \n" (* make a HTML index from a list of triples (name,text,link) *) let index_ref i c = let idxc = sprintf "%s_%c" i.idx_name c in !index_name ^ (if !multi_index then "_" ^ idxc ^ ".html" else ".html#" ^ idxc) let letter_index category idx (c,l) = if l <> [] then begin let cat = if category && idx <> "global" then "(" ^ idx ^ ")" else "" in printf "

    %s %s

    \n" idx c (display_letter c) cat; List.iter (fun (id,(text,link,t)) -> let id' = escaped (prepare_entry id t) in printf "%s %s
    \n" link id' text) l; printf "

    " end let all_letters i = List.iter (letter_index false i.idx_name) i.idx_entries (* Construction d'une liste des index (1 index global, puis 1 index par catégorie) *) let format_global_index = Index.map (fun s (m,t) -> if t = Library then let ln = !lib_name in if ln <> "" then "[" ^ String.lowercase_ascii ln ^ "]", m ^ ".html", t else "[library]", m ^ ".html", t else sprintf "[%s, in %s]" (type_name t) m m , sprintf "%s.html#%s" m (sanitize_name s), t) let format_bytype_index = function | Library, idx -> Index.map (fun id m -> "", m ^ ".html", Library) idx | (t,idx) -> Index.map (fun s m -> let text = sprintf "[in %s]" m m in (text, sprintf "%s.html#%s" m (sanitize_name s), t)) idx (* Impression de la table d'index *) let print_index_table_item i = printf "\n%s Index\n" (String.capitalize_ascii i.idx_name); List.iter (fun (c,l) -> if l <> [] then printf "%s\n" (index_ref i c) (display_letter c) else printf "%s\n" (display_letter c)) i.idx_entries; let n = i.idx_size in printf "(%d %s)\n" n (if n > 1 then "entries" else "entry"); printf "\n" let print_index_table idxl = printf "\n"; List.iter print_index_table_item idxl; printf "
    \n" let make_one_multi_index prt_tbl i = (* Attn: make_one_multi_index crée un nouveau fichier... *) let idx = i.idx_name in let one_letter ((c,l) as cl) = open_out_file (sprintf "%s_%s_%c.html" !index_name idx c); if (!header_trailer) then header (); prt_tbl (); printf "
    "; letter_index true idx cl; if List.length l > 30 then begin printf "
    "; prt_tbl () end; if (!header_trailer) then trailer (); close_out_file () in List.iter one_letter i.idx_entries let make_multi_index () = let all_index = let glob,bt = Index.all_entries () in (format_global_index glob) :: (List.map format_bytype_index bt) in let print_table () = print_index_table all_index in List.iter (make_one_multi_index print_table) all_index let make_index () = let all_index = let glob,bt = Index.all_entries () in (format_global_index glob) :: (List.map format_bytype_index bt) in let print_table () = print_index_table all_index in let print_one_index i = if i.idx_size > 0 then begin printf "
    \n

    %s Index

    \n" (String.capitalize_ascii i.idx_name); all_letters i end in set_module "Index" None; if !title <> "" then printf "

<h1>%s</h1>
    \n" !title; print_table (); if not (!multi_index) then begin List.iter print_one_index all_index; printf "
    "; print_table () end let make_toc () = let ln = !lib_name in let make_toc_entry = function | Toc_library (m,sub) -> stop_item (); let ms = match sub with | None -> m | Some s -> m ^ ": " ^ s in if ln = "" then printf "

    %s

    \n" m ms else printf "

    %s %s

    \n" m ln ms | Toc_section (n, f, r) -> item n; printf "" r; f (); printf "\n" in printf "
    \n"; Queue.iter make_toc_entry toc_q; stop_item (); printf "
    \n" end (*s TeXmacs-aware output *) module TeXmacs = struct (*s Latex preamble *) let (_ : string Queue.t) = in_doc := false; Queue.create () let header () = output_string "(*i This file has been automatically generated with the command \n"; output_string " "; Array.iter (fun s -> printf "%s " s) Sys.argv; printf " *)\n" let trailer () = () let nbsp () = output_char ' ' let char_true c = match c with | '\\' -> printf "\\\\" | '<' -> printf "\\<" | '|' -> printf "\\|" | '>' -> printf "\\>" | _ -> output_char c let char c = if !in_doc then char_true c else output_char c let latex_char = char_true let latex_string = String.iter latex_char let html_char _ = () let html_string _ = () let raw_ident s = for i = 0 to String.length s - 1 do char s.[i] done let start_module () = () let start_latex_math () = printf "' let start_verbatim inline = in_doc := true; printf "<\\verbatim>" let stop_verbatim inline = in_doc := false; printf "" let url addr name = printf "%s<\\footnote><\\url>%s" addr (match name with | None -> "" | Some n -> n) let start_quote () = output_char '`'; output_char '`' let stop_quote () = output_char '\''; output_char '\'' let indentation n = () let keyword s = printf "" let ident_true s = if is_keyword s then keyword s else raw_ident s let keyword s loc = keyword s let ident s _ = if !in_doc then ident_true s else raw_ident s let output_sublexer_string doescape issymbchar tag s = if doescape then raw_ident s else output_string s let sublexer c l = if !in_doc then Tokens.output_tagged_symbol_char None c else char c let sublexer_in_doc c = char c let initialize () = Tokens.token_tree := token_tree_texmacs; Tokens.outfun := output_sublexer_string let proofbox () = printf "QED" let rec reach_item_level n = if !item_level < n then begin printf "\n<\\itemize>\n"; incr item_level; reach_item_level n end else if !item_level > n then begin printf "\n"; decr item_level; reach_item_level n end let item n = let old_level = !item_level in reach_item_level n; if n <= old_level then printf "\n\n" let stop_item () = reach_item_level 0 let start_doc () = in_doc := true; printf "(** texmacs: " let end_doc () = stop_item (); in_doc := false; printf " *)" let start_coq () = () let end_coq () = () let start_emph () = printf "" let start_details _ = () let stop_details () = () let start_comment () = () let end_comment () = () let section_kind = function | 1 -> "section" | 2 -> "subsection" | 3 -> "subsubsection" | 4 -> "paragraph" | _ -> assert false let section lev f = stop_item (); printf "<"; output_string (section_kind lev); printf "|"; f (); printf ">\n\n" let rule () = printf "\n\n" let paragraph () = printf "\n\n" let line_break () = printf "\n" let empty_line_of_code () = printf "\n" let start_inline_coq () = printf "" let start_inline_coq_block () = line_break (); start_inline_coq () let end_inline_coq_block () = end_inline_coq () let make_multi_index () = () let make_index () = () let make_toc () = () end (*s Raw output *) module Raw = struct let header () = () let trailer () = () let nbsp () = output_char ' ' let char = output_char let latex_char = output_char let latex_string = output_string let html_char _ = () let html_string _ = () let raw_ident s = for i = 0 to String.length s - 1 do char s.[i] done let start_module () = () let start_latex_math () = () let stop_latex_math () = () let start_verbatim inline = () let stop_verbatim inline = () let url addr name = match name with | Some n -> printf "%s (%s)" n addr | None -> printf "%s" addr let start_quote () = printf "\"" let 
stop_quote () = printf "\"" let indentation n = for _i = 1 to n do printf " " done let keyword s loc = raw_ident s let ident s loc = raw_ident s let sublexer c l = char c let sublexer_in_doc c = char c let initialize () = Tokens.token_tree := ref Tokens.empty_ttree; Tokens.outfun := (fun _ _ _ _ -> failwith "Useless") let proofbox () = printf "[]" let item n = printf "- " let stop_item () = () let reach_item_level _ = () let start_doc () = printf "(** " let end_doc () = printf " *)\n" let start_emph () = printf "_" let stop_emph () = printf "_" let start_details _ = () let stop_details () = () let start_comment () = printf "(*" let end_comment () = printf "*)" let start_coq () = () let end_coq () = () let section_kind = function | 1 -> "* " | 2 -> "** " | 3 -> "*** " | 4 -> "**** " | _ -> assert false let section lev f = output_string (section_kind lev); f () let rule () = () let paragraph () = printf "\n\n" let line_break () = printf "\n" let empty_line_of_code () = printf "\n" let start_inline_coq () = () let end_inline_coq () = () let start_inline_coq_block () = line_break (); start_inline_coq () let end_inline_coq_block () = end_inline_coq () let make_multi_index () = () let make_index () = () let make_toc () = () end (*s Generic output *) let select f1 f2 f3 f4 x = match !target_language with LaTeX -> f1 x | HTML -> f2 x | TeXmacs -> f3 x | Raw -> f4 x let push_in_preamble = Latex.push_in_preamble let header = select Latex.header Html.header TeXmacs.header Raw.header let trailer = select Latex.trailer Html.trailer TeXmacs.trailer Raw.trailer let start_module = select Latex.start_module Html.start_module TeXmacs.start_module Raw.start_module let start_doc = select Latex.start_doc Html.start_doc TeXmacs.start_doc Raw.start_doc let end_doc = select Latex.end_doc Html.end_doc TeXmacs.end_doc Raw.end_doc let start_comment = select Latex.start_comment Html.start_comment TeXmacs.start_comment Raw.start_comment let end_comment = select Latex.end_comment Html.end_comment TeXmacs.end_comment Raw.end_comment let start_coq = select Latex.start_coq Html.start_coq TeXmacs.start_coq Raw.start_coq let end_coq = select Latex.end_coq Html.end_coq TeXmacs.end_coq Raw.end_coq let start_inline_coq = select Latex.start_inline_coq Html.start_inline_coq TeXmacs.start_inline_coq Raw.start_inline_coq let end_inline_coq = select Latex.end_inline_coq Html.end_inline_coq TeXmacs.end_inline_coq Raw.end_inline_coq let start_inline_coq_block = select Latex.start_inline_coq_block Html.start_inline_coq_block TeXmacs.start_inline_coq_block Raw.start_inline_coq_block let end_inline_coq_block = select Latex.end_inline_coq_block Html.end_inline_coq_block TeXmacs.end_inline_coq_block Raw.end_inline_coq_block let indentation = select Latex.indentation Html.indentation TeXmacs.indentation Raw.indentation let paragraph = select Latex.paragraph Html.paragraph TeXmacs.paragraph Raw.paragraph let line_break = select Latex.line_break Html.line_break TeXmacs.line_break Raw.line_break let empty_line_of_code = select Latex.empty_line_of_code Html.empty_line_of_code TeXmacs.empty_line_of_code Raw.empty_line_of_code let section = select Latex.section Html.section TeXmacs.section Raw.section let item = select Latex.item Html.item TeXmacs.item Raw.item let stop_item = select Latex.stop_item Html.stop_item TeXmacs.stop_item Raw.stop_item let reach_item_level = select Latex.reach_item_level Html.reach_item_level TeXmacs.reach_item_level Raw.reach_item_level let rule = select Latex.rule Html.rule TeXmacs.rule Raw.rule let nbsp = select 
Latex.nbsp Html.nbsp TeXmacs.nbsp Raw.nbsp let char = select Latex.char Html.char TeXmacs.char Raw.char let keyword = select Latex.keyword Html.keyword TeXmacs.keyword Raw.keyword let ident = select Latex.ident Html.ident TeXmacs.ident Raw.ident let sublexer = select Latex.sublexer Html.sublexer TeXmacs.sublexer Raw.sublexer let sublexer_in_doc = select Latex.sublexer_in_doc Html.sublexer_in_doc TeXmacs.sublexer_in_doc Raw.sublexer_in_doc let initialize = select Latex.initialize Html.initialize TeXmacs.initialize Raw.initialize let proofbox = select Latex.proofbox Html.proofbox TeXmacs.proofbox Raw.proofbox let latex_char = select Latex.latex_char Html.latex_char TeXmacs.latex_char Raw.latex_char let latex_string = select Latex.latex_string Html.latex_string TeXmacs.latex_string Raw.latex_string let html_char = select Latex.html_char Html.html_char TeXmacs.html_char Raw.html_char let html_string = select Latex.html_string Html.html_string TeXmacs.html_string Raw.html_string let start_emph = select Latex.start_emph Html.start_emph TeXmacs.start_emph Raw.start_emph let stop_emph = select Latex.stop_emph Html.stop_emph TeXmacs.stop_emph Raw.stop_emph let start_details = select Latex.start_details Html.start_details TeXmacs.start_details Raw.start_details let stop_details = select Latex.stop_details Html.stop_details TeXmacs.stop_details Raw.stop_details let start_latex_math = select Latex.start_latex_math Html.start_latex_math TeXmacs.start_latex_math Raw.start_latex_math let stop_latex_math = select Latex.stop_latex_math Html.stop_latex_math TeXmacs.stop_latex_math Raw.stop_latex_math let start_verbatim = select Latex.start_verbatim Html.start_verbatim TeXmacs.start_verbatim Raw.start_verbatim let stop_verbatim = select Latex.stop_verbatim Html.stop_verbatim TeXmacs.stop_verbatim Raw.stop_verbatim let verbatim_char inline = select (if inline then Latex.char else output_char) Html.char TeXmacs.char Raw.char let hard_verbatim_char = output_char let url = select Latex.url Html.url TeXmacs.url Raw.url let start_quote = select Latex.start_quote Html.start_quote TeXmacs.start_quote Raw.start_quote let stop_quote = select Latex.stop_quote Html.stop_quote TeXmacs.stop_quote Raw.stop_quote let inf_rule_dumb assumptions (midsp,midln,midnm) conclusions = start_verbatim false; let dumb_line = function (sp,ln) -> (String.iter char ((String.make sp ' ') ^ ln); char '\n') in (List.iter dumb_line assumptions; dumb_line (midsp, midln ^ (match midnm with | Some s -> " " ^ s | None -> "")); List.iter dumb_line conclusions); stop_verbatim false let inf_rule = select inf_rule_dumb Html.inf_rule inf_rule_dumb inf_rule_dumb let make_multi_index = select Latex.make_multi_index Html.make_multi_index TeXmacs.make_multi_index Raw.make_multi_index let make_index = select Latex.make_index Html.make_index TeXmacs.make_index Raw.make_index let make_toc = select Latex.make_toc Html.make_toc TeXmacs.make_toc Raw.make_toc coq-8.15.0/tools/coqdoc/output.mli000066400000000000000000000063271417001151100170410ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit val add_printing_token : string -> string option * string option -> unit val remove_printing_token : string -> unit val set_module : coq_module -> string option -> unit val get_module : bool -> string val header : unit -> unit val trailer : unit -> unit val push_in_preamble : string -> unit val start_module 
: unit -> unit val start_doc : unit -> unit val end_doc : unit -> unit val start_details : string option -> unit val stop_details : unit -> unit val start_emph : unit -> unit val stop_emph : unit -> unit val start_comment : unit -> unit val end_comment : unit -> unit val start_coq : unit -> unit val end_coq : unit -> unit val start_inline_coq : unit -> unit val end_inline_coq : unit -> unit val start_inline_coq_block : unit -> unit val end_inline_coq_block : unit -> unit val indentation : int -> unit val line_break : unit -> unit val paragraph : unit -> unit val empty_line_of_code : unit -> unit val section : int -> (unit -> unit) -> unit val item : int -> unit val stop_item : unit -> unit val reach_item_level : int -> unit val rule : unit -> unit val nbsp : unit -> unit val char : char -> unit val keyword : string -> loc -> unit val ident : string -> loc option -> unit val sublexer : char -> loc -> unit val sublexer_in_doc : char -> unit val proofbox : unit -> unit val latex_char : char -> unit val latex_string : string -> unit val html_char : char -> unit val html_string : string -> unit val verbatim_char : bool -> char -> unit val hard_verbatim_char : char -> unit val start_latex_math : unit -> unit val stop_latex_math : unit -> unit val start_verbatim : bool -> unit val stop_verbatim : bool -> unit val start_quote : unit -> unit val stop_quote : unit -> unit val url : string -> string option -> unit (* this outputs an inference rule in one go. You pass it the list of assumptions, then the middle line info, then the conclusion (which is allowed to span multiple lines). In each case, the int is the number of spaces before the start of the line's text and the string is the text of the line with the leading trailing space trimmed. For the middle rule, you can also optionally provide a name. 
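For example (a hedged sketch, not taken from the sources; the premises,
   separator and rule name are made up), a two-premise rule with a named middle
   line could be emitted as

     inf_rule [ (2, "Gamma |- A"); (2, "Gamma |- B") ]
              (0, "-----------------------", Some "and_intro")
              [ (2, "Gamma |- A /\\ B") ]

   where the leading integers are the space counts described above.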
We need the space info so that in modes where we aren't doing something smart we can just format the rule verbatim like the user did *) val inf_rule : (int * string) list -> (int * string * (string option)) -> (int * string) list -> unit val make_multi_index : unit -> unit val make_index : unit -> unit val make_toc : unit -> unit coq-8.15.0/tools/coqdoc/style.css000066400000000000000000000020131417001151100166340ustar00rootroot00000000000000a:visited {color : #416DFF; text-decoration : none; } a:link {color : #416DFF; text-decoration : none; font-weight : bold} a:hover {color : Red; text-decoration : underline; } a:active {color : Red; text-decoration : underline; } .keyword { font-weight : bold ; color : Red } .keywordsign { color : #C04600 } .superscript { font-size : 4 } .subscript { font-size : 4 } .comment { color : Green } .constructor { color : Blue } .string { color : Maroon } .warning { color : Red ; font-weight : bold } .info { margin-left : 3em; margin-right : 3em } .title1 { font-size : 20pt ; background-color : #416DFF } .title2 { font-size : 20pt ; background-color : #418DFF } .title3 { font-size : 20pt ; background-color : #41ADFF } .title4 { font-size : 20pt ; background-color : #41CDFF } .title5 { font-size : 20pt ; background-color : #41EDFF } .title6 { font-size : 20pt ; background-color : #41FFFF } body { background-color : White } tr { background-color : White } # .doc { background-color :#aaeeff } .doc { background-color :#66ff66 } coq-8.15.0/tools/coqdoc/tokens.ml000066400000000000000000000140001417001151100166160ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* None with | Some tt' -> CharMap.add c (insert tt' (i + 1)) (CharMap.remove c tt.branch) | None -> let tt' = {node = None; branch = CharMap.empty} in CharMap.add c (insert tt' (i + 1)) tt.branch in { node = tt.node; branch = br } in insert ttree 0 (* Removes a string from a dictionary: returns an equal dictionary if the word not present. 
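As an aside, a small illustration of how these dictionaries are meant to be used
   (a sketch, not from the original file): both operations are purely functional
   and leave their input untouched, and lookups then follow the longest-match
   rule documented in tokens.mli.

     let _demo_tree =
       let tt = ttree_add (ttree_add empty_ttree "->" "\\rightarrow")
                          "->~" "\\rightarrow\\lnot" in
       (* "->~" is matched as a whole before any fallback to "->" alone *)
       ignore (ttree_remove tt "->~")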
*) let ttree_remove ttree str = let rec remove tt i = if i == String.length str then {node = None; branch = tt.branch} else let c = str.[i] in let br = match try Some (CharMap.find c tt.branch) with Not_found -> None with | Some tt' -> CharMap.add c (remove tt' (i + 1)) (CharMap.remove c tt.branch) | None -> tt.branch in { node = tt.node; branch = br } in remove ttree 0 let ttree_descend ttree c = CharMap.find c ttree.branch let ttree_find ttree str = let rec proc_rec tt i = if i == String.length str then tt else proc_rec (CharMap.find str.[i] tt.branch) (i+1) in proc_rec ttree 0 (*s Parameters of the translation automaton *) type out_function = bool -> bool -> Index.index_entry option -> string -> unit let token_tree = ref (ref empty_ttree) let outfun = ref (fun _ _ _ _ -> failwith "outfun not initialized") (*s Translation automaton *) let buff = Buffer.create 4 let flush_buffer was_symbolchar tag tok = let hastr = String.length tok <> 0 in if hastr then !outfun false was_symbolchar tag tok; if Buffer.length buff <> 0 then !outfun true (if hastr then not was_symbolchar else was_symbolchar) tag (Buffer.contents buff); Buffer.clear buff type sublexer_state = | Neutral | Buffering of bool * Index.index_entry option * string * ttree let translation_state = ref Neutral let buffer_char is_symbolchar ctag c = let rec aux = function | Neutral -> restart_buffering () | Buffering (was_symbolchar,tag,translated,tt) -> if tag <> ctag then (* A strong tag comes from Coq; if different Coq tags *) (* hence, we don't try to see the chars as part of a single token *) let translated = match tt.node with | Some tok -> Buffer.clear buff; tok | None -> translated in flush_buffer was_symbolchar tag translated; restart_buffering () else begin (* If we change the category of characters (symbol vs ident) *) (* we accept this as a possible token cut point and remember the *) (* translated token up to that point *) let translated = if is_symbolchar <> was_symbolchar then match tt.node with | Some tok -> Buffer.clear buff; tok | None -> translated else translated in (* We try to make a significant token from the current *) (* buffer and the new character *) try let tt = ttree_descend tt c in Buffer.add_char buff c; Buffering (is_symbolchar,ctag,translated,tt) with Not_found -> (* No existing translation for the given set of chars *) if is_symbolchar <> was_symbolchar then (* If we changed the category of character read, we accept it *) (* as a possible cut point and restart looking for a translation *) (flush_buffer was_symbolchar tag translated; restart_buffering ()) else (* If we did not change the category of character read, we do *) (* not want to cut arbitrarily in the middle of the sequence of *) (* symbol characters or identifier characters *) (Buffer.add_char buff c; Buffering (is_symbolchar,tag,translated,empty_ttree)) end and restart_buffering () = let tt = try ttree_descend !(!token_tree) c with Not_found -> empty_ttree in Buffer.add_char buff c; Buffering (is_symbolchar,ctag,"",tt) in translation_state := aux !translation_state let output_tagged_ident_string s = for i = 0 to String.length s - 1 do buffer_char false None s.[i] done let output_tagged_symbol_char tag c = buffer_char true tag c let flush_sublexer () = match !translation_state with | Neutral -> () | Buffering (was_symbolchar,tag,translated,tt) -> let translated = match tt.node with | Some tok -> Buffer.clear buff; tok | None -> translated in flush_buffer was_symbolchar tag translated; translation_state := Neutral (* Translation not using the 
automaton *) let translate s = try (ttree_find !(!token_tree) s).node with Not_found -> None coq-8.15.0/tools/coqdoc/tokens.mli000066400000000000000000000064071417001151100170030ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string -> string -> ttree (* Remove a translation from a dictionary: returns an equal dictionary if the word not present *) val ttree_remove : ttree -> string -> ttree (* Translate a string *) val translate : string -> string option (* Sublexer automaton *) (* The sublexer buffers the chars it receives; if after some time, it recognizes that a sequence of chars has a translation in the current dictionary, it replaces the buffer by the translation *) (* Received chars can come with a "tag" (usually made from informations from the globalization file). A sequence of chars can be considered a word only, if all chars have the same "tag". Rules for cutting words are the following: - in a sequence like "**" where * is in the dictionary but not **, "**" is not translated; otherwise said, to be translated, a sequence must not be surrounded by other symbol-like chars - in a sequence like "<>_h*", where <>_h is in the dictionary, the translation is done because the switch from a letter to a symbol char is an acceptable cutting point - in a sequence like "<>_ha", where <>_h is in the dictionary, the translation is not done because it is considered that h and a are not separable (however, if h and a have different tags, and h has the same tags as <, > and _, the translation happens) - in a sequence like "<>_ha", where <> but not <>_h is in the dictionary, the translation is done for <> and _ha is considered independently because the switch from a symbol char to a letter is considered to be an acceptable cutting point - the longest-word rule applies: if both <> and <>_h are in the dictionary, "<>_h" is one word and gets translated *) (* Warning: do not output anything on output channel in between a call to [output_tagged_*] and [flush_sublexer]!! *) type out_function = bool (* needs escape *) -> bool (* it is a symbol, not a pure ident *) -> Index.index_entry option (* the index type of the token if any *) -> string -> unit (* This must be initialized before calling the sublexer *) val token_tree : ttree ref ref val outfun : out_function ref (* Process an ident part that might be a symbol part *) val output_tagged_ident_string : string -> unit (* Process a non-ident char (possibly equipped with a tag) *) val output_tagged_symbol_char : Index.index_entry option -> char -> unit (* Flush the buffered content of the lexer using [outfun] *) val flush_sublexer : unit -> unit coq-8.15.0/tools/coqwc.mll000066400000000000000000000221111417001151100153350ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* printf " %s" f | _ -> ()); if !percentage then begin let s = sl + pl + dl in let p = if s > 0 then 100 * dl / s else 0 in printf " (%d%%)" p end; print_newline () let print_file fo = print_line !slines !plines !dlines fo let print_totals () = print_line !tslines !tplines !tdlines (Some "total") (*i*)}(*i*) (*s Shortcuts for regular expressions. 
The [rcs] regular expression is used to skip the CVS infos possibly contained in some comments, in order not to consider it as documentation. *) let space = [' ' '\t' '\r'] let character = "'" ([^ '\\' '\''] | '\\' (['\\' '\'' 'n' 't' 'b' 'r'] | ['0'-'9'] ['0'-'9'] ['0'-'9'])) "'" let rcs_keyword = "Author" | "Date" | "Header" | "Id" | "Name" | "Locker" | "Log" | "RCSfile" | "Revision" | "Source" | "State" let rcs = "\036" rcs_keyword [^ '$']* "\036" let stars = "(*" '*'* "*)" let dot = '.' (' ' | '\t' | '\n' | '\r' | eof) let proof_start = "Theorem" | "Lemma" | "Fact" | "Remark" | "Goal" | "Correctness" | "Obligation" space+ (['0' - '9'])+ | "Next" space+ "Obligation" let def_start = "Definition" | "Fixpoint" | "Instance" let proof_end = ("Save" | "Qed" | "Defined" | "Abort" | "Admitted") [^'.']* '.' (*s [spec] scans the specification. *) rule spec = parse | "(*" { comment lexbuf; spec lexbuf } | '"' { let n = string lexbuf in slines := !slines + n; seen_spec := true; spec lexbuf } | '\n' { newline (); spec lexbuf } | space+ | stars { spec lexbuf } | proof_start { seen_spec := true; spec_to_dot lexbuf; proof lexbuf } | def_start { seen_spec := true; definition lexbuf } | character | _ { seen_spec := true; spec lexbuf } | eof { () } (*s [spec_to_dot] scans a spec until a dot is reached and returns. *) and spec_to_dot = parse | "(*" { comment lexbuf; spec_to_dot lexbuf } | '"' { let n = string lexbuf in slines := !slines + n; seen_spec := true; spec_to_dot lexbuf } | '\n' { newline (); spec_to_dot lexbuf } | dot { () } | space+ | stars { spec_to_dot lexbuf } | character | _ { seen_spec := true; spec_to_dot lexbuf } | eof { () } (*s [definition] scans a definition; passes to [proof] if the body is absent, and to [spec] otherwise *) and definition = parse | "(*" { comment lexbuf; definition lexbuf } | '"' { let n = string lexbuf in slines := !slines + n; seen_spec := true; definition lexbuf } | '\n' { newline (); definition lexbuf } | ":=" { seen_spec := true; spec lexbuf } | dot { proof lexbuf } | space+ | stars { definition lexbuf } | character | _ { seen_spec := true; definition lexbuf } | eof { () } (*s Scans a proof, then returns to [spec]. *) and proof = parse | "(*" { comment lexbuf; proof lexbuf } | '"' { let n = string lexbuf in plines := !plines + n; seen_proof := true; proof lexbuf } | space+ | stars { proof lexbuf } | '\n' { newline (); proof lexbuf } | "Proof" space* '.' | "Proof" space+ "with" | "Proof" space+ "using" { seen_proof := true; proof lexbuf } | "Proof" space { proof_term lexbuf } | proof_end { seen_proof := true; spec lexbuf } | character | _ { seen_proof := true; proof lexbuf } | eof { () } and proof_term = parse | "(*" { comment lexbuf; proof_term lexbuf } | '"' { let n = string lexbuf in plines := !plines + n; seen_proof := true; proof_term lexbuf } | space+ | stars { proof_term lexbuf } | '\n' { newline (); proof_term lexbuf } | dot { spec lexbuf } | character | _ { seen_proof := true; proof_term lexbuf } | eof { () } (*s Scans a comment. *) and comment = parse | "(*" { comment lexbuf; comment lexbuf } | "*)" { () } | '"' { let n = string lexbuf in dlines := !dlines + n; seen_comment := true; comment lexbuf } | '\n' { newline (); comment lexbuf } | space+ | stars { comment lexbuf } | character | _ { seen_comment := true; comment lexbuf } | eof { () } (*s The entry [string] reads a string until its end and returns the number of newlines it contains. 
*) and string = parse | '"' { 0 } | '\\' ('\\' | 'n' | '"') { string lexbuf } | '\n' { succ (string lexbuf) } | _ { string lexbuf } | eof { 0 } (*s The following entry [read_header] is used to skip the possible header at the beginning of files (unless option \texttt{-e} is specified). It stops whenever it encounters an empty line or any character outside a comment. In this last case, it correctly resets the lexer position on that character (decreasing [lex_curr_pos] by 1). *) and read_header = parse | "(*" { skip_comment lexbuf; skip_until_nl lexbuf; read_header lexbuf } | "\n" { () } | space+ { read_header lexbuf } | _ { lexbuf.lex_curr_pos <- lexbuf.lex_curr_pos - 1 } | eof { () } and skip_comment = parse | "*)" { () } | "(*" { skip_comment lexbuf; skip_comment lexbuf } | _ { skip_comment lexbuf } | eof { () } and skip_until_nl = parse | '\n' { () } | _ { skip_until_nl lexbuf } | eof { () } (*i*){(*i*) (*s Processing files and channels. *) let process_channel ch = let lb = Lexing.from_channel ch in reset_counters (); if !skip_header then read_header lb; spec lb [@@@ocaml.warning "-52"] let process_file f = try let ch = open_in f in process_channel ch; close_in ch; print_file (Some f); update_totals () with | Sys_error "Is a directory" -> flush stdout; eprintf "coqwc: %s: Is a directory\n" f; flush stderr | Sys_error s -> flush stdout; eprintf "coqwc: %s\n" s; flush stderr [@@@ocaml.warning "+52"] (*s Parsing of the command line. *) let usage () = prerr_endline "usage: coqwc [options] [files]"; prerr_endline "Options are:"; prerr_endline " -p print percentage of comments"; prerr_endline " -s print only the spec size"; prerr_endline " -r print only the proof size"; prerr_endline " -e (everything) do not skip headers"; exit 1 let rec parse = function | [] -> [] | ("-h" | "-?" | "-help" | "--help") :: _ -> usage () | ("-s" | "--spec-only") :: args -> proof_only := false; spec_only := true; parse args | ("-r" | "--proof-only") :: args -> spec_only := false; proof_only := true; parse args | ("-p" | "--percentage") :: args -> percentage := true; parse args | ("-e" | "--header") :: args -> skip_header := false; parse args | f :: args -> f :: (parse args) (*s Main program. 
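(* A hypothetical sketch, illustrative only: counting a fragment held in a
   string instead of a file, reusing the entry points above
   ([reset_counters], [slines], [plines] and [dlines] are the counters used
   by [process_channel]).

   let count_fragment s =
     reset_counters ();
     spec (Lexing.from_string s);
     (!slines, !plines, !dlines)

   For a fragment such as
     "Definition two := 2.\nLemma l : two = 2.\nProof. reflexivity. Qed.\n"
   the Definition and Lemma statements are counted as specification and the
   Proof. ... Qed. part as proof script. *)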
*) let _ = let files = parse (List.tl (Array.to_list Sys.argv)) in if not (!spec_only || !proof_only) then printf " spec proof comments\n"; match files with | [] -> process_channel stdin; print_file None | [f] -> process_file f | _ -> List.iter process_file files; print_totals () (*i*)}(*i*) coq-8.15.0/tools/coqworkmgr.ml000066400000000000000000000160661417001151100162540ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* s, string_of_inet_addr host ^":"^ string_of_int port | _ -> assert false module Queue : sig type t val is_empty : t -> bool val push : int * party -> t -> unit val pop : t -> int * party val create : unit -> t end = struct type t = (int * party) list ref let create () = ref [] let is_empty q = !q = [] let rec split acc = function | [] -> List.rev acc, [] | (_, { priority = Low }) :: _ as l -> List.rev acc, l | x :: xs -> split (x :: acc) xs let push (_,{ priority } as item) q = if priority = Low then q := !q @ [item] else let high, low = split [] !q in q := high @ (item :: low) let pop q = match !q with x :: xs -> q := xs; x | _ -> assert false end let read_fd fd s ~off ~len = let rec loop () = try Unix.read fd s off len with Unix.Unix_error(Unix.EAGAIN,_,_) -> loop () in loop () let really_read_fd fd s off len = let i = ref 0 in while !i < len do let off = off + !i in let len = len - !i in let r = read_fd fd s ~off ~len in if r = 0 then raise End_of_file; i := !i + r done let raw_input_line fd = try let b = Buffer.create 80 in let s = Bytes.make 1 '\000' in let endl = Bytes.of_string "\n" in let endr = Bytes.of_string "\r" in while Bytes.compare s endl <> 0 do really_read_fd fd s 0 1; if Bytes.compare s endl <> 0 && Bytes.compare s endr <> 0 then Buffer.add_bytes b s; done; Buffer.contents b with Unix.Unix_error _ -> raise End_of_file let accept s = let cs, _ = Unix.accept s in let cout = Unix.out_channel_of_descr cs in set_binary_mode_out cout true; match parse_request (raw_input_line cs) with | Hello p -> { sock=cs; cout; tokens=0; priority=p } | _ -> (try Unix.close cs with _ -> ()); raise End_of_file let parse s = () let parties = ref [] let max_tokens = ref 2 let cur_tokens = ref 0 let queue = Queue.create () let rec allocate n party = let extra = min n (!max_tokens - !cur_tokens) in cur_tokens := !cur_tokens + extra; party.tokens <- party.tokens + extra; answer party (Tokens extra) and de_allocate n party = let back = min party.tokens n in party.tokens <- party.tokens - back; cur_tokens := min (!cur_tokens - back) !max_tokens; eventually_dequeue () and eventually_dequeue () = if Queue.is_empty queue || !cur_tokens >= !max_tokens then () else let req, party = Queue.pop queue in if List.exists (fun { sock } -> sock = party.sock) !parties then allocate req party else eventually_dequeue () let chat s = let party = try List.find (fun { sock } -> sock = s) !parties with Not_found -> Printf.eprintf "Internal error"; exit 1 in try match parse_request (raw_input_line party.sock) with | Get n -> if !cur_tokens < !max_tokens then allocate n party else Queue.push (n,party) queue | TryGet n -> if !cur_tokens < !max_tokens then allocate n party else answer party Noluck | GiveBack n -> de_allocate n party | Ping -> answer party (Pong (!cur_tokens,!max_tokens,Unix.getpid ())); raise End_of_file | Hello _ -> raise End_of_file with Failure _ | ParseError | Sys_error _ | End_of_file -> (try Unix.close party.sock with _ -> 
()); parties := List.filter (fun { sock } -> sock <> s) !parties; de_allocate party.tokens party; eventually_dequeue () let check_alive s = match CoqworkmgrApi.connect s with | Some s -> let cout = Unix.out_channel_of_descr s in set_binary_mode_out cout true; output_string cout (print_request (Hello Low)); flush cout; output_string cout (print_request Ping); flush cout; begin match Unix.select [s] [] [] 1.0 with | [s],_,_ -> let cin = Unix.in_channel_of_descr s in set_binary_mode_in cin true; begin match parse_response (input_line cin) with | Pong (n,m,pid) -> n, m, pid | _ -> raise Not_found end | _ -> raise Not_found end | _ -> raise Not_found let main () = let args = [ "-j",Arg.Set_int max_tokens, "max number of concurrent jobs"; "-d",Arg.Set debug, "do not detach (debug)"] in let usage = "Prints on stdout an env variable assignment to be picked up by coq\n"^ "instances in order to limit the maximum number of concurrent workers.\n"^ "The default value is 2.\n"^ "Usage:" in Arg.parse args (fun extra -> Arg.usage args ("Unexpected argument "^extra^".\n"^usage)) usage; try let sock = Sys.getenv "COQWORKMGR_SOCK" in if !debug then Printf.eprintf "Contacting %s\n%!" sock; let cur, max, pid = check_alive sock in Printf.printf "COQWORKMGR_SOCK=%s\n%!" sock; Printf.eprintf "coqworkmgr already up and running (pid=%d, socket=%s, j=%d/%d)\n%!" pid sock cur max; exit 0 with Not_found | Failure _ | Invalid_argument _ | Unix.Unix_error _ -> if !debug then Printf.eprintf "No running instance. Starting a new one\n%!"; let master, str = mk_socket_channel () in if not !debug then begin let pid = Unix.fork () in if pid <> 0 then begin Printf.printf "COQWORKMGR_SOCK=%s\n%!" str; exit 0 end else begin ignore(Unix.setsid ()); Unix.close Unix.stdin; Unix.close Unix.stdout; end; end else begin Printf.printf "COQWORKMGR_SOCK=%s\n%!" str; end; Sys.catch_break true; try while true do if !debug then Printf.eprintf "Status: #parties=%d tokens=%d/%d \n%!" 
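(* A hypothetical client sketch, illustrative only, mirroring [check_alive]
   above; it uses the same request/response constructors handled by [chat]
   (Hello, TryGet, GiveBack, Ping on one side, Tokens, Noluck, Pong on the
   other). Tokens obtained this way are meant to be returned later with a
   GiveBack request.

   let try_get_tokens sock_addr n =
     match CoqworkmgrApi.connect sock_addr with
     | None -> None
     | Some fd ->
       let cout = Unix.out_channel_of_descr fd in
       let cin = Unix.in_channel_of_descr fd in
       set_binary_mode_out cout true;
       output_string cout (print_request (Hello Low)); flush cout;
       output_string cout (print_request (TryGet n)); flush cout;
       begin match parse_response (input_line cin) with
       | Tokens k -> Some k   (* at most n tokens were granted *)
       | Noluck -> Some 0     (* the manager is saturated *)
       | _ -> None
       end
*)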
(List.length !parties) !cur_tokens !max_tokens; let socks = master :: List.map (fun { sock } -> sock) !parties in let r, _, _ = Unix.select socks [] [] (-1.0) in List.iter (fun s -> if s = master then begin try parties := accept master :: !parties with _ -> () end else chat s) r done; exit 0 with Sys.Break -> if !parties <> [] then begin Printf.eprintf "Some coq processes still need me\n%!"; exit 1; end else exit 0 let () = main () coq-8.15.0/tools/dune000066400000000000000000000023431417001151100143760ustar00rootroot00000000000000(install (section lib) (package coq-core) (files (CoqMakefile.in as tools/CoqMakefile.in))) (install (section libexec) (package coq-core) (files (TimeFileMaker.py as tools/TimeFileMaker.py) (make-one-time-file.py as tools/make-one-time-file.py) (make-both-time-files.py as tools/make-both-time-files.py) (make-both-single-timing-files.py as tools/make-both-single-timing-files.py))) (executable (name coq_makefile) (public_name coq_makefile) (package coq-core) (modules coq_makefile) (libraries coq-core.boot coq-core.lib)) (executable (name coqworkmgr) (public_name coqworkmgr) (package coq-core) (modules coqworkmgr) (libraries coq-core.stm)) (executable (name coqdep) (public_name coqdep) (package coq-core) (modules coqdep_lexer coqdep_common coqdep) (libraries coq-core.boot coq-core.lib)) ; Bare-bones mllib/mlpack parser (executable (name ocamllibdep) (public_name ocamllibdep) (modules ocamllibdep) (package coq-core) (libraries unix)) (ocamllex coqdep_lexer ocamllibdep) (executable (name coqwc) (public_name coqwc) (package coq-core) (modules coqwc) (libraries)) (ocamllex coqwc) (executables (names coq_tex) (public_names coq-tex) (package coq-core) (modules coq_tex) (libraries str)) coq-8.15.0/tools/flock/000077500000000000000000000000001417001151100146145ustar00rootroot00000000000000coq-8.15.0/tools/flock/coq_flock.ml000066400000000000000000000037411417001151100171130ustar00rootroot00000000000000(* Simple OCaml version of flock *) let fail msg = Format.eprintf "coq_flock: %s@\n%!" msg; exit 1 (* operations for flock *) module Flock : sig type operation val lock_sh : operation val lock_ex : operation val lock_un : operation val flock : Unix.file_descr -> operation -> int end = struct type operation = int let lock_sh = 0 let lock_ex = 1 let lock_un = 2 external flock : Unix.file_descr -> int -> int = "coq_flock" end (* flock file command args, we parse the arguments and extract the command to be sent to Unix.create_process *) let parse_args () = let l = Array.length Sys.argv in if l < 3 then fail "wrong number of arguments, use 'coq_flock file command args'" else let file_lock = Sys.argv.(1) in let command = Sys.argv.(2) in let arguments = Array.make (l-2) "" in Array.set arguments 0 command; Array.blit Sys.argv 3 arguments 1 (l-3); file_lock, command, arguments let lock f = let fd = Unix.openfile f Unix.[O_CREAT;O_RDWR] 0o644 in let res = Flock.(flock fd lock_ex) in if res <> 0 then fail "flock syscall error!" 
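(* An illustrative note, with a hypothetical invocation: this wrapper is
   meant to serialize concurrent commands on a shared lock file, e.g.

     coq_flock build.lock make -C theories

   Two invocations sharing [build.lock] run one at a time: the second
   [lock] call blocks inside the flock(2) syscall until the first process
   has unlocked the file or exited. *)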
else fd let unlock fid = let _res = Flock.(flock fid lock_un) in Unix.close fid let exec_program fid command args = let pid = Unix.create_process command args Unix.stdin Unix.stdout Unix.stderr in let _pid, status = Unix.waitpid [] pid in unlock fid; (* It is critical that we don't raise after this point, otherwise we will double-free the lock *) match status with | Unix.WEXITED 0 -> () | Unix.WSTOPPED _ -> () | Unix.WEXITED 127 -> (* per create_process documentation *) fail ("Couldn't execute program " ^ command) | Unix.WEXITED err -> exit err | Unix.WSIGNALED s -> Unix.kill (Unix.getpid ()) s let main () = let file_lock, command, args = parse_args () in let fid = lock file_lock in try exec_program fid command args with exn -> unlock fid; raise exn let _ = try main () with exn -> let msg = Printexc.to_string exn in fail msg coq-8.15.0/tools/flock/dune000066400000000000000000000001411417001151100154660ustar00rootroot00000000000000(executable (name coq_flock) (foreign_stubs (language c) (names flock)) (libraries unix)) coq-8.15.0/tools/flock/flock.c000066400000000000000000000014301417001151100160540ustar00rootroot00000000000000#include #include #define CAML_NAME_SPACE #include #include #include #include #include CAMLprim value coq_flock(value fd, value operation) { CAMLparam2 (fd, operation); int fd_i = Int_val(fd); int operation_i = Int_val(operation); // printf("stub_fd: %d ; op_id: %d", fd_i, operation_i); int operation_f = 0; switch (operation_i) { case 0: operation_f = LOCK_SH; break; case 1: operation_f = LOCK_EX; break; case 2: operation_f = LOCK_UN; break; default: caml_invalid_argument("Incorrect flock operation"); break; }; int res = flock(fd_i, operation_f); if (res != 0) { perror("flock: "); }; CAMLreturn ( Val_int(res) ); } coq-8.15.0/tools/make-both-single-timing-files.py000077500000000000000000000015051417001151100216070ustar00rootroot00000000000000#!/usr/bin/env python3 from TimeFileMaker import * if __name__ == '__main__': parser = argparse.ArgumentParser(description=r'''Formats timing information from the output of two invocations of `coqc -time` into a sorted table''') add_sort_by(parser) add_user(parser, single_timing=True) add_fuzz(parser) add_after_file_name(parser) add_before_file_name(parser) add_output_file_name(parser) args = parser.parse_args() left_dict = get_single_file_times(args.AFTER_FILE_NAME, use_real=args.real) right_dict = get_single_file_times(args.BEFORE_FILE_NAME, use_real=args.real) left_dict, right_dict = adjust_fuzz(left_dict, right_dict, fuzz=args.fuzz) table = make_diff_table_string(left_dict, right_dict, tag="Code", sort_by=args.sort_by) print_or_write_table(table, args.OUTPUT_FILE_NAME) coq-8.15.0/tools/make-both-time-files.py000077500000000000000000000015561417001151100200050ustar00rootroot00000000000000#!/usr/bin/env python3 from TimeFileMaker import * if __name__ == '__main__': parser = argparse.ArgumentParser(description=r'''Formats timing information from the output of two invocations of `make TIMED=1` into a sorted table.''') add_sort_by(parser) add_real(parser) add_include_mem(parser) add_sort_by_mem(parser) add_after_file_name(parser) add_before_file_name(parser) add_output_file_name(parser) args = parser.parse_args() left_dict = get_times_and_mems(args.AFTER_FILE_NAME, use_real=args.real, include_mem=args.include_mem) right_dict = get_times_and_mems(args.BEFORE_FILE_NAME, use_real=args.real, include_mem=args.include_mem) table = make_diff_table_string(left_dict, right_dict, sort_by=args.sort_by, include_mem=args.include_mem, 
sort_by_mem=args.sort_by_mem) print_or_write_table(table, args.OUTPUT_FILE_NAME) coq-8.15.0/tools/make-one-time-file.py000077500000000000000000000012171417001151100174410ustar00rootroot00000000000000#!/usr/bin/env python3 import sys from TimeFileMaker import * if __name__ == '__main__': parser = argparse.ArgumentParser(description=r'''Formats timing information from the output of `make TIMED=1` into a sorted table.''') add_real(parser) add_file_name(parser) add_output_file_name(parser) add_include_mem(parser) add_sort_by_mem(parser) args = parser.parse_args() stats_dict = get_times_and_mems(args.FILE_NAME, use_real=args.real, include_mem=args.include_mem) table = make_table_string(stats_dict, include_mem=args.include_mem, sort_by_mem=args.sort_by_mem) print_or_write_table(table, args.OUTPUT_FILE_NAME) coq-8.15.0/tools/md5sum.ml000066400000000000000000000015641417001151100152700ustar00rootroot00000000000000let get_content file = let ic = open_in_bin file in let buf = Buffer.create 2048 in let rec fill () = match input_char ic with | '\r' -> fill () (* NOTE: handles the case on Windows where the git checkout has included return characters. See: https://github.com/coq/coq/pull/6305 *) | c -> Buffer.add_char buf c; fill () | exception End_of_file -> close_in ic; Buffer.contents buf in fill () let () = match Sys.argv with | [|_; file|] -> let content = get_content file in let md5 = Digest.to_hex (Digest.string content) in print_string (md5 ^ " " ^ file) | _ -> prerr_endline "Error: This program needs exactly one parameter."; prerr_endline "Usage: ocaml md5sum.ml "; prerr_endline "Print MD5 (128-bit) checksum of the file content modulo \\r."; exit 1 coq-8.15.0/tools/ocamllibdep.mll000066400000000000000000000207371417001151100165100ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Buffer.clear s'; for i = 0 to String.length s - 1 do let c = s.[i] in if c = ' ' || c = '#' || c = ':' (* separators and comments *) || c = '%' (* pattern *) || c = '?' || c = '[' || c = ']' || c = '*' (* expansion in filenames *) || i=0 && c = '~' && (String.length s = 1 || s.[1] = '/' || 'A' <= s.[1] && s.[1] <= 'Z' || 'a' <= s.[1] && s.[1] <= 'z') (* homedir expansion *) then begin let j = ref (i-1) in while !j >= 0 && s.[!j] = '\\' do Buffer.add_char s' '\\'; decr j (* escape all preceding '\' *) done; Buffer.add_char s' '\\'; end; if c = '$' then Buffer.add_char s' '$'; Buffer.add_char s' c done; Buffer.contents s' (* Filename.concat but always with a '/' *) let is_dir_sep s i = match Sys.os_type with | "Unix" -> s.[i] = '/' | "Cygwin" | "Win32" -> let c = s.[i] in c = '/' || c = '\\' || c = ':' | _ -> assert false let (//) dirname filename = let l = String.length dirname in if l = 0 || is_dir_sep dirname (l-1) then dirname ^ filename else dirname ^ "/" ^ filename (** [get_extension f l] checks whether [f] has one of the extensions listed in [l]. It returns [f] without its extension, alongside with the extension. When no extension match, [(f,"")] is returned *) let rec get_extension f = function | [] -> (f, "") | s :: _ when Filename.check_suffix f s -> (Filename.chop_suffix f s, s) | _ :: l -> get_extension f l let file_name s = function | None -> s | Some "." 
-> s | Some d -> d // s type dir = string option let add_directory add_file phys_dir = let open Unix in let files = Sys.readdir phys_dir in Array.iter (fun f -> (* we avoid all files starting by '.' *) if f.[0] <> '.' then let phys_f = if phys_dir = "." then f else phys_dir//f in match try (stat phys_f).st_kind with _ -> S_BLK with | S_REG -> add_file phys_dir f | _ -> ()) files let error_cannot_parse s (i,j) = Printf.eprintf "File \"%s\", characters %i-%i: Syntax error\n" s i j; exit 1 let error_unknown_extension name = Printf.eprintf "Don't know what to do with \"%s\"\n" name; Printf.eprintf "Usage: ocamllibdep [-I dir] [-c] [file.mllib] [file.mlpack]\n"; exit 1 let error_cannot_open msg = Format.eprintf "Error: @[%s@].@\n%!" msg; exit 1 let error_cannot_stat err name = Format.eprintf "Error: @[cannot stat %s (%s)@].@\n%!" name (Unix.error_message err); exit 1 let same_path_opt s s' = let nf s = (* ./foo/a.ml and foo/a.ml are the same file *) if Filename.is_implicit s then "." // s else s in let s = match s with None -> "." | Some s -> nf s in let s' = match s' with None -> "." | Some s' -> nf s' in s = s' let warning_ml_clash x s suff s' suff' = if suff = suff' && not (same_path_opt s s') then eprintf "*** Warning: %s%s already found in %s (discarding %s%s)\n" x suff (match s with None -> "." | Some d -> d) ((match s' with None -> "." | Some d -> d) // x) suff let mkknown () = let h = (Hashtbl.create 19 : (string, dir * string) Hashtbl.t) in let add x s suff = try let s',suff' = Hashtbl.find h x in warning_ml_clash x s' suff' s suff with Not_found -> Hashtbl.add h x (s,suff) and search x = try Some (fst (Hashtbl.find h x)) with Not_found -> None in add, search let add_ml_known, search_ml_known = mkknown () let add_mlpack_known, search_mlpack_known = mkknown () let mllibAccu = ref ([] : (string * dir) list) let mlpackAccu = ref ([] : (string * dir) list) let add_caml_known phys_dir f = let basename,suff = get_extension f [".ml";".mlg";".mlpack"] in match suff with | ".ml"|".mlg" -> add_ml_known basename (Some phys_dir) suff | ".mlpack" -> add_mlpack_known basename (Some phys_dir) suff | _ -> () let add_caml_dir phys_dir = try add_directory add_caml_known phys_dir with | Sys_error msg -> error_cannot_open msg | Unix.Unix_error (e, "stat", a) -> error_cannot_stat e a let treat_file_modules md ext = try let chan = open_in (md ^ ext) in let list = mllib_list (Lexing.from_channel chan) in List.fold_left (fun acc str -> match search_mlpack_known str with | Some mldir -> (file_name str mldir) :: acc | None -> match search_ml_known str with | Some mldir -> (file_name str mldir) :: acc | None -> acc) [] (List.rev list) with | Sys_error _ -> [] | Syntax_error (i,j) -> error_cannot_parse (md^ext) (i,j) let addQueue q v = q := v :: !q let treat_file old_name = let name = Filename.basename old_name in let dirname = Some (Filename.dirname old_name) in match get_extension name [".mllib";".mlpack"] with | (base,".mllib") -> addQueue mllibAccu (base,dirname) | (base,".mlpack") -> addQueue mlpackAccu (base,dirname) | _ -> error_unknown_extension old_name let mllib_dependencies () = List.iter (fun (name,dirname) -> let fullname = file_name name dirname in let deps = treat_file_modules fullname ".mllib" in let sdeps = String.concat " " deps in let efullname = escape fullname in printf "%s_MLLIB_DEPENDENCIES:=%s\n" efullname sdeps; printf "%s.cma:$(addsuffix .cmo,$(%s_MLLIB_DEPENDENCIES))\n" efullname efullname; printf "%s.cmxa:$(addsuffix .cmx,$(%s_MLLIB_DEPENDENCIES))\n" efullname efullname; flush stdout) 
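(* Illustrative output (hypothetical names): for a [lib/foo.mllib] listing
   modules A and B whose sources live in [lib/], the loop above prints
   Makefile rules of the form

     lib/foo_MLLIB_DEPENDENCIES:=lib/A lib/B
     lib/foo.cma:$(addsuffix .cmo,$(lib/foo_MLLIB_DEPENDENCIES))
     lib/foo.cmxa:$(addsuffix .cmx,$(lib/foo_MLLIB_DEPENDENCIES))
*)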
(List.rev !mllibAccu) let coq_makefile_mode = ref false let print_for_pack modname d = if !coq_makefile_mode then printf "%s.cmx : FOR_PACK=-for-pack %s\n" d modname else printf "%s_FORPACK:= -for-pack %s\n" d modname let mlpack_dependencies () = List.iter (fun (name,dirname) -> let fullname = file_name name dirname in let modname = String.capitalize_ascii name in let deps = treat_file_modules fullname ".mlpack" in let sdeps = String.concat " " deps in let efullname = escape fullname in printf "%s_MLPACK_DEPENDENCIES:=%s\n" efullname sdeps; List.iter (print_for_pack modname) deps; printf "%s.cmo:$(addsuffix .cmo,$(%s_MLPACK_DEPENDENCIES))\n" efullname efullname; printf "%s.cmx:$(addsuffix .cmx,$(%s_MLPACK_DEPENDENCIES))\n" efullname efullname; flush stdout) (List.rev !mlpackAccu) let rec parse = function | "-c" :: r -> coq_makefile_mode := true; parse r | "-I" :: r :: ll -> (* To solve conflict (e.g. same filename in kernel and checker) we allow to state an explicit order *) add_caml_dir r; parse ll | f :: ll -> treat_file f; parse ll | [] -> () let main () = if Array.length Sys.argv < 2 then exit 1; parse (List.tl (Array.to_list Sys.argv)); mllib_dependencies (); mlpack_dependencies () let _ = Printexc.catch main () } coq-8.15.0/tools/update-require000077500000000000000000000042541417001151100164050ustar00rootroot00000000000000#!/bin/sh # This script fully qualifies all the 'Require' statements of the given # targets (or the current directory if none). # # It assumes that all the prerequisites are already installed. The # install location is found using the COQLIB, COQC, COQBIN variables if # set, 'coqc' otherwise. # # Option --exclude can be used to ignore a given user contribution. In # particular, it can be used to ignore the current set of files if it # happens to be already installed. # # Option --stdlib can be used to also qualify the files from the standard # library. if test ! "$COQLIB"; then if test ${COQBIN##*/}; then COQBIN=$COQBIN/; fi if test ! "$COQC"; then COQC=`which ${COQBIN}coqc`; fi COQLIB=`"$COQC" -where` fi stdlib=no exclude="" scan_dir () { (cd $1 ; find $3 -name '*.vo' | sed -e "s,^./,$2,;s,\([^./]*\)/,\1.,g;s,\([^.]*\).vo,\1,") } scan_all_dir () { if test $stdlib = yes; then scan_dir "$COQLIB/theories" "Coq." scan_dir "$COQLIB/plugins" "Coq." 
fi scan_dir "$COQLIB/user-contrib" "" "$exclude" } create_script () { echo "BEGIN {" scan_all_dir | while read file ; do echo $file | sed -e "s,\(.*\)[.]\([^.]*\), t[\"\2\"] = \"\1.\2\"," done cat <&2 exit 1;; *) dir="$dir $1";; esac shift done script=`tempfile` create_script > $script find $dir -name '*.v' | while read file; do mv $file $file.bak awk -f $script $file.bak > $file done coq-8.15.0/topbin/000077500000000000000000000000001417001151100136515ustar00rootroot00000000000000coq-8.15.0/topbin/coqc_bin.ml000066400000000000000000000013321417001151100157570ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit val default_root_prefix : DirPath.t val dirpath_of_string : string -> DirPath.t val locate_absolute_library : DirPath.t -> string end = struct let pr_dirpath dp = str (DirPath.to_string dp) let default_root_prefix = DirPath.empty let split_dirpath d = let l = DirPath.repr d in (DirPath.make (List.tl l), List.hd l) type logical_path = DirPath.t let load_paths = ref ([],[] : CUnix.physical_path list * logical_path list) let remove_load_path dir = let physical, logical = !load_paths in load_paths := List.filter2 (fun p d -> p <> dir) physical logical let add_load_path (phys_path,coq_path) = if CDebug.(get_flag misc) then Feedback.msg_notice (str "path: " ++ pr_dirpath coq_path ++ str " ->" ++ spc() ++ str phys_path); let phys_path = CUnix.canonical_path_name phys_path in let physical, logical = !load_paths in match List.filter2 (fun p d -> p = phys_path) physical logical with | _,[dir] -> if coq_path <> dir (* If this is not the default -I . to coqtop *) && not (phys_path = CUnix.canonical_path_name Filename.current_dir_name && coq_path = default_root_prefix) then begin (* Assume the user is concerned by library naming *) if dir <> default_root_prefix then Feedback.msg_warning (str phys_path ++ strbrk " was previously bound to " ++ pr_dirpath dir ++ strbrk "; it is remapped to " ++ pr_dirpath coq_path); remove_load_path phys_path; load_paths := (phys_path::fst !load_paths, coq_path::snd !load_paths) end | _,[] -> load_paths := (phys_path :: fst !load_paths, coq_path :: snd !load_paths) | _ -> anomaly (Pp.str ("Two logical paths are associated to "^phys_path^".")) let load_paths_of_dir_path dir = let physical, logical = !load_paths in fst (List.filter2 (fun p d -> d = dir) physical logical) let locate_absolute_library dir = (* Search in loadpath *) let pref, base = split_dirpath dir in let loadpath = load_paths_of_dir_path pref in if loadpath = [] then CErrors.user_err (str "No loadpath for " ++ DirPath.print pref); let name = Id.to_string base^".vo" in try let _, file = System.where_in_path ~warn:false loadpath name in file with Not_found -> CErrors.user_err (str "File " ++ str name ++ str " not found in loadpath") let dirpath_of_string s = match String.split_on_char '.' s with | [""] -> default_root_prefix | dir -> DirPath.make (List.rev_map Id.of_string dir) end module Library = struct type library_objects type compilation_unit_name = DirPath.t (* The [*_disk] types below must be kept in sync with the vo representation. 
*) type library_disk = { md_compiled : Safe_typing.compiled_library; md_objects : library_objects; } type summary_disk = { md_name : compilation_unit_name; md_deps : (compilation_unit_name * Safe_typing.vodigest) array; md_ocaml : string; } type library_t = { library_name : compilation_unit_name; library_file : string; library_data : Safe_typing.compiled_library; library_deps : (compilation_unit_name * Safe_typing.vodigest) array; library_digests : Safe_typing.vodigest; library_extra_univs : Univ.ContextSet.t; } let libraries_table : string DPmap.t ref = ref DPmap.empty let register_loaded_library senv libname file = let () = assert (not @@ DPmap.mem libname !libraries_table) in let () = libraries_table := DPmap.add libname file !libraries_table in let prefix = Nativecode.mod_uid_of_dirpath libname ^ "." in let () = Nativecode.register_native_file prefix in senv let mk_library sd f md digests univs = { library_name = sd.md_name; library_file = f; library_data = md; library_deps = sd.md_deps; library_digests = digests; library_extra_univs = univs; } let intern_from_file f = let ch = System.with_magic_number_check (fun file -> ObjFile.open_in ~file) f in let (lsd : summary_disk), digest_lsd = ObjFile.marshal_in_segment ch ~segment:"summary" in let ((lmd : library_disk), digest_lmd) = ObjFile.marshal_in_segment ch ~segment:"library" in let (univs : (Univ.ContextSet.t * bool) option), digest_u = ObjFile.marshal_in_segment ch ~segment:"universes" in ObjFile.close_in ch; System.check_caml_version ~caml:lsd.md_ocaml ~file:f; let open Safe_typing in match univs with | None -> mk_library lsd f lmd.md_compiled (Dvo_or_vi digest_lmd) Univ.ContextSet.empty | Some (uall,true) -> mk_library lsd f lmd.md_compiled (Dvivo (digest_lmd,digest_u)) uall | Some (_,false) -> mk_library lsd f lmd.md_compiled (Dvo_or_vi digest_lmd) Univ.ContextSet.empty let rec intern_library (needed, contents) dir = (* Look if already listed and consequently its dependencies too *) match DPmap.find dir contents with | lib -> lib.library_digests, (needed, contents) | exception Not_found -> let f = Loadpath.locate_absolute_library dir in let m = intern_from_file f in if not (DirPath.equal dir m.library_name) then user_err (str "The file " ++ str f ++ str " contains library" ++ spc () ++ DirPath.print m.library_name ++ spc () ++ str "and not library" ++ spc() ++ DirPath.print dir ++ str "."); let (needed, contents) = intern_library_deps (needed, contents) dir m f in m.library_digests, (dir :: needed, DPmap.add dir m contents) and intern_library_deps libs dir m from = Array.fold_left (intern_mandatory_library dir from) libs m.library_deps and intern_mandatory_library caller from libs (dir,d) = let digest, libs = intern_library libs dir in if not (Safe_typing.digest_match ~actual:digest ~required:d) then user_err (str "Compiled library " ++ DirPath.print caller ++ str " (in file " ++ str from ++ str ") makes inconsistent assumptions \ over library " ++ DirPath.print dir); libs let register_library senv m = let mp = MPfile m.library_name in let mp', senv = Safe_typing.import m.library_data m.library_extra_univs m.library_digests senv in let () = if not (ModPath.equal mp mp') then anomaly (Pp.str "Unexpected disk module name.") in register_loaded_library senv m.library_name m.library_file let save_library_to env dir f lib = let mp = MPfile dir in let ast = Nativelibrary.dump_library mp env lib in let fn = Filename.dirname f ^"/"^ Nativecode.mod_uid_of_dirpath dir in Nativelib.compile_library ast fn let get_used_load_paths () = 
String.Set.elements (DPmap.fold (fun m f acc -> String.Set.add (Filename.dirname f) acc) !libraries_table String.Set.empty) let _ = Nativelib.get_load_paths := get_used_load_paths end let add_path ~unix_path:dir ~coq_root:coq_dirpath = let open System in if exists_dir dir then begin Loadpath.add_load_path (dir,coq_dirpath) end else Feedback.msg_warning (str "Cannot open " ++ str dir) let convert_string d = try Id.of_string d with CErrors.UserError _ -> Flags.if_verbose Feedback.msg_warning (str "Directory " ++ str d ++ str " cannot be used as a Coq identifier (skipped)"); raise Exit let coq_root = Id.of_string "Coq" let add_rec_path ~unix_path ~coq_root = let open System in if exists_dir unix_path then let dirs = all_subdirs ~unix_path in let prefix = DirPath.repr coq_root in let convert_dirs (lp, cp) = try let path = List.rev_map convert_string cp @ prefix in Some (lp, Names.DirPath.make path) with Exit -> None in let dirs = List.map_filter convert_dirs dirs in List.iter Loadpath.add_load_path dirs; Loadpath.add_load_path (unix_path, coq_root) else Feedback.msg_warning (str "Cannot open " ++ str unix_path) let init_load_path_std () = let env = Boot.Env.init () in let stdlib = Boot.Env.stdlib env |> Boot.Path.to_string in let user_contrib = Boot.Env.user_contrib env |> Boot.Path.to_string in let xdg_dirs = Envars.xdg_dirs in let coqpath = Envars.coqpath in (* NOTE: These directories are searched from last to first *) (* first standard library *) add_rec_path ~unix_path:stdlib ~coq_root:(Names.DirPath.make[coq_root]); (* then user-contrib *) if Sys.file_exists user_contrib then add_rec_path ~unix_path:user_contrib ~coq_root:Loadpath.default_root_prefix; (* then directories in XDG_DATA_DIRS and XDG_DATA_HOME *) List.iter (fun s -> add_rec_path ~unix_path:s ~coq_root:Loadpath.default_root_prefix) (xdg_dirs ~warn:(fun x -> Feedback.msg_warning (str x))); (* then directories in COQPATH *) List.iter (fun s -> add_rec_path ~unix_path:s ~coq_root:Loadpath.default_root_prefix) coqpath let init_load_path ~boot ~vo_path = if not boot then init_load_path_std (); (* always add current directory *) add_path ~unix_path:"." ~coq_root:Loadpath.default_root_prefix; (* additional loadpath, given with -R/-Q options *) List.iter (fun (unix_path, coq_root) -> add_rec_path ~unix_path ~coq_root) (List.rev vo_path) let fb_handler = function | Feedback.{ contents; _ } -> match contents with | Feedback.Message(_lvl,_loc,msg)-> Format.printf "%s@\n%!" 
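(* An illustrative example (hypothetical paths) of the load-path setup
   above: a call such as

     add_rec_path ~unix_path:"theories" ~coq_root:(Loadpath.dirpath_of_string "Mylib")

   binds the directory theories to the logical prefix Mylib and every
   subdirectory to the corresponding dotted path, e.g. theories/Data/List
   to Mylib.Data.List, skipping (with a verbose-mode warning from
   [convert_string]) any directory whose name is not a valid Coq
   identifier. *)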
Pp.(string_of_ppcmds msg) | _ -> () let init_coq () = let senv = Safe_typing.empty_environment in let () = Flags.set_native_compiler true in let senv = Safe_typing.set_native_compiler true senv in let () = Safe_typing.allow_delayed_constants := false in let dummy = Names.DirPath.make [Names.Id.of_string_soft "@native"] in let _, senv = Safe_typing.start_library dummy senv in senv let compile senv ~in_file = let lib = Library.intern_from_file in_file in let dir = lib.Library.library_name in (* Require the dependencies **only once** *) let deps, contents = Library.intern_library_deps ([], DPmap.empty) dir lib in_file in let fold senv dep = Library.register_library senv (DPmap.find dep contents) in let senv = List.fold_left fold senv (List.rev deps) in (* Extract the native code and compile it *) let modl = (Safe_typing.module_of_library lib.Library.library_data).Declarations.mod_type in let out_vo = Filename.(remove_extension in_file) ^ ".vo" in Library.save_library_to (Safe_typing.env_of_safe_env senv) dir out_vo modl module Usage : sig val usage : unit -> 'a end = struct let print_usage_channel co command = output_string co command; output_string co "coqnative options are:\n"; output_string co " -Q dir coqdir map physical dir to logical coqdir\ \n -R dir coqdir synonymous for -Q\ \n\ \n\ \n -boot boot mode\ \n -coqlib dir set coqnative's standard library location\ \n -native-output-dir set the output directory for native objects\ \n -nI dir OCaml include directories for the native compiler (default if not set) \ \n\ \n -h, --help print this list of options\ \n" (* print the usage on standard error *) let print_usage = print_usage_channel stderr let print_usage_coqnative () = print_usage "Usage: coqnative file\n\n" let usage () = print_usage_coqnative (); flush stderr; exit 1 end type opts = { boot : bool; vo_path : (string * DirPath.t) list; ml_path : string list; } let rec parse_args (args : string list) accu = match args with | [] -> CErrors.user_err (Pp.str "parse args error: missing argument") | "-boot" :: rem -> parse_args rem { accu with boot = true} (* We ignore as we don't require Prelude explicitly *) | "-noinit" :: rem -> parse_args rem accu | ("-Q" | "-R") :: d :: p :: rem -> let p = Loadpath.dirpath_of_string p in let accu = { accu with vo_path = (d, p) :: accu.vo_path } in parse_args rem accu | "-I" :: _d :: rem -> (* Ignore *) parse_args rem accu | "-nI" :: dir :: rem -> let accu = { accu with ml_path = dir :: accu.ml_path } in parse_args rem accu |"-native-output-dir" :: dir :: rem -> Nativelib.output_dir := dir; parse_args rem accu | "-coqlib" :: s :: rem -> if not (Minisys.exists_dir s) then fatal_error (str "Directory '" ++ str s ++ str "' does not exist") false; Boot.Env.set_coqlib s; parse_args rem accu | ("-?"|"-h"|"-H"|"-help"|"--help") :: _ -> Usage.usage () | [file] -> accu, file | args -> let args_msg = String.concat " " args in CErrors.user_err Pp.(str "parse args error, too many arguments: " ++ str args_msg) let () = let _ = Feedback.add_feeder fb_handler in try let opts = { boot = false; vo_path = []; ml_path = [] } in let opts, in_file = parse_args (List.tl @@ Array.to_list Sys.argv) opts in let () = init_load_path ~boot:opts.boot ~vo_path:(List.rev opts.vo_path) in let () = Nativelib.include_dirs := List.rev opts.ml_path in let senv = init_coq () in compile senv ~in_file with exn -> Format.eprintf "Error: @[%a@]@\n%!" 
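(* An illustrative invocation (hypothetical file and directory names), as
   accepted by [parse_args] above:

     coqnative -Q theories Mylib -native-output-dir .native theories/Foo.vo

   parses to { boot = false; vo_path = [("theories", Mylib)]; ml_path = [] }
   plus the input file theories/Foo.vo; [compile] then loads Foo.vo and the
   libraries it depends on before extracting and compiling their native
   code. *)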
Pp.pp_with (CErrors.print exn); let exit_code = if (CErrors.is_anomaly exn) then 129 else 1 in exit exit_code coq-8.15.0/topbin/coqproofworker_bin.ml000066400000000000000000000014571417001151100201240ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Some (Pp.str (Format.asprintf "%a" Symtable.report_error e)) | _ -> None ) (* XXX: Remove this for Toploop.get_directive once we bump the OCaml version, unfortunately 4.05 still doesn't have it, and 4.13 deprecated directive_table *) let _get_directive name = let dt = Toploop.directive_table [@ocaml.warning "-3"] in Hashtbl.find_opt dt name (* XXX: 4.13 deprecated Topdirs.load_file in favor of Toopload.load_file, however that last function was introduced in 4.12 so it seems... *) let _load_file = Topdirs.load_file [@ocaml.warning "-3"] let drop_setup () = begin (* Enable rectypes in the toplevel if it has the directive #rectypes *) match _get_directive "rectypes" with | None -> () | Some (Toploop.Directive_none f) -> f () | Some _ -> Format.eprintf "Warning: rectypes directive has changed!!@\n%!" end; let ppf = Format.std_formatter in Mltop.(set_top { load_obj = (fun f -> if not (_load_file ppf f) then CErrors.user_err Pp.(str ("Could not load plugin "^f)) ); add_dir = Topdirs.dir_directory; ml_loop = (fun () -> Toploop.loop ppf); }) (* Main coqtop initialization *) let _ = drop_setup (); Coqtop.(start_coq coqtop_toplevel) coq-8.15.0/topbin/dune000066400000000000000000000025721417001151100145350ustar00rootroot00000000000000(install (section bin) (package coq-core) (files (coqtop_bin.exe as coqtop))) (executable (name coqtop_bin) (public_name coqtop.opt) (package coq-core) (modules coqtop_bin) (libraries coq-core.toplevel) (link_flags -linkall)) (executable (name coqtop_byte_bin) (public_name coqtop.byte) (package coq-core) (modules coqtop_byte_bin) (libraries compiler-libs.toplevel coq-core.toplevel) (modes byte) (link_flags -linkall)) (executable (name coqc_bin) (public_name coqc) (package coq-core) (modules coqc_bin) (libraries coq-core.toplevel) (modes native byte) ; Adding -ccopt -flto to links options could be interesting, however, ; it doesn't work on Windows (link_flags -linkall)) (install (section bin) (package coq-core) (files (coqc_bin.bc as coqc.byte))) (executable (name coqnative_bin) (public_name coqnative) (package coq-core) (modules coqnative_bin) (libraries coq-core.kernel) (link_flags -linkall)) ; Workers (executables (names coqqueryworker_bin coqtacticworker_bin coqproofworker_bin) (public_names coqqueryworker.opt coqtacticworker.opt coqproofworker.opt) (package coq-core) (modules :standard \ coqtop_byte_bin coqtop_bin coqc_bin coqnative_bin) (libraries coq-core.toplevel) (link_flags -linkall)) ; Workers installed targets (alias (name topworkers) (deps %{bin:coqqueryworker.opt} %{bin:coqtacticworker.opt} %{bin:coqproofworker.opt})) coq-8.15.0/toplevel/000077500000000000000000000000001417001151100142105ustar00rootroot00000000000000coq-8.15.0/toplevel/ccompile.ml000066400000000000000000000245151417001151100163440ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Coqrc.load_rcfile ~rcfile:opts.config.rcfile ~state) () else begin Flags.if_verbose Feedback.msg_info (str"Skipping rcfile loading."); state end 
let load_vernacular opts ~state = List.fold_left (fun state (f_in, echo) -> let s = Loadpath.locate_file f_in in (* Should make the beautify logic clearer *) let load_vernac f = Vernac.load_vernac ~echo ~interactive:false ~check:true ~state f in if !Flags.beautify then Flags.with_option Flags.beautify_file load_vernac f_in else load_vernac s ) state opts.pre.load_vernacular_list let load_init_vernaculars opts ~state = let state = load_init_file opts ~state in let state = load_vernacular opts ~state in state (******************************************************************************) (* File Compilation *) (******************************************************************************) let warn_file_no_extension = CWarnings.create ~name:"file-no-extension" ~category:"filesystem" (fun (f,ext) -> str "File \"" ++ str f ++ strbrk "\" has been implicitly expanded to \"" ++ str f ++ str ext ++ str "\"") let ensure_ext ext f = if Filename.check_suffix f ext then f else begin warn_file_no_extension (f,ext); f ^ ext end let chop_extension f = try Filename.chop_extension f with _ -> f let ensure_bname src tgt = let src, tgt = Filename.basename src, Filename.basename tgt in let src, tgt = chop_extension src, chop_extension tgt in if src <> tgt then fatal_error (str "Source and target file names must coincide, directories can differ" ++ fnl () ++ str "Source: " ++ str src ++ fnl () ++ str "Target: " ++ str tgt) let ensure ext src tgt = ensure_bname src tgt; ensure_ext ext tgt let ensure_exists f = if not (Sys.file_exists f) then fatal_error (hov 0 (str "Can't find file" ++ spc () ++ str f)) let ensure_exists_with_prefix f_in f_out src_suffix tgt_suffix = let long_f_dot_src = ensure src_suffix f_in f_in in ensure_exists long_f_dot_src; let long_f_dot_tgt = match f_out with | None -> chop_extension long_f_dot_src ^ tgt_suffix | Some f -> ensure tgt_suffix long_f_dot_src f in long_f_dot_src, long_f_dot_tgt let create_empty_file filename = let f = open_out filename in close_out f let check_pending_proofs filename = let pfs = Vernacstate.Declare.get_all_proof_names () [@ocaml.warning "-3"] in if not (CList.is_empty pfs) then fatal_error (str "There are pending proofs in file " ++ str filename ++ str": " ++ (pfs |> List.rev |> prlist_with_sep pr_comma Names.Id.print) ++ str "."); let pm = Vernacstate.Declare.get_program () [@ocaml.warning "-3"] in let what_for = Pp.str ("file " ^ filename) in NeList.iter (fun pm -> Declare.Obls.check_solved_obligations ~what_for ~pm) pm (* Compile a vernac file *) let compile opts stm_options injections copts ~echo ~f_in ~f_out = let open Vernac.State in let output_native_objects = match opts.config.native_compiler with | NativeOff -> false | NativeOn {ondemand} -> not ondemand in let mode = copts.compilation_mode in let ext_in, ext_out = match mode with | BuildVo -> ".v", ".vo" | BuildVio -> ".v", ".vio" | Vio2Vo -> ".vio", ".vo" | BuildVos -> ".v", ".vos" | BuildVok -> ".v", ".vok" in let long_f_dot_in, long_f_dot_out = ensure_exists_with_prefix f_in f_out ext_in ext_out in let dump_empty_vos () = (* Produce an empty .vos file, as a way to ensure that a stale .vos can never be loaded *) let long_f_dot_vos = (chop_extension long_f_dot_out) ^ ".vos" in create_empty_file long_f_dot_vos in match mode with | BuildVo | BuildVok -> let doc, sid = Topfmt.(in_phase ~phase:LoadingPrelude) Stm.new_doc Stm.{ doc_type = VoDoc long_f_dot_out; injections; stm_options; } in let state = { doc; sid; proof = None; time = opts.config.time } in let state = load_init_vernaculars opts 
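(* Illustrative behaviour (hypothetical file names) of the file-name
   helpers above, in the default vo mode where the suffixes are ".v" and
   ".vo": [ensure_exists_with_prefix "dir/foo" None ".v" ".vo"] expands the
   input to dir/foo.v (emitting the "file-no-extension" warning), checks
   that this file exists, and returns ("dir/foo.v", "dir/foo.vo"); passing
   [Some "out/foo.vo"] redirects the output while keeping the basename
   check, whereas [Some "out/bar.vo"] is rejected because source and
   target base names must coincide. *)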
~state in let ldir = Stm.get_ldir ~doc:state.doc in Aux_file.(start_aux_file ~aux_file:(aux_file_name_for long_f_dot_out) ~v_file:long_f_dot_in); Dumpglob.push_output copts.glob_out; Dumpglob.start_dump_glob ~vfile:long_f_dot_in ~vofile:long_f_dot_out; Dumpglob.dump_string ("F" ^ Names.DirPath.to_string ldir ^ "\n"); let wall_clock1 = Unix.gettimeofday () in let check = Stm.AsyncOpts.(stm_options.async_proofs_mode = APoff) in let state = Vernac.load_vernac ~echo ~check ~interactive:false ~state ~ldir long_f_dot_in in let _doc = Stm.join ~doc:state.doc in let wall_clock2 = Unix.gettimeofday () in check_pending_proofs long_f_dot_in; (* In .vo production, dump a complete .vo file. In .vok production, only dump an empty .vok file. *) if mode = BuildVo then Library.save_library_to ~output_native_objects Library.ProofsTodoNone ldir long_f_dot_out; Aux_file.record_in_aux_at "vo_compile_time" (Printf.sprintf "%.3f" (wall_clock2 -. wall_clock1)); Aux_file.stop_aux_file (); (* In .vo production, dump an empty .vos file to indicate that the .vo should be loaded, and dump an empty .vok file to indicate that proofs are ok. *) if mode = BuildVo then begin dump_empty_vos(); create_empty_file (long_f_dot_out ^ "k"); end; Dumpglob.end_dump_glob () | BuildVio | BuildVos -> (* We need to disable error resiliency, otherwise some errors will be ignored in batch mode. c.f. #6707 This is not necessary in the vo case as it fully checks the document anyways. *) let stm_options = let open Stm.AsyncOpts in { stm_options with async_proofs_mode = APon; async_proofs_n_workers = 0; async_proofs_cmd_error_resilience = false; async_proofs_tac_error_resilience = FNone; } in let doc, sid = Topfmt.(in_phase ~phase:LoadingPrelude) Stm.new_doc Stm.{ doc_type = VioDoc long_f_dot_out; injections; stm_options; } in let state = { doc; sid; proof = None; time = opts.config.time } in let state = load_init_vernaculars opts ~state in let ldir = Stm.get_ldir ~doc:state.doc in let state = Vernac.load_vernac ~echo ~check:false ~interactive:false ~state long_f_dot_in in let doc = Stm.finish ~doc:state.doc in check_pending_proofs long_f_dot_in; let create_vos = (mode = BuildVos) in (* In .vos production, the output .vos file contains compiled statements. In .vio production, the output .vio file contains compiled statements and suspended proofs. *) let () = ignore (Stm.snapshot_vio ~create_vos ~doc ~output_native_objects ldir long_f_dot_out) in Stm.reset_task_queue (); (* In .vio production, dump an empty .vos file to indicate that the .vio should be loaded. *) if mode = BuildVio then dump_empty_vos() | Vio2Vo -> Flags.async_proofs_worker_id := "Vio2Vo"; let sum, lib, univs, tasks, proofs = Library.load_library_todo long_f_dot_in in let univs, proofs = Stm.finish_tasks long_f_dot_out univs proofs tasks in Library.save_library_raw long_f_dot_out sum lib univs proofs; (* Like in direct .vo production, dump an empty .vok file and an empty .vos file. 
*) dump_empty_vos(); create_empty_file (long_f_dot_out ^ "k") let compile opts stm_opts copts injections ~echo ~f_in ~f_out = ignore(CoqworkmgrApi.get 1); compile opts stm_opts injections copts ~echo ~f_in ~f_out; CoqworkmgrApi.giveback 1 let compile_file opts stm_opts copts injections (f_in, echo) = let f_out = copts.compilation_output_name in if !Flags.beautify then Flags.with_option Flags.beautify_file (fun f_in -> compile opts stm_opts copts injections ~echo ~f_in ~f_out) f_in else compile opts stm_opts copts injections ~echo ~f_in ~f_out let compile_file opts stm_opts copts injections = Option.iter (compile_file opts stm_opts copts injections) copts.compile_file (******************************************************************************) (* VIO Dispatching *) (******************************************************************************) let check_vio_tasks copts = Flags.async_proofs_worker_id := "VioChecking"; let rc = List.fold_left (fun acc (n,f) -> let f_in = ensure ".vio" f f in ensure_exists f_in; Vio_checking.check_vio (n,f_in) && acc) true copts.vio_tasks in if not rc then fatal_error Pp.(str "VIO Task Check failed") (* vio files *) let schedule_vio copts = let l = List.map (fun f -> let f_in = ensure ".vio" f f in ensure_exists f_in; f_in) copts.vio_files in if copts.vio_checking then Vio_checking.schedule_vio_checking copts.vio_files_j l else Vio_checking.schedule_vio_compilation copts.vio_files_j l let do_vio opts copts _injections = (* Vio compile pass *) if copts.vio_files <> [] then schedule_vio copts; (* Vio task pass *) if copts.vio_tasks <> [] then check_vio_tasks copts coq-8.15.0/toplevel/ccompile.mli000066400000000000000000000021711417001151100165070ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* state:Vernac.State.t-> Vernac.State.t (** [compile_file opts] compile file specified in [opts] *) val compile_file : Coqargs.t -> Stm.AsyncOpts.stm_opt -> Coqcargs.t -> Coqargs.injection_command list -> unit (** [do_vio opts] process [.vio] files in [opts] *) val do_vio : Coqargs.t -> Coqcargs.t -> Coqargs.injection_command list -> unit coq-8.15.0/toplevel/coqc.ml000066400000000000000000000072041417001151100154720ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* flush_all(); Topfmt.print_err_exn exn; flush_all(); let exit_code = if (CErrors.is_anomaly exn) then 129 else 1 in exit exit_code let custom_coqc : ((Coqcargs.t * Coqtop.color) * Stm.AsyncOpts.stm_opt, 'b) Coqtop.custom_toplevel = Coqtop.{ parse_extra = (fun extras -> let color_mode, extras = Coqtop.parse_extra_colors extras in let stm_opts, extras = Stmargs.parse_args ~init:Stm.AsyncOpts.default_opts extras in let coqc_opts = Coqcargs.parse extras in ((coqc_opts, color_mode), stm_opts), []); usage = coqc_specific_usage; init_extra = coqc_init; run = coqc_run; initial_args = Coqargs.default; } let main () = Coqtop.start_coq custom_coqc coq-8.15.0/toplevel/coqc.mli000066400000000000000000000012741417001151100156440ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit 
coq-8.15.0/toplevel/coqcargs.ml000066400000000000000000000154641417001151100163560ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 0 && s.[0] = '-' let add_compile ?echo copts s = if is_dash_argument s then arg_error Pp.(str "Unknown option " ++ str s); (* make the file name explicit; needed not to break up Coq loadpath stuff. *) let echo = Option.default copts.echo echo in let s = let open Filename in if is_implicit s then concat current_dir_name s else s in { copts with compile_file = Some (s,echo) } let add_compile ?echo copts v_file = match copts.compile_file with | Some _ -> arg_error Pp.(str "More than one file to compile: " ++ str v_file) | None -> add_compile ?echo copts v_file let add_vio_task opts f = { opts with vio_tasks = f :: opts.vio_tasks } let add_vio_file opts f = { opts with vio_files = f :: opts.vio_files } let set_vio_checking_j opts opt j = try { opts with vio_files_j = int_of_string j } with Failure _ -> prerr_endline ("The first argument of " ^ opt ^ " must the number"); prerr_endline "of concurrent workers to be used (a positive integer)."; prerr_endline "Makefiles generated by coq_makefile should be called"; prerr_endline "setting the J variable like in 'make vio2vo J=3'"; exit 1 let set_compilation_mode opts mode = match opts.compilation_mode with | BuildVo -> { opts with compilation_mode = mode } | mode' when mode <> mode' -> prerr_endline "Options -vio and -vio2vo are exclusive"; exit 1 | _ -> opts let get_task_list s = List.map (fun s -> try int_of_string s with Failure _ -> prerr_endline "Option -check-vio-tasks expects a comma-separated list"; prerr_endline "of integers followed by a list of files"; exit 1) (Str.split (Str.regexp ",") s) let is_not_dash_option = function | Some f when String.length f > 0 && f.[0] <> '-' -> true | _ -> false let rec add_vio_args peek next oval = if is_not_dash_option (peek ()) then let oval = add_vio_file oval (next ()) in add_vio_args peek next oval else oval let warn_deprecated_quick = CWarnings.create ~name:"deprecated-quick" ~category:"deprecated" (fun () -> Pp.strbrk "The -quick option is renamed -vio. 
Please consider using the -vos feature instead.") let parse arglist : t = let echo = ref false in let args = ref arglist in let extras = ref [] in let rec parse (oval : t) = match !args with | [] -> (oval, List.rev !extras) | opt :: rem -> args := rem; let next () = match !args with | x::rem -> args := rem; x | [] -> error_missing_arg opt in let peek_next () = match !args with | x::_ -> Some x | [] -> None in let noval : t = begin match opt with (* Deprecated options *) | "-opt" | "-byte" as opt -> depr opt; oval | "-image" as opt -> depr opt; let _ = next () in oval (* Non deprecated options *) | "-output-context" -> { oval with output_context = true } (* Verbose == echo mode *) | "-verbose" -> echo := true; oval (* Output filename *) | "-o" -> { oval with compilation_output_name = Some (next ()) } | "-quick" -> warn_deprecated_quick(); set_compilation_mode oval BuildVio | "-vio" -> set_compilation_mode oval BuildVio |"-vos" -> Flags.load_vos_libraries := true; { oval with compilation_mode = BuildVos } |"-vok" -> Flags.load_vos_libraries := true; { oval with compilation_mode = BuildVok } | "-check-vio-tasks" -> let tno = get_task_list (next ()) in let tfile = next () in add_vio_task oval (tno,tfile) | "-schedule-vio-checking" -> let oval = { oval with vio_checking = true } in let oval = set_vio_checking_j oval opt (next ()) in let oval = add_vio_file oval (next ()) in add_vio_args peek_next next oval | "-schedule-vio2vo" -> let oval = set_vio_checking_j oval opt (next ()) in let oval = add_vio_file oval (next ()) in add_vio_args peek_next next oval | "-vio2vo" -> let oval = add_compile ~echo:false oval (next ()) in set_compilation_mode oval Vio2Vo (* Glob options *) |"-no-glob" | "-noglob" -> { oval with glob_out = Dumpglob.NoGlob } |"-dump-glob" -> let file = next () in { oval with glob_out = Dumpglob.File file } (* Rest *) | s -> extras := s :: !extras; oval end in parse noval in try let opts, extra = parse default in let args = List.fold_left add_compile opts extra in args with any -> fatal_error any let parse args = let opts = parse args in { opts with vio_tasks = List.rev opts.vio_tasks ; vio_files = List.rev opts.vio_files } coq-8.15.0/toplevel/coqcargs.mli000066400000000000000000000036151417001151100165220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t coq-8.15.0/toplevel/coqloop.ml000066400000000000000000000442371417001151100162300ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string; mutable str : Bytes.t; (* buffer of already read characters *) mutable len : int; (* number of chars in the buffer *) mutable bols : int list; (* offsets in str of beginning of lines *) mutable tokens : Pcoq.Parsable.t; (* stream of tokens *) mutable start : int } (* stream count of the first char of the buffer *) (* Double the size of the buffer. *) let resize_buffer ibuf = let open Bytes in let nstr = create (2 * length ibuf.str + 1) in blit ibuf.str 0 nstr 0 (length ibuf.str); ibuf.str <- nstr (* Delete all irrelevant lines of the input buffer. Keep the last line in the buffer (useful when there are several commands on the same line). 
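(* An illustrative example (hypothetical file names) of the coqc argument
   parser defined just above ([Coqcargs.parse]):
   [parse ["-o"; "out/foo.vo"; "dir/foo.v"]] records
   compilation_output_name = Some "out/foo.vo" and the source file as
   ./dir/foo.v (implicit names are made explicit so as not to disturb the
   load path), keeping the default BuildVo mode, while
   [parse ["-vio"; "foo.v"]] switches the compilation mode to BuildVio
   ([-quick] does the same but emits the deprecation warning above). *)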
*) let resynch_buffer ibuf = match ibuf.bols with | ll::_ -> let new_len = ibuf.len - ll in Bytes.blit ibuf.str ll ibuf.str 0 new_len; ibuf.len <- new_len; ibuf.bols <- []; ibuf.start <- ibuf.start + ll | _ -> () (* emacs special prompt tag for easy detection. No special character, to avoid interfering with utf8. Compatibility code removed. *) let emacs_prompt_startstring () = if !print_emacs then "" else "" let emacs_prompt_endstring () = if !print_emacs then "" else "" (* Read a char in an input channel, displaying a prompt at every beginning of line. *) let prompt_char doc ic ibuf count = let bol = match ibuf.bols with | ll::_ -> Int.equal ibuf.len ll | [] -> Int.equal ibuf.len 0 in if bol && not !print_emacs then top_stderr (str (ibuf.prompt doc)); try let c = input_char ic in if c == '\n' then ibuf.bols <- (ibuf.len+1) :: ibuf.bols; if ibuf.len == Bytes.length ibuf.str then resize_buffer ibuf; Bytes.set ibuf.str ibuf.len c; ibuf.len <- ibuf.len + 1; Some c with End_of_file -> None (* Reinitialize the char stream (after a Drop) *) let reset_input_buffer doc ic ibuf = ibuf.str <- Bytes.empty; ibuf.len <- 0; ibuf.bols <- []; ibuf.tokens <- Pcoq.Parsable.make (Stream.from (prompt_char doc ic ibuf)); ibuf.start <- 0 (* Functions to print underlined locations from an input buffer. *) module TopErr = struct (* Given a location, returns the list of locations of each line. The last line is returned separately. It also checks the location bounds. *) let get_bols_of_loc ibuf (bp,ep) = let add_line (b,e) lines = if b < 0 || e < b then CErrors.anomaly (Pp.str "Bad location."); match lines with | ([],None) -> ([], Some (b,e)) | (fl,oe) -> ((b,e)::fl, oe) in let rec lines_rec ba after = function | [] -> add_line (0,ba) after | ll::_ when ll <= bp -> add_line (ll,ba) after | ll::fl -> let nafter = if ll < ep then add_line (ll,ba) after else after in lines_rec ll nafter fl in let (fl,ll) = lines_rec ibuf.len ([],None) ibuf.bols in (fl,Option.get ll) let dotted_location (b,e) = if e-b < 3 then ("", String.make (e-b) ' ') else (String.make (e-b-1) '.', " ") let blanch_utf8_string s bp ep = let open Bytes in let s' = make (ep-bp) ' ' in let j = ref 0 in for i = bp to ep - 1 do let n = Char.code (get s i) in (* Heuristic: assume utf-8 chars are printed using a single fixed-size char and therefore contract all utf-8 code into one space; in any case, preserve tabulation so that its effective interpretation in terms of spacing is preserved *) if get s i == '\t' then set s' !j '\t'; if n < 0x80 || 0xC0 <= n then incr j done; Bytes.sub_string s' 0 !j let adjust_loc_buf ib loc = let open Loc in { loc with ep = loc.ep - ib.start; bp = loc.bp - ib.start } let print_highlight_location ib loc = let (bp,ep) = Loc.unloc loc in let highlight_lines = match get_bols_of_loc ib (bp,ep) with | ([],(bl,el)) -> let shift = blanch_utf8_string ib.str bl bp in let span = String.length (blanch_utf8_string ib.str bp ep) in (str"> " ++ str(Bytes.sub_string ib.str bl (el-bl-1)) ++ fnl () ++ str"> " ++ str(shift) ++ str(String.make span '^')) | ((b1,e1)::ml,(bn,en)) -> let (d1,s1) = dotted_location (b1,bp) in let (dn,sn) = dotted_location (ep,en) in let l1 = (str"> " ++ str d1 ++ str s1 ++ str(Bytes.sub_string ib.str bp (e1-bp))) in let li = prlist (fun (bi,ei) -> (str"> " ++ str(Bytes.sub_string ib.str bi (ei-bi)))) ml in let ln = (str"> " ++ str(Bytes.sub_string ib.str bn (ep-bn)) ++ str sn ++ str dn) in (l1 ++ li ++ ln) in highlight_lines let valid_buffer_loc ib loc = let (b,e) = Loc.unloc loc in b-ib.start >= 0 && e-ib.start < 
ib.len && b<=e (* Toplevel error explanation. *) let error_info_for_buffer ?loc buf = match loc with | None -> Topfmt.pr_phase ?loc () | Some loc -> let fname = loc.Loc.fname in (* We are in the toplevel *) match fname with | Loc.ToplevelInput -> let nloc = adjust_loc_buf buf loc in if valid_buffer_loc buf loc then match Topfmt.pr_phase ~loc:nloc () with | None -> None | Some hd -> Some (hd ++ fnl () ++ print_highlight_location buf nloc) (* in the toplevel, but not a valid buffer *) else Topfmt.pr_phase ~loc () (* we are in batch mode, don't adjust location *) | Loc.InFile _ -> Topfmt.pr_phase ~loc () (* Actual printing routine *) let print_error_for_buffer ?loc lvl msg buf = let pre_hdr = error_info_for_buffer ?loc buf in if !print_emacs then Topfmt.emacs_logger ?pre_hdr lvl msg else Topfmt.std_logger ?pre_hdr lvl msg (* let print_toplevel_parse_error (e, info) buf = let loc = Loc.get_loc info in let lvl = Feedback.Error in let msg = CErrors.iprint (e, info) in print_error_for_buffer ?loc lvl msg buf *) end (*s The Coq prompt is the name of the focused proof, if any, and "Coq" otherwise. We trap all exceptions to prevent the error message printing from cycling. *) let make_prompt () = try (Names.Id.to_string (Vernacstate.Declare.get_current_proof_name ())) ^ " < " with Vernacstate.Declare.NoCurrentProof -> "Coq < " [@@ocaml.warning "-3"] (* the coq prompt added to the default one when in emacs mode The prompt contains the current state label [n] (for global backtracking) and the current proof state [p] (for proof backtracking) plus the list of open (nested) proofs (for proof aborting when backtracking). It looks like: "n |lem1|lem2|lem3| p < " *) let make_emacs_prompt doc = let statnum = Stateid.to_string (Stm.get_current_state ~doc) in let dpth = Stm.current_proof_depth ~doc in let pending = Stm.get_all_proof_names ~doc in let pendingprompt = List.fold_left (fun acc x -> acc ^ (if CString.is_empty acc then "" else "|") ^ Names.Id.to_string x) "" pending in let proof_info = if dpth >= 0 then string_of_int dpth else "0" in if !print_emacs then statnum ^ " |" ^ pendingprompt ^ "| " ^ proof_info ^ " < " else "" (* A buffer to store the current command read on stdin. It is * initialized when a vernac command is immediately followed by "\n", * or after a Drop. *) let top_buffer = let pr doc = emacs_prompt_startstring() ^ make_prompt() ^ make_emacs_prompt doc ^ emacs_prompt_endstring() in { prompt = pr; str = Bytes.empty; len = 0; bols = []; tokens = Pcoq.Parsable.make (Stream.of_list []); start = 0 } let set_prompt prompt = top_buffer.prompt <- (fun doc -> emacs_prompt_startstring() ^ prompt () ^ emacs_prompt_endstring()) (* Read the input stream until a dot is encountered *) let parse_to_dot = let rec dot st = match LStream.next st with | Tok.KEYWORD ("."|"...") -> () | Tok.EOI -> () | _ -> dot st in Pcoq.Entry.(of_parser "Coqtoplevel.dot" { parser_fun = dot }) (* If an error occurred while parsing, we try to read the input until a dot token is encountered. 
We assume that when a lexer error occurs, at least one char was eaten *) let rec discard_to_dot () = try Pcoq.Entry.parse parse_to_dot top_buffer.tokens with | CLexer.Error.E _ -> (* Lexer failed *) discard_to_dot () | e when CErrors.noncritical e -> () let read_sentence ~state input = (* XXX: careful with ignoring the state Eugene!*) let open Vernac.State in try Stm.parse_sentence ~doc:state.doc state.sid ~entry:G_toplevel.vernac_toplevel input with reraise -> let reraise = Exninfo.capture reraise in (* When typing Ctrl-C, two situations may arise: - if a lexer/parsing arrived first, the rest of the ill-formed sentence needs to be discarded, and, if Ctrl-C is found while trying to discarding (in discard_to_dot), let it bypass the reporting of the parsing error and report the Sys.Break instead. - if a Ctrl-C arrives after a valid start of sentence, do not discard_to_dot since Ctrl-C is the last read character and there is nothing left to discard. *) (match fst reraise with | Sys.Break -> Pp.pp_with !Topfmt.err_ft (Pp.fnl ()) | _ -> try discard_to_dot () with Sys.Break -> Pp.pp_with !Topfmt.err_ft (Pp.fnl ()); raise Sys.Break); (* The caller of read_sentence does the error printing now, this should be re-enabled once we rely on the feedback error printer again *) (* TopErr.print_toplevel_parse_error reraise top_buffer; *) Exninfo.iraise reraise let extract_default_loc loc doc_id sid : Loc.t option = match loc with | Some _ -> loc | None -> try let doc = Stm.get_doc doc_id in Option.cata (fun {CAst.loc} -> loc) None Stm.(get_ast ~doc sid) with _ -> loc (** Coqloop Console feedback handler *) let coqloop_feed (fb : Feedback.feedback) = let open Feedback in match fb.contents with | Processed -> () | Incomplete -> () | Complete -> () | ProcessingIn _ -> () | InProgress _ -> () | WorkerStatus (_,_) -> () | AddedAxiom -> () | GlobRef (_,_,_,_,_) -> () | GlobDef (_,_,_,_) -> () | FileDependency (_,_) -> () | FileLoaded (_,_) -> () | Custom (_,_,_) -> () (* Re-enable when we switch back to feedback-based error printing *) | Message (Error,loc,msg) -> () (* TopErr.print_error_for_buffer ?loc lvl msg top_buffer *) | Message (Warning,loc,msg) -> let loc = extract_default_loc loc fb.doc_id fb.span_id in TopErr.print_error_for_buffer ?loc Warning msg top_buffer | Message (lvl,loc,msg) -> TopErr.print_error_for_buffer ?loc lvl msg top_buffer (** Main coq loop : read vernacular expressions until Drop is entered. Ctrl-C is handled internally as Sys.Break instead of aborting Coq. Normally, the only exceptions that can come out of [do_vernac] and exit the loop are Drop and Quit. Any other exception there indicates an issue with [print_toplevel_error] above. *) (* Flush in a compatible order with 8.5 *) (* This mimics the semantics of the old Pp.flush_all *) let loop_flush_all () = flush stderr; flush stdout; Format.pp_print_flush !Topfmt.std_ft (); Format.pp_print_flush !Topfmt.err_ft () (* Goal equality heuristic. 
*) let pequal cmp1 cmp2 (a1,a2) (b1,b2) = cmp1 a1 b1 && cmp2 a2 b2 let evleq e1 e2 = CList.equal Evar.equal e1 e2 let cproof p1 p2 = let Proof.{goals=a1;stack=a2;sigma=sigma1} = Proof.data p1 in let Proof.{goals=b1;stack=b2;sigma=sigma2} = Proof.data p2 in evleq a1 b1 && CList.equal (pequal evleq evleq) a2 b2 && CList.equal Evar.equal (Evd.shelf sigma1) (Evd.shelf sigma2) && Evar.Set.equal (Evd.given_up sigma1) (Evd.given_up sigma2) let drop_last_doc = ref None (* todo: could add other Set/Unset commands, such as "Printing Universes" *) let print_anyway_opts = [ [ "Diffs" ]; ] let print_anyway c = let open Vernacexpr in match c.expr with | VernacSetOption (_, opt, _) -> List.mem opt print_anyway_opts | _ -> false (* We try to behave better when goal printing raises an exception [usually Ctrl-C] This is mostly a hack as we should protect printing in a more generic way, but that'll do for now *) let top_goal_print ~doc c oldp newp = try let proof_changed = not (Option.equal cproof oldp newp) in let print_goals = proof_changed && Vernacstate.Declare.there_are_pending_proofs () || print_anyway c in if not !Flags.quiet && print_goals then begin let dproof = Stm.get_prev_proof ~doc (Stm.get_current_state ~doc) in Printer.print_and_diff dproof newp end with | exn -> let (e, info) = Exninfo.capture exn in let loc = Loc.get_loc info in let msg = CErrors.iprint (e, info) in TopErr.print_error_for_buffer ?loc Feedback.Error msg top_buffer [@@ocaml.warning "-3"] let exit_on_error = let open Goptions in declare_bool_option_and_ref ~depr:false ~key:["Coqtop";"Exit";"On";"Error"] ~value:false let show_proof_diff_cmd ~state diff_opt = let open Vernac.State in match state.proof with | None -> CErrors.user_err (str "No proofs to diff.") | Some proof -> let old = Stm.get_prev_proof ~doc:state.doc state.sid in Proof_diffs.diff_proofs ~diff_opt ?old proof let process_toplevel_command ~state stm = let open Vernac.State in let open G_toplevel in match stm with (* Usually handled in the caller *) | VernacDrop -> state | VernacBackTo bid -> let bid = Stateid.of_int bid in let doc, res = Stm.edit_at ~doc:state.doc bid in assert (res = Stm.NewTip); { state with doc; sid = bid } | VernacQuit -> exit 0 | VernacControl { CAst.loc; v=c } -> let nstate = Vernac.process_expr ~state (CAst.make ?loc c) in top_goal_print ~doc:state.doc c state.proof nstate.proof; nstate | VernacShowGoal { gid; sid } -> let proof = Stm.get_proof ~doc:state.doc (Stateid.of_int sid) in let goal = Printer.pr_goal_emacs ~proof gid sid in let evars = match proof with | None -> mt() | Some p -> let gl = (Evar.unsafe_of_int gid) in let { Proof.sigma } = Proof.data p in try Printer.print_dependent_evars (Some gl) sigma [ gl ] with Not_found -> mt() in Feedback.msg_notice (v 0 (goal ++ evars)); state | VernacShowProofDiffs diff_opt -> (* We print nothing if there are no goals left *) if not (Proof_diffs.color_enabled ()) then CErrors.user_err Pp.(str "Show Proof Diffs requires setting the \"-color\" command line argument to \"on\" or \"auto\".") else let out = show_proof_diff_cmd ~state diff_opt in Feedback.msg_notice out; state (* We return a new state and true if we got a `Drop` command *) let read_and_execute_base ~state = let input = top_buffer.tokens in match read_sentence ~state input with | Some G_toplevel.VernacDrop -> if Mltop.is_ocaml_top() then (drop_last_doc := Some state; state, true) else (Feedback.msg_warning (str "There is no ML toplevel."); state, false) | Some stm -> process_toplevel_command ~state stm, false (* End of file *) | None 
-> top_stderr (fnl ()); exit 0 let read_and_execute ~state = try read_and_execute_base ~state with (* Exception printing should be done by the feedback listener, however this is not yet ready so we rely on the exception for now. *) | Sys_blocked_io -> (* the parser doesn't like nonblocking mode, cf #10918 *) let msg = Pp.(strbrk "Coqtop needs the standard input to be in blocking mode." ++ spc() ++ str "One way of clearing the non-blocking flag is through Python:" ++ fnl() ++ str " import os" ++ fnl() ++ str " os.set_blocking(0, True)") in TopErr.print_error_for_buffer Feedback.Error msg top_buffer; exit 1 | any -> let (e, info) = Exninfo.capture any in let loc = Loc.get_loc info in let msg = CErrors.iprint (e, info) in TopErr.print_error_for_buffer ?loc Feedback.Error msg top_buffer; if exit_on_error () then exit 1; state, false (* This function will only return on [Drop], careful to keep it tail-recursive *) let rec vernac_loop ~state = let open Vernac.State in loop_flush_all (); top_stderr (fnl()); if !print_emacs then top_stderr (str (top_buffer.prompt state.doc)); resynch_buffer top_buffer; let state, drop = read_and_execute ~state in if drop then state else (vernac_loop [@ocaml.tailcall]) ~state (* Default toplevel loop, machinery for drop is below *) let drop_args = ref None (* Initialises the Ocaml toplevel before launching it, so that it can find the "include" file in the *source* directory *) let init_ocaml_path () = let env = Boot.Env.init () in let corelib = Boot.Env.corelib env |> Boot.Path.to_string in let add_subdir dl = Mltop.add_ml_dir (Filename.concat corelib dl) in List.iter add_subdir ("dev" :: Coq_config.all_src_dirs) let loop ~opts ~state = drop_args := Some opts; let open Coqargs in print_emacs := opts.config.print_emacs; (* We initialize the console only if we run the toploop_run *) let tl_feed = Feedback.add_feeder coqloop_feed in (* Initialize buffer *) Sys.catch_break true; reset_input_buffer state.Vernac.State.doc stdin top_buffer; (* Call the main loop *) let _ : Vernac.State.t = vernac_loop ~state in (* Initialise and launch the Ocaml toplevel *) init_ocaml_path (); Mltop.ocaml_toploop(); (* We delete the feeder after the OCaml toploop has ended so users of Drop can see the feedback. *) Feedback.del_feeder tl_feed coq-8.15.0/toplevel/coqloop.mli000066400000000000000000000032161417001151100163710ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string; mutable str : Bytes.t; (** buffer of already read characters *) mutable len : int; (** number of chars in the buffer *) mutable bols : int list; (** offsets in str of beginning of lines *) mutable tokens : Pcoq.Parsable.t; (** stream of tokens *) mutable start : int } (** stream count of the first char of the buffer *) (** The input buffer of stdin. *) val top_buffer : input_buffer val set_prompt : (unit -> string) -> unit (** Toplevel feedback printer. *) val coqloop_feed : Feedback.feedback -> unit (** Last document seen after `Drop` *) val drop_last_doc : Vernac.State.t option ref val drop_args : Coqargs.t option ref (** Main entry point of Coq: read and execute vernac commands. 
*) val loop : opts:Coqargs.t -> state:Vernac.State.t -> unit coq-8.15.0/toplevel/coqrc.ml000066400000000000000000000040151417001151100156510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* if CUnix.file_readable_p rcfile then Vernac.load_vernac ~echo:false ~interactive:false ~check:true ~state rcfile else raise (Sys_error ("Cannot read rcfile: "^ rcfile)) | None -> try let warn x = Feedback.msg_warning (Pp.str x) in let inferedrc = List.find CUnix.file_readable_p [ Envars.xdg_config_home warn / rcdefaultname^"."^Coq_config.version; Envars.xdg_config_home warn / rcdefaultname; Envars.home ~warn / "."^rcdefaultname^"."^Coq_config.version; Envars.home ~warn / "."^rcdefaultname ] in Vernac.load_vernac ~echo:false ~interactive:false ~check:true ~state inferedrc with Not_found -> state (* Flags.if_verbose mSGNL (str ("No coqrc or coqrc."^Coq_config.version^ " found. Skipping rcfile loading.")) *) with reraise -> let reraise = Exninfo.capture reraise in let () = Feedback.msg_info (Pp.str"Load of rcfile failed.") in Exninfo.iraise reraise coq-8.15.0/toplevel/coqrc.mli000066400000000000000000000013671417001151100160310ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* state:Vernac.State.t -> Vernac.State.t coq-8.15.0/toplevel/coqtop.ml000066400000000000000000000214451417001151100160550ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Boot.Path.to_string in let ch = open_in revision in let ver = input_line ch in let rev = input_line ch in let () = close_in ch in Printf.sprintf "%s (%s)" ver rev with _ -> Coq_config.version let print_header () = Feedback.msg_info (str "Welcome to Coq " ++ str (get_version ())); flush_all () (******************************************************************************) (* Fatal Errors *) (******************************************************************************) (** Prints info which is either an error or an anomaly and then exits with the appropriate error code *) let fatal_error_exn exn = Topfmt.(in_phase ~phase:Initialization print_err_exn exn); flush_all (); let exit_code = if (CErrors.is_anomaly exn) then 129 else 1 in exit exit_code type ('a,'b) custom_toplevel = { parse_extra : string list -> 'a * string list ; usage : Usage.specific_usage ; init_extra : 'a -> Coqargs.injection_command list -> opts:Coqargs.t -> 'b ; initial_args : Coqargs.t ; run : 'a -> opts:Coqargs.t -> 'b -> unit } (** Main init routine *) let init_toplevel { parse_extra; init_extra; usage; initial_args } = Coqinit.init_ocaml (); let opts, customopts = Coqinit.parse_arguments ~parse_extra ~usage ~initial_args () in Stm.init_process (snd customopts); let injections = Coqinit.init_runtime opts in (* This state will be shared by all the documents *) Stm.init_core (); let customstate = init_extra ~opts customopts injections in opts, customopts, customstate let start_coq custom = let init_feeder = Feedback.add_feeder Coqloop.coqloop_feed in (* Init phase *) let opts, custom_opts, state = try init_toplevel custom with any -> flush_all(); fatal_error_exn any in Feedback.del_feeder init_feeder; (* Run phase 
*) custom.run ~opts custom_opts state (** ****************************************) (** Specific support for coqtop executable *) type color = [`ON | `AUTO | `EMACS | `OFF] let init_color opts = let has_color = match opts with | `OFF -> false | `EMACS -> false | `ON -> true | `AUTO -> Terminal.has_style Unix.stdout && Terminal.has_style Unix.stderr && (* emacs compilation buffer does not support colors by default, its TERM variable is set to "dumb". *) try Sys.getenv "TERM" <> "dumb" with Not_found -> false in let term_color = if has_color then begin let colors = try Some (Sys.getenv "COQ_COLORS") with Not_found -> None in match colors with | None -> Topfmt.default_styles (); true (* Default colors *) | Some "" -> false (* No color output *) | Some s -> Topfmt.parse_color_config s; true (* Overwrite all colors *) end else begin Topfmt.default_styles (); false (* textual markers, no color *) end in if opts = `EMACS then Topfmt.set_emacs_print_strings () else if not term_color then begin Proof_diffs.write_color_enabled term_color; if Proof_diffs.show_diffs () then (prerr_endline "Error: -diffs requires enabling -color"; exit 1) end; Topfmt.init_terminal_output ~color:term_color let print_style_tags opts = let () = init_color opts in let tags = Topfmt.dump_tags () in let iter (t, st) = let opt = Terminal.eval st ^ t ^ Terminal.reset ^ "\n" in print_string opt in let make (t, st) = let tags = List.map string_of_int (Terminal.repr st) in (t ^ "=" ^ String.concat ";" tags) in let repr = List.map make tags in let () = Printf.printf "COQ_COLORS=\"%s\"\n" (String.concat ":" repr) in let () = List.iter iter tags in flush_all () let ltac_debug_answer = let open DebugHook.Answer in function | Prompt prompt -> (* No newline *) Format.fprintf !Topfmt.err_ft "@[%a@]%!" Pp.pp_with prompt | Goal g -> Format.fprintf !Topfmt.err_ft "@[%a@]@\n%!" Pp.pp_with g | Output o -> Format.fprintf !Topfmt.err_ft "@[%a@]@\n%!" Pp.pp_with o | Init -> Format.fprintf !Topfmt.err_ft "@[%a@]@\n%!" Pp.pp_with (str "Init") | Stack _ | Vars _ -> CErrors.anomaly (str "ltac_debug_answer: unsupported Answer type") let ltac_debug_parse () = let open DebugHook in let act = try Action.parse (read_line ()) with End_of_file -> Ok Action.Interrupt in match act with | Ok act -> act | Error error -> ltac_debug_answer (Answer.Output (str error)); Action.Failed type query = PrintTags | PrintModUid of string list type run_mode = Interactive | Batch | Query of query type toplevel_options = { run_mode : run_mode; color_mode : color; } let init_document opts stm_options injections = (* Coq init process, phase 3: Stm initialization, backtracking state. It is essential that the module system is in a consistent state before we take the first snapshot. This was not guaranteed in the past, but now is thanks to the STM API. 
*) (* Next line allows loading .vos files when in interactive mode *) Flags.load_vos_libraries := true; let open Vernac.State in let doc, sid = Stm.(new_doc { doc_type = Interactive opts.config.logic.toplevel_name; injections; stm_options; }) in { doc; sid; proof = None; time = opts.config.time } let init_toploop opts stm_opts injections = let state = init_document opts stm_opts injections in let state = Ccompile.load_init_vernaculars opts ~state in state let coqtop_init ({ run_mode; color_mode }, async_opts) injections ~opts = if run_mode != Interactive then Flags.quiet := true; init_color (if opts.config.print_emacs then `EMACS else color_mode); Flags.if_verbose print_header (); DebugHook.Intf.(set { read_cmd = ltac_debug_parse ; submit_answer = ltac_debug_answer ; isTerminal = true }); init_toploop opts async_opts injections let set_color = function | "yes" | "on" -> `ON | "no" | "off" -> `OFF | "auto" ->`AUTO | _ -> error_wrong_arg ("Error: on/off/auto expected after option color") let parse_extra_colors extras = let rec parse_extra color_mode = function | "-color" :: next :: rest -> parse_extra (set_color next) rest | "-list-tags" :: rest -> parse_extra color_mode rest | x :: rest -> let color_mode, rest = parse_extra color_mode rest in color_mode, x :: rest | [] -> color_mode, [] in parse_extra `AUTO extras let coqtop_parse_extra extras = let rec parse_extra run_mode = function | "-batch" :: rest -> parse_extra Batch rest | "-print-mod-uid" :: rest -> Query (PrintModUid rest), [] | x :: rest -> let run_mode, rest = parse_extra run_mode rest in run_mode, x :: rest | [] -> run_mode, [] in let run_mode, extras = parse_extra Interactive extras in let color_mode, extras = parse_extra_colors extras in let async_opts, extras = Stmargs.parse_args ~init:Stm.AsyncOpts.default_opts extras in ({ run_mode; color_mode}, async_opts), extras let get_native_name s = (* We ignore even critical errors because this mode has to be super silent *) try Filename.(List.fold_left concat (dirname s) [ !Nativelib.output_dir ; Library.native_name_from_filename s ]) with _ -> "" let coqtop_run ({ run_mode; color_mode },_) ~opts state = match run_mode with | Interactive -> Coqloop.loop ~opts ~state; | Query PrintTags -> print_style_tags color_mode; exit 0 | Query (PrintModUid sl) -> let s = String.concat " " (List.map get_native_name sl) in print_endline s; exit 0 | Batch -> exit 0 let coqtop_specific_usage = Usage.{ executable_name = "coqtop"; extra_args = ""; extra_options = "\n\ coqtop specific options:\n\ \n -batch batch mode (exits after interpretation of command line)\ \n" } let coqtop_toplevel = { parse_extra = coqtop_parse_extra ; usage = coqtop_specific_usage ; init_extra = coqtop_init ; run = coqtop_run ; initial_args = Coqargs.default } coq-8.15.0/toplevel/coqtop.mli000066400000000000000000000043061417001151100162230ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a * string list ; usage : Usage.specific_usage ; init_extra : 'a -> Coqargs.injection_command list -> opts:Coqargs.t -> 'b ; initial_args : Coqargs.t ; run : 'a -> opts:Coqargs.t -> 'b -> unit } (** The generic Coq main module. [start custom] will parse the command line, print the banner, initialize the load path, load the input state, load the files given on the command line, load the resource file, produce the output state if any, and finally will launch [custom.run]. 
*) val start_coq : ('a * Stm.AsyncOpts.stm_opt,'b) custom_toplevel -> unit (** Initializer color for output *) type color = [`ON | `AUTO | `EMACS | `OFF] val init_color : color -> unit val parse_extra_colors : string list -> color * string list val print_style_tags : color -> unit (** Prepare state for interactive loop *) val init_toploop : Coqargs.t -> Stm.AsyncOpts.stm_opt -> Coqargs.injection_command list -> Vernac.State.t (** The specific characterization of the coqtop_toplevel *) type query = PrintTags | PrintModUid of string list type run_mode = Interactive | Batch | Query of query type toplevel_options = { run_mode : run_mode; color_mode : color; } val coqtop_toplevel : (toplevel_options * Stm.AsyncOpts.stm_opt,Vernac.State.t) custom_toplevel val ltac_debug_answer : DebugHook.Answer.t -> unit val ltac_debug_parse : unit -> DebugHook.Action.t coq-8.15.0/toplevel/dune000066400000000000000000000005631417001151100150720ustar00rootroot00000000000000(library (name toplevel) (public_name coq-core.toplevel) (synopsis "Coq's Interactive Shell [terminal-based]") (wrapped false) ; until ocaml/dune#4892 fixed ; (private_modules g_toplevel) (libraries coq-core.stm)) ; Interp provides the `zarith` library to plugins, we could also use ; -linkall in the plugins file, to be discussed. (coq.pp (modules g_toplevel)) coq-8.15.0/toplevel/g_toplevel.mlg000066400000000000000000000043751417001151100170620ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* > lk_kw "Goal" >> lk_nat end } GRAMMAR EXTEND Gram GLOBAL: vernac_toplevel; vernac_toplevel: FIRST [ [ IDENT "Drop"; "." -> { Some VernacDrop } | IDENT "Quit"; "." -> { Some VernacQuit } | IDENT "BackTo"; n = natural; "." -> { Some (VernacBackTo n) } (* show a goal for the specified proof state *) | test_show_goal; IDENT "Show"; IDENT "Goal"; gid = natural; IDENT "at"; sid = natural; "." -> { Some (VernacShowGoal {gid; sid}) } | IDENT "Show"; IDENT "Proof"; IDENT "Diffs"; removed = OPT [ IDENT "removed" -> { () } ]; "." -> { Some (VernacShowProofDiffs (if removed = None then Proof_diffs.DiffOn else Proof_diffs.DiffRemoved)) } | cmd = Pvernac.Vernac_.main_entry -> { match cmd with | None -> None | Some v -> Some (VernacControl v) } ] ] ; END { let vernac_toplevel pm = Pvernac.Unsafe.set_tactic_entry pm; vernac_toplevel } coq-8.15.0/toplevel/vernac.ml000066400000000000000000000151431417001151100160240ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* let len = loc.ep - loc.bp in seek_in in_chan loc.bp; Feedback.msg_notice @@ str @@ really_input_string in_chan len ) loc (* Re-enable when we get back to feedback printing *) (* let is_end_of_input any = match any with *) (* Stm.End_of_input -> true *) (* | _ -> false *) module State = struct type t = { doc : Stm.doc; sid : Stateid.t; proof : Proof.t option; time : bool; } end let interp_vernac ~check ~interactive ~state ({CAst.loc;_} as com) = let open State in try (* The -time option is only supported from console-based clients due to the way it prints. 
*) let com = if state.time then begin CAst.map (fun cmd -> { cmd with control = ControlTime state.time :: cmd.control }) com end else com in let doc, nsid, ntip = Stm.add ~doc:state.doc ~ontop:state.sid (not !Flags.quiet) com in (* Main STM interaction *) if ntip <> Stm.NewAddTip then anomaly (str "vernac.ml: We got an unfocus operation on the toplevel!"); (* Force the command *) let ndoc = if check then Stm.observe ~doc nsid else doc in let new_proof = Vernacstate.Declare.give_me_the_proof_opt () [@ocaml.warning "-3"] in { state with doc = ndoc; sid = nsid; proof = new_proof; } with reraise -> let (reraise, info) = Exninfo.capture reraise in (* XXX: In non-interactive mode edit_at seems to do very weird things, so we better avoid it while we investigate *) if interactive then ignore(Stm.edit_at ~doc:state.doc state.sid); let info = begin match Loc.get_loc info with | None -> Option.cata (Loc.add_loc info) info loc | Some _ -> info end in Exninfo.iraise (reraise, info) (* Load a vernac file. CErrors are annotated with file and location *) let load_vernac_core ~echo ~check ~interactive ~state ?ldir file = (* Keep in sync *) let in_chan = open_utf8_file_in file in let in_echo = if echo then Some (open_utf8_file_in file) else None in let input_cleanup () = close_in in_chan; Option.iter close_in in_echo in let dirpath = Option.cata (fun ldir -> Some Names.DirPath.(to_string ldir)) None ldir in let in_pa = Pcoq.Parsable.make ~loc:Loc.(initial (InFile {dirpath; file})) (Stream.of_channel in_chan) in let open State in (* ids = For beautify, list of parsed sids *) let rec loop state ids = match Stm.parse_sentence ~doc:state.doc ~entry:Pvernac.main_entry state.sid in_pa with | None -> input_cleanup (); state, ids, Pcoq.Parsable.comments in_pa | Some ast -> (* Printing of AST for -compile-verbose *) Option.iter (vernac_echo ?loc:ast.CAst.loc) in_echo; checknav ast; let state = Flags.silently (interp_vernac ~check ~interactive ~state) ast in (loop [@ocaml.tailcall]) state (state.sid :: ids) in try loop state [] with any -> (* whatever the exception *) let (e, info) = Exninfo.capture any in input_cleanup (); Exninfo.iraise (e, info) let process_expr ~state loc_ast = interp_vernac ~interactive:true ~check:true ~state loc_ast (******************************************************************************) (* Beautify-specific code *) (******************************************************************************) (* vernac parses the given stream, executes interpfun on the syntax tree it * parses, and is verbose on "primitives" commands if verbosely is true *) let beautify_suffix = ".beautified" let set_formatter_translator ch = let out s b e = output_substring ch s b e in let ft = Format.make_formatter out (fun () -> flush ch) in Format.pp_set_max_boxes ft max_int; ft let pr_new_syntax ?loc ft_beautify ocom = let loc = Option.cata Loc.unloc (0,0) loc in let before = comment (Pputils.extract_comments (fst loc)) in let com = Option.cata Ppvernac.pr_vernac (mt ()) ocom in let after = comment (Pputils.extract_comments (snd loc)) in if !Flags.beautify_file then (Pp.pp_with ft_beautify (hov 0 (before ++ com ++ after)); Format.pp_print_flush ft_beautify ()) else Feedback.msg_info (hov 4 (str"New Syntax:" ++ fnl() ++ (hov 0 com))) (* load_vernac with beautify *) let beautify_pass ~doc ~comments ~ids ~filename = let ft_beautify, close_beautify = if !Flags.beautify_file then let chan_beautify = open_out (filename^beautify_suffix) in set_formatter_translator chan_beautify, fun () -> close_out chan_beautify; else 
!Topfmt.std_ft, fun () -> () in (* The interface to the comment printer is imperative, so we first set the comments, then we call print. This has to be done for each file. *) Pputils.beautify_comments := comments; List.iter (fun id -> pr_new_syntax ft_beautify (Stm.get_ast ~doc id)) ids; (* Is this called so comments at EOF are printed? *) pr_new_syntax ~loc:(Loc.make_loc (max_int,max_int)) ft_beautify None; close_beautify () (* Main driver for file loading. For now, we only do one beautify pass. *) let load_vernac ~echo ~check ~interactive ~state ?ldir filename = let ostate, ids, comments = load_vernac_core ~echo ~check ~interactive ~state ?ldir filename in (* Pass for beautify *) if !Flags.beautify then beautify_pass ~doc:ostate.State.doc ~comments ~ids:(List.rev ids) ~filename; (* End pass *) ostate coq-8.15.0/toplevel/vernac.mli000066400000000000000000000026361417001151100162000ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Vernacexpr.vernac_control -> State.t (** [load_vernac echo sid file] Loads [file] on top of [sid], will echo the commands if [echo] is set. Callers are expected to handle and print errors in form of exceptions. *) val load_vernac : echo:bool -> check:bool -> interactive:bool -> state:State.t -> ?ldir:Names.DirPath.t -> string -> State.t coq-8.15.0/toplevel/workerLoop.ml000066400000000000000000000031311417001151100167030ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* (* the state is not used since the worker will receive one from master *) loop ()); } in start_coq custom coq-8.15.0/toplevel/workerLoop.mli000066400000000000000000000014501417001151100170560ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit) -> loop:(unit -> unit) -> string -> unit coq-8.15.0/user-contrib/000077500000000000000000000000001417001151100147725ustar00rootroot00000000000000coq-8.15.0/user-contrib/Ltac2/000077500000000000000000000000001417001151100157375ustar00rootroot00000000000000coq-8.15.0/user-contrib/Ltac2/Array.v000066400000000000000000000217001417001151100172040ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a array := "ltac2" "array_empty". Ltac2 @external make : int -> 'a -> 'a array := "ltac2" "array_make". Ltac2 @external length : 'a array -> int := "ltac2" "array_length". Ltac2 @external get : 'a array -> int -> 'a := "ltac2" "array_get". Ltac2 @external set : 'a array -> int -> 'a -> unit := "ltac2" "array_set". Ltac2 @external lowlevel_blit : 'a array -> int -> 'a array -> int -> int -> unit := "ltac2" "array_blit". Ltac2 @external lowlevel_fill : 'a array -> int -> int -> 'a -> unit := "ltac2" "array_fill". Ltac2 @external concat : ('a array) list -> 'a array := "ltac2" "array_concat". 
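(* For illustration, the primitives above combine as in OCaml's [Array];
   e.g. (using only the externals declared above)
     let a := make 3 0 in set a 1 7; get a 1
   evaluates to [7]: [make 3 0] allocates [[|0; 0; 0|]], [set] updates
   index 1 in place, and [get] reads it back. *)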
(* Low level array operations *) Ltac2 lowlevel_sub (arr : 'a array) (start : int) (len : int) := let l := length arr in match Int.equal l 0 with | true => empty () | false => let newarr:=make len (get arr 0) in lowlevel_blit arr start newarr 0 len; newarr end. (* Array functions as defined in the OCaml library *) Ltac2 init (l : int) (f : int->'a) := let rec init_aux (dst : 'a array) (pos : int) (len : int) (f : int->'a) := match Int.equal len 0 with | true => () | false => set dst pos (f pos); init_aux dst (Int.add pos 1) (Int.sub len 1) f end in match Int.le l 0 with | true => empty () | false => let arr:=make l (f 0) in init_aux arr 1 (Int.sub l 1) f; arr end. Ltac2 make_matrix (sx : int) (sy : int) (v : 'a) := let init1 i := v in let initr i := init sy init1 in init sx initr. Ltac2 copy a := lowlevel_sub a 0 (length a). Ltac2 append (a1 : 'a array) (a2 : 'a array) := match Int.equal (length a1) 0 with | true => copy a2 | false => match Int.equal (length a2) 0 with | true => copy a1 | false => let newarr:=make (Int.add (length a1) (length a2)) (get a1 0) in lowlevel_blit a1 0 newarr 0 (length a1); lowlevel_blit a2 0 newarr (length a1) (length a2); newarr end end. Ltac2 sub (a : 'a array) (ofs : int) (len : int) := Control.assert_valid_argument "Array.sub ofs<0" (Int.ge ofs 0); Control.assert_valid_argument "Array.sub len<0" (Int.ge len 0); Control.assert_bounds "Array.sub" (Int.le ofs (Int.sub (length a) len)); lowlevel_sub a ofs len. Ltac2 fill (a : 'a array) (ofs : int) (len : int) (v : 'a) := Control.assert_valid_argument "Array.fill ofs<0" (Int.ge ofs 0); Control.assert_valid_argument "Array.fill len<0" (Int.ge len 0); Control.assert_bounds "Array.fill" (Int.le ofs (Int.sub (length a) len)); lowlevel_fill a ofs len v. Ltac2 blit (a1 : 'a array) (ofs1 : int) (a2 : 'a array) (ofs2 : int) (len : int) := Control.assert_valid_argument "Array.blit ofs1<0" (Int.ge ofs1 0); Control.assert_valid_argument "Array.blit ofs2<0" (Int.ge ofs2 0); Control.assert_valid_argument "Array.blit len<0" (Int.ge len 0); Control.assert_bounds "Array.blit ofs1+len>len a1" (Int.le ofs1 (Int.sub (length a1) len)); Control.assert_bounds "Array.blit ofs2+len>len a2" (Int.le ofs2 (Int.sub (length a2) len)); lowlevel_blit a1 ofs1 a2 ofs2 len. Ltac2 rec iter_aux (f : 'a -> unit) (a : 'a array) (pos : int) (len : int) := match Int.equal len 0 with | true => () | false => f (get a pos); iter_aux f a (Int.add pos 1) (Int.sub len 1) end. Ltac2 iter (f : 'a -> unit) (a : 'a array) := iter_aux f a 0 (length a). Ltac2 rec iter2_aux (f : 'a -> 'b -> unit) (a : 'a array) (b : 'b array) (pos : int) (len : int) := match Int.equal len 0 with | true => () | false => f (get a pos) (get b pos); iter2_aux f a b (Int.add pos 1) (Int.sub len 1) end. Ltac2 rec iter2 (f : 'a -> 'b -> unit) (a : 'a array) (b : 'b array) := Control.assert_valid_argument "Array.iter2" (Int.equal (length a) (length b)); iter2_aux f a b 0 (length a). Ltac2 map (f : 'a -> 'b) (a : 'a array) := init (length a) (fun i => f (get a i)). Ltac2 map2 (f : 'a -> 'b -> 'c) (a : 'a array) (b : 'b array) := Control.assert_valid_argument "Array.map2" (Int.equal (length a) (length b)); init (length a) (fun i => f (get a i) (get b i)). Ltac2 rec iteri_aux (f : int -> 'a -> unit) (a : 'a array) (pos : int) (len : int) := match Int.equal len 0 with | true => () | false => f pos (get a pos); iteri_aux f a (Int.add pos 1) (Int.sub len 1) end. Ltac2 iteri (f : int -> 'a -> unit) (a : 'a array) := iteri_aux f a 0 (length a). 
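(* For illustration, with the definitions above,
     init 3 (fun i => Int.mul i i)
   builds [[|0; 1; 4|]], and [iteri f a] calls [f 0 (get a 0)],
   [f 1 (get a 1)], ... in order. *)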
Ltac2 mapi (f : int -> 'a -> 'b) (a : 'a array) := init (length a) (fun i => f i (get a i)). Ltac2 rec to_list_aux (a : 'a array) (pos : int) (len : int) := match Int.equal len 0 with | true => [] | false => get a pos :: to_list_aux a (Int.add pos 1) (Int.sub len 1) end. Ltac2 to_list (a : 'a array) := to_list_aux a 0 (length a). Ltac2 rec of_list_aux (ls : 'a list) (dst : 'a array) (pos : int) := match ls with | [] => () | hd::tl => set dst pos hd; of_list_aux tl dst (Int.add pos 1) end. Ltac2 of_list (ls : 'a list) := (* Don't use List.length here because the List module might depend on Array some day *) let rec list_length (ls : 'a list) := match ls with | [] => 0 | _ :: tl => Int.add 1 (list_length tl) end in match ls with | [] => empty () | hd::tl => let anew := make (list_length ls) hd in of_list_aux ls anew 0; anew end. Ltac2 rec fold_left_aux (f : 'a -> 'b -> 'a) (x : 'a) (a : 'b array) (pos : int) (len : int) := match Int.equal len 0 with | true => x | false => fold_left_aux f (f x (get a pos)) a (Int.add pos 1) (Int.sub len 1) end. Ltac2 fold_left (f : 'a -> 'b -> 'a) (x : 'a) (a : 'b array) := fold_left_aux f x a 0 (length a). Ltac2 rec fold_right_aux (f : 'a -> 'b -> 'a) (x : 'a) (a : 'b array) (pos : int) (len : int) := (* Note: one could compare pos<0. We keep an extra len parameter so that the function can be used for any sub array *) match Int.equal len 0 with | true => x | false => fold_right_aux f (f x (get a pos)) a (Int.sub pos 1) (Int.sub len 1) end. Ltac2 fold_right (f : 'a -> 'b -> 'a) (x : 'a) (a : 'b array) := fold_right_aux f x a (Int.sub (length a) 1) (length a). Ltac2 rec exist_aux (p : 'a -> bool) (a : 'a array) (pos : int) (len : int) := match Int.equal len 0 with | true => false | false => match p (get a pos) with | true => true | false => exist_aux p a (Int.add pos 1) (Int.sub len 1) end end. (* Note: named exist (as in Coq library) rather than exists cause exists is a notation *) Ltac2 exist (p : 'a -> bool) (a : 'a array) := exist_aux p a 0 (length a). Ltac2 rec for_all_aux (p : 'a -> bool) (a : 'a array) (pos : int) (len : int) := match Int.equal len 0 with | true => true | false => match p (get a pos) with | true => for_all_aux p a (Int.add pos 1) (Int.sub len 1) | false => false end end. Ltac2 for_all (p : 'a -> bool) (a : 'a array) := for_all_aux p a 0 (length a). (* Note: we don't have (yet) a generic equality function in Ltac2 *) Ltac2 mem (eq : 'a -> 'a -> bool) (x : 'a) (a : 'a array) := exist (eq x) a. coq-8.15.0/user-contrib/Ltac2/Bool.v000066400000000000000000000034641417001151100170300ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* y | false => false end. Ltac2 or x y := match x with | true => true | false => y end. Ltac2 impl x y := match x with | true => y | false => true end. Ltac2 neg x := match x with | true => false | false => true end. Ltac2 xor x y := match x with | true => match y with | true => false | false => true end | false => match y with | true => true | false => false end end. Ltac2 equal x y := match x with | true => match y with | true => true | false => false end | false => match y with | true => false | false => true end end. (** * Boolean operators with lazy evaluation of the second argument *) Ltac2 Notation x(self) "&&" y(thunk(self)) : 2 := match x with | true => y () | false => false end. 
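(* Note that the right-hand side of [&&] is thunked, so it is evaluated
   only when the left-hand side is [true]; e.g. in [false && (foo ())] the
   call [foo ()] never runs (here [foo] stands for an arbitrary
   [unit -> bool] tactic), whereas the eager function [and] above
   evaluates both arguments before being applied. *)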
Ltac2 Notation x(self) "||" y(thunk(self)) : 3 := match x with | true => true | false => y () end. (** * Compatibility notations *) #[deprecated(note="Use Bool.equal", since="8.14")] Ltac2 eq := equal. coq-8.15.0/user-contrib/Ltac2/Char.v000066400000000000000000000014761417001151100170130ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* char := "ltac2" "char_of_int". Ltac2 @external to_int : char -> int := "ltac2" "char_to_int". coq-8.15.0/user-contrib/Ltac2/Constr.v000066400000000000000000000113111417001151100173730ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* constr := "ltac2" "constr_type". (** Return the type of a term *) Ltac2 @ external equal : constr -> constr -> bool := "ltac2" "constr_equal". (** Strict syntactic equality: only up to α-conversion and evar expansion *) Module Unsafe. (** Low-level access to kernel terms. Use with care! *) Ltac2 Type case. Ltac2 Type case_invert := [ | NoInvert | CaseInvert (constr array) ]. Ltac2 Type kind := [ | Rel (int) | Var (ident) | Meta (meta) | Evar (evar, constr array) | Sort (sort) | Cast (constr, cast, constr) | Prod (binder, constr) | Lambda (binder, constr) | LetIn (binder, constr, constr) | App (constr, constr array) | Constant (constant, instance) | Ind (inductive, instance) | Constructor (constructor, instance) | Case (case, constr, case_invert, constr, constr array) | Fix (int array, int, binder array, constr array) | CoFix (int, binder array, constr array) | Proj (projection, constr) | Uint63 (uint63) | Float (float) | Array (instance, constr array, constr, constr) ]. Ltac2 @ external kind : constr -> kind := "ltac2" "constr_kind". Ltac2 @ external make : kind -> constr := "ltac2" "constr_make". Ltac2 @ external check : constr -> constr result := "ltac2" "constr_check". (** Checks that a constr generated by unsafe means is indeed safe in the current environment, and returns it, or the error otherwise. Panics if not focused. *) Ltac2 @ external substnl : constr list -> int -> constr -> constr := "ltac2" "constr_substnl". (** [substnl [r₁;...;rₙ] k c] substitutes in parallel [Rel(k+1); ...; Rel(k+n)] with [r₁;...;rₙ] in [c]. *) Ltac2 @ external closenl : ident list -> int -> constr -> constr := "ltac2" "constr_closenl". (** [closenl [x₁;...;xₙ] k c] abstracts over variables [x₁;...;xₙ] and replaces them with [Rel(k); ...; Rel(k+n-1)] in [c]. If two names are identical, the one of least index is kept. *) Ltac2 @ external case : inductive -> case := "ltac2" "constr_case". (** Generate the case information for a given inductive type. *) Ltac2 @ external constructor : inductive -> int -> constructor := "ltac2" "constr_constructor". (** Generate the i-th constructor for a given inductive type. Indexing starts at 0. Panics if there is no such constructor. *) End Unsafe. Module Binder. Ltac2 @ external make : ident option -> constr -> binder := "ltac2" "constr_binder_make". (** Create a binder given the name and the type of the bound variable. *) Ltac2 @ external name : binder -> ident option := "ltac2" "constr_binder_name". (** Retrieve the name of a binder. *) Ltac2 @ external type : binder -> constr := "ltac2" "constr_binder_type". (** Retrieve the type of a binder. *) End Binder. 
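(* For illustration, a sketch of how the low-level constructors combine:
   a term [fun (x : nat) => x] could be assembled as
     let b := Binder.make (Some @x) 'nat in
     Unsafe.make (Unsafe.Lambda b (Unsafe.make (Unsafe.Rel 1)))
   where [Rel 1] is the de Bruijn index of the nearest binder; terms built
   this way should be validated with [Unsafe.check]. *)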
Ltac2 @ external in_context : ident -> constr -> (unit -> unit) -> constr := "ltac2" "constr_in_context". (** On a focused goal [Γ ⊢ A], [in_context id c tac] evaluates [tac] in a focused goal [Γ, id : c ⊢ ?X] and returns [fun (id : c) => t] where [t] is the proof built by the tactic. *) Ltac2 @ external pretype : preterm -> constr := "ltac2" "constr_pretype". (** Pretype the provided preterm. Assumes the goal to be focussed. *) Ltac2 is_evar(c: constr) := match Unsafe.kind c with | Unsafe.Evar _ _ => true | _ => false end. Ltac2 @ external has_evar : constr -> bool := "ltac2" "constr_has_evar". Ltac2 is_var(c: constr) := match Unsafe.kind c with | Unsafe.Var _ => true | _ => false end. Ltac2 is_fix(c: constr) := match Unsafe.kind c with | Unsafe.Fix _ _ _ _ => true | _ => false end. Ltac2 is_cofix(c: constr) := match Unsafe.kind c with | Unsafe.CoFix _ _ _ => true | _ => false end. Ltac2 is_ind(c: constr) := match Unsafe.kind c with | Unsafe.Ind _ _ => true | _ => false end. Ltac2 is_constructor(c: constr) := match Unsafe.kind c with | Unsafe.Constructor _ _ => true | _ => false end. Ltac2 is_proj(c: constr) := match Unsafe.kind c with | Unsafe.Proj _ _ => true | _ => false end. Ltac2 is_const(c: constr) := match Unsafe.kind c with | Unsafe.Constant _ _ => true | _ => false end. coq-8.15.0/user-contrib/Ltac2/Control.v000066400000000000000000000105011417001151100175430ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a := "ltac2" "throw". (** Fatal exception throwing. This does not induce backtracking. *) (** Generic backtracking control *) Ltac2 @ external zero : exn -> 'a := "ltac2" "zero". Ltac2 @ external plus : (unit -> 'a) -> (exn -> 'a) -> 'a := "ltac2" "plus". Ltac2 @ external once : (unit -> 'a) -> 'a := "ltac2" "once". Ltac2 @ external dispatch : (unit -> unit) list -> unit := "ltac2" "dispatch". Ltac2 @ external extend : (unit -> unit) list -> (unit -> unit) -> (unit -> unit) list -> unit := "ltac2" "extend". Ltac2 @ external enter : (unit -> unit) -> unit := "ltac2" "enter". Ltac2 @ external case : (unit -> 'a) -> ('a * (exn -> 'a)) result := "ltac2" "case". (** Proof state manipulation *) Ltac2 @ external focus : int -> int -> (unit -> 'a) -> 'a := "ltac2" "focus". Ltac2 @ external shelve : unit -> unit := "ltac2" "shelve". Ltac2 @ external shelve_unifiable : unit -> unit := "ltac2" "shelve_unifiable". Ltac2 @ external new_goal : evar -> unit := "ltac2" "new_goal". (** Adds the given evar to the list of goals as the last one. If it is already defined in the current state, don't do anything. Panics if the evar is not in the current state. *) Ltac2 @ external progress : (unit -> 'a) -> 'a := "ltac2" "progress". (** Goal inspection *) Ltac2 @ external goal : unit -> constr := "ltac2" "goal". (** Panics if there is not exactly one goal under focus. Otherwise returns the conclusion of this goal. *) Ltac2 @ external hyp : ident -> constr := "ltac2" "hyp". (** Panics if there is more than one goal under focus. If there is no goal under focus, looks for the section variable with the given name. If there is one, looks for the hypothesis with the given name. *) Ltac2 @ external hyps : unit -> (ident * constr option * constr) list := "ltac2" "hyps". (** Panics if there is more than one goal under focus. If there is no goal under focus, returns the list of section variables. If there is one, returns the list of hypotheses. 
In both cases, the list is ordered with rightmost values being last introduced. *) (** Refinement *) Ltac2 @ external refine : (unit -> constr) -> unit := "ltac2" "refine". (** Evars *) Ltac2 @ external with_holes : (unit -> 'a) -> ('a -> 'b) -> 'b := "ltac2" "with_holes". (** [with_holes x f] evaluates [x], then apply [f] to the result, and fails if all evars generated by the call to [x] have not been solved when [f] returns. *) (** Misc *) Ltac2 @ external time : string option -> (unit -> 'a) -> 'a := "ltac2" "time". (** Displays the time taken by a tactic to evaluate. *) Ltac2 @ external abstract : ident option -> (unit -> unit) -> unit := "ltac2" "abstract". (** Abstract a subgoal. *) Ltac2 @ external check_interrupt : unit -> unit := "ltac2" "check_interrupt". (** For internal use. *) (** Assertions throwing exceptions and short form throws *) Ltac2 throw_invalid_argument (msg : string) := Control.throw (Invalid_argument (Some (Message.of_string msg))). Ltac2 throw_out_of_bounds (msg : string) := Control.throw (Out_of_bounds (Some (Message.of_string msg))). Ltac2 assert_valid_argument (msg : string) (test : bool) := match test with | true => () | false => throw_invalid_argument msg end. Ltac2 assert_bounds (msg : string) (test : bool) := match test with | true => () | false => throw_out_of_bounds msg end. Ltac2 assert_true b := if b then () else throw Assertion_failure. Ltac2 assert_false b := if b then throw Assertion_failure else (). (** Short form backtracks *) Ltac2 backtrack_tactic_failure (msg : string) := Control.zero (Tactic_failure (Some (Message.of_string msg))). coq-8.15.0/user-contrib/Ltac2/Env.v000066400000000000000000000030101417001151100166500ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Std.reference option := "ltac2" "env_get". (** Returns the global reference corresponding to the absolute name given as argument if it exists. *) Ltac2 @ external expand : ident list -> Std.reference list := "ltac2" "env_expand". (** Returns the list of all global references whose absolute name contains the argument list as a suffix. *) Ltac2 @ external path : Std.reference -> ident list := "ltac2" "env_path". (** Returns the absolute name of the given reference. Panics if the reference does not exist. *) Ltac2 @ external instantiate : Std.reference -> constr := "ltac2" "env_instantiate". (** Returns a fresh instance of the corresponding reference, in particular generating fresh universe variables and constraints when this reference is universe-polymorphic. *) coq-8.15.0/user-contrib/Ltac2/Fresh.v000066400000000000000000000025471417001151100172050ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> t := "ltac2" "fresh_free_union". Ltac2 @ external of_ids : ident list -> t := "ltac2" "fresh_free_of_ids". Ltac2 @ external of_constr : constr -> t := "ltac2" "fresh_free_of_constr". Ltac2 of_goal () := of_ids (List.map (fun (id, _, _) => id) (Control.hyps ())). End Free. Ltac2 @ external fresh : Free.t -> ident -> ident := "ltac2" "fresh_fresh". (** Generate a fresh identifier with the given base name which is not a member of the provided set of free variables. *) Ltac2 in_goal id := Fresh.fresh (Free.of_goal ()) id. 
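(* For illustration: in a focused goal, [in_goal @H] produces an
   identifier based on [H] that clashes with no hypothesis name, e.g.
   [@H] itself when the goal has no hypothesis [H], and a variant such as
   [@H0] otherwise (a sketch of the intended behaviour of [fresh] above). *)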
coq-8.15.0/user-contrib/Ltac2/Ident.v000066400000000000000000000016611417001151100171750ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> bool := "ltac2" "ident_equal". Ltac2 @ external of_string : string -> t option := "ltac2" "ident_of_string". Ltac2 @ external to_string : t -> string := "ltac2" "ident_to_string". coq-8.15.0/user-contrib/Ltac2/Ind.v000066400000000000000000000042761417001151100166510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> bool := "ltac2" "ind_equal". (** Equality test. *) Ltac2 Type data. (** Type of data representing inductive blocks. *) Ltac2 @ external data : t -> data := "ltac2" "ind_data". (** Get the mutual blocks corresponding to an inductive type in the current environment. Panics if there is no such inductive. *) Ltac2 @ external repr : data -> t := "ltac2" "ind_repr". (** Returns the inductive corresponding to the block. Inverse of [data]. *) Ltac2 @ external index : t -> int := "ltac2" "ind_index". (** Returns the index of the inductive type inside its mutual block. Guaranteed to range between [0] and [nblocks data - 1] where [data] was retrieved using the above function. *) Ltac2 @ external nblocks : data -> int := "ltac2" "ind_nblocks". (** Returns the number of inductive types appearing in a mutual block. *) Ltac2 @ external nconstructors : data -> int := "ltac2" "ind_nconstructors". (** Returns the number of constructors appearing in the current block. *) Ltac2 @ external get_block : data -> int -> data := "ltac2" "ind_get_block". (** Returns the block corresponding to the nth inductive type. Index must range between [0] and [nblocks data - 1], otherwise the function panics. *) Ltac2 @ external get_constructor : data -> int -> constructor := "ltac2" "ind_get_constructor". (** Returns the nth constructor of the inductive type. Index must range between [0] and [nconstructors data - 1], otherwise the function panics. *) coq-8.15.0/user-contrib/Ltac2/Init.v000066400000000000000000000052271417001151100170370ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* int -> bool := "ltac2" "int_equal". Ltac2 @ external compare : int -> int -> int := "ltac2" "int_compare". Ltac2 @ external add : int -> int -> int := "ltac2" "int_add". Ltac2 @ external sub : int -> int -> int := "ltac2" "int_sub". Ltac2 @ external mul : int -> int -> int := "ltac2" "int_mul". Ltac2 @ external neg : int -> int := "ltac2" "int_neg". Ltac2 lt (x : int) (y : int) := equal (compare x y) -1. Ltac2 gt (x : int) (y : int) := equal (compare x y) 1. Ltac2 le (x : int) (y : int) := (* we might use [lt x (add y 1)], but that has the wrong behavior on MAX_INT *) match equal x y with | true => true | false => lt x y end. Ltac2 ge (x : int) (y : int) := (* we might use [lt (add x 1) y], but that has the wrong behavior on MAX_INT *) match equal x y with | true => true | false => gt x y end. 
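(* For illustration: [compare x y] follows the usual convention, negative
   when [x] is smaller, [0] on equality, positive when [x] is greater; the
   derived tests above rely on the exact values [-1] and [1], so e.g.
   [compare 1 2] is [-1] and hence [lt 1 2] is [true]. *)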
coq-8.15.0/user-contrib/Ltac2/List.v000066400000000000000000000365501417001151100170520ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 0 | _ :: xs => Int.add 1 (length xs) end. Ltac2 rec compare_lengths (ls1 : 'a list) (ls2 : 'b list) := match ls1 with | [] => match ls2 with | [] => 0 | _ :: _ => -1 end | _ :: ls1 => match ls2 with | [] => 1 | _ :: ls2 => compare_lengths ls1 ls2 end end. Ltac2 rec compare_length_with (ls : 'a list) (n : int) := match Int.lt n 0 with | true => 1 | false => match ls with | [] => Int.compare 0 n | _ :: ls => compare_length_with ls (Int.sub n 1) end end. Ltac2 cons (x : 'a) (xs : 'a list) := x :: xs. (* Since Ltac-2 distinguishes between backtracking and fatal exceptions, we provide option and default variants of functions which throw in the OCaml stdlib. *) Ltac2 hd_opt (ls : 'a list) := match ls with | [] => None | x :: xs => Some x end. Ltac2 hd (ls : 'a list) := match ls with | [] => Control.throw_invalid_argument "List.hd" | x :: xs => x end. Ltac2 tl (ls : 'a list) := match ls with | [] => [] | x :: xs => xs end. Ltac2 rec last_opt (ls : 'a list) := match ls with | [] => None | x :: xs => match xs with | [] => Some x | _ :: _ => last_opt xs end end. Ltac2 last (ls : 'a list) := match last_opt ls with | None => Control.throw_invalid_argument "List.last" | Some v => v end. Ltac2 rec removelast (ls : 'a list) := match ls with | [] => [] | x :: xs => match xs with | [] => [] | _ :: _ => x :: removelast xs end end. Ltac2 rec nth_opt_aux (ls : 'a list) (n : int) := match ls with | [] => None | x :: xs => match Int.equal n 0 with | true => Some x | false => nth_opt_aux xs (Int.sub n 1) end end. Ltac2 nth_opt (ls : 'a list) (n : int) := Control.assert_valid_argument "List.nth" (Int.ge n 0); nth_opt_aux ls n. Ltac2 nth (ls : 'a list) (n : int) := match nth_opt ls n with | Some v => v | None => Control.throw_out_of_bounds "List.nth" end. Ltac2 rec rev_append (l1 : 'a list) (l2 : 'a list) := match l1 with | [] => l2 | a :: l => rev_append l (a :: l2) end. Ltac2 rev l := rev_append l []. Ltac2 rec append ls1 ls2 := match ls1 with | [] => ls2 | x :: xs => x :: append xs ls2 end. Ltac2 rec concat (ls : 'a list list) := match ls with | [] => [] | x :: xs => append x (concat xs) end. Ltac2 flatten (ls : 'a list list) := concat ls. Ltac2 rec iter (f : 'a -> unit) (ls : 'a list) := match ls with | [] => () | l :: ls => f l; iter f ls end. Ltac2 rec iteri_aux (i : int) (f : int -> 'a -> unit) (ls : 'a list) := match ls with | [] => () | l :: ls => f i l; iteri_aux (Int.add i 1) f ls end. Ltac2 iteri (f : int -> 'a -> unit) (ls : 'a list) := iteri_aux 0 f ls. Ltac2 rec map (f : 'a -> 'b) (ls : 'a list) := match ls with | [] => [] | l :: ls => f l :: map f ls end. Ltac2 rec mapi_aux (i : int) (f : int -> 'a -> 'b) (ls : 'a list) := match ls with | [] => [] | l :: ls => f i l :: mapi_aux (Int.add i 1) f ls end. Ltac2 mapi (f : int -> 'a -> 'b) (ls : 'a list) := mapi_aux 0 f ls. Ltac2 rec flat_map (f : 'a -> 'b list) (xs : 'a list) := match xs with | [] => [] | x :: xs => append (f x) (flat_map f xs) end. (* from the OCaml std lib *) Ltac2 rev_map (f : 'a -> 'b) (ls : 'a list) := let rec rmap_f accu ls := match ls with | [] => accu | a::l => rmap_f (f a :: accu) l end in rmap_f [] ls. 
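(** Small sketches of the combinators defined so far; the expected values are
    given in the comments. *)
Ltac2 Eval map (fun x => Int.add x 1) [1; 2; 3].     (* [2; 3; 4] *)
Ltac2 Eval rev_map (fun x => Int.mul x x) [1; 2; 3]. (* [9; 4; 1] *)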
Ltac2 rec fold_right (f : 'a -> 'b -> 'b) (a : 'b) (ls : 'a list) := match ls with | [] => a | l :: ls => f l (fold_right f a ls) end. Ltac2 rec fold_left (f : 'a -> 'b -> 'a) (xs : 'b list) (a : 'a) := match xs with | [] => a | x :: xs => fold_left f xs (f a x) end. Ltac2 rec iter2 (f : 'a -> 'b -> unit) (ls1 : 'a list) (ls2 : 'b list) := match ls1 with | [] => match ls2 with | [] => () | _ :: _ => Control.throw_invalid_argument "List.iter2" end | l1 :: ls1 => match ls2 with | [] => Control.throw_invalid_argument "List.iter2" | l2 :: ls2 => f l1 l2; iter2 f ls1 ls2 end end. Ltac2 rec map2 (f : 'a -> 'b -> 'c) (ls1 : 'a list) (ls2 : 'b list) := match ls1 with | [] => match ls2 with | [] => [] | _ :: _ => Control.throw_invalid_argument "List.map2" end | l1 :: ls1 => match ls2 with | [] => Control.throw_invalid_argument "List.map2" | l2 :: ls2 => f l1 l2 :: map2 f ls1 ls2 end end. (* from the OCaml std lib *) Ltac2 rev_map2 (f : 'a -> 'b -> 'c) (ls1 : 'a list) (ls2 : 'b list) := let rec rmap2_f accu ls1 ls2 := match ls1 with | [] => match ls2 with | [] => accu | _ :: _ => Control.throw_invalid_argument "List.rev_map2" end | l1 :: ls1 => match ls2 with | [] => Control.throw_invalid_argument "List.rev_map2" | l2 :: ls2 => rmap2_f (f l1 l2 :: accu) ls1 ls2 end end in rmap2_f [] ls1 ls2. Ltac2 rec fold_right2 (f : 'a -> 'b -> 'c -> 'c) (a : 'c) (ls1 : 'a list) (ls2 : 'b list) := match ls1 with | [] => match ls2 with | [] => a | _ :: _ => Control.throw_invalid_argument "List.fold_right2" end | l1 :: ls1 => match ls2 with | [] => Control.throw_invalid_argument "List.fold_right2" | l2 :: ls2 => f l1 l2 (fold_right2 f a ls1 ls2) end end. Ltac2 rec fold_left2 (f : 'a -> 'b -> 'c -> 'a) (ls1 : 'b list) (ls2 : 'c list) (a : 'a) := match ls1 with | [] => match ls2 with | [] => a | _ :: _ => Control.throw_invalid_argument "List.fold_left2" end | l1 :: ls1 => match ls2 with | [] => Control.throw_invalid_argument "List.fold_left2" | l2 :: ls2 => fold_left2 f ls1 ls2 (f a l1 l2) end end. Ltac2 rec for_all f ls := match ls with | [] => true | x :: xs => match f x with | true => for_all f xs | false => false end end. (* we would call this [exists] a la OCaml's [List.exists], but that's a syntax error, so instead we name it exist *) Ltac2 rec exist f ls := match ls with | [] => false | x :: xs => match f x with | true => true | false => exist f xs end end. Ltac2 rec for_all2 f xs ys := match xs with | [] => match ys with | [] => true | y :: ys' => Control.throw_invalid_argument "List.for_all2" end | x :: xs' => match ys with | [] => Control.throw_invalid_argument "List.for_all2" | y :: ys' => match f x y with | true => for_all2 f xs' ys' | false => false end end end. Ltac2 rec exist2 f xs ys := match xs with | [] => match ys with | [] => false | y :: ys' => Control.throw_invalid_argument "List.exist2" end | x :: xs' => match ys with | [] => Control.throw_invalid_argument "List.exist2" | y :: ys' => match f x y with | true => true | false => exist2 f xs' ys' end end end. Ltac2 rec find_opt f xs := match xs with | [] => None | x :: xs => match f x with | true => Some x | false => find_opt f xs end end. Ltac2 find f xs := match find_opt f xs with | Some v => v | None => Control.throw Not_found end. Ltac2 rec find_rev_opt f xs := match xs with | [] => None | x :: xs => match find_rev_opt f xs with | Some v => Some v | None => match f x with | true => Some x | false => None end end end. Ltac2 find_rev f xs := match find_rev_opt f xs with | Some v => v | None => Control.throw Not_found end. 
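(** Illustrative sketches of the predicates above (expected results in the
    comments). *)
Ltac2 Eval for_all (fun x => Int.gt x 0) [1; 2; 3].  (* true *)
Ltac2 Eval exist (fun x => Int.equal x 2) [1; 2; 3]. (* true *)
Ltac2 Eval find_opt (fun x => Int.gt x 1) [1; 2; 3]. (* Some 2 *)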
Ltac2 mem (eq : 'a -> 'a -> bool) (a : 'a) (ls : 'a list) := exist (eq a) ls. Ltac2 rec filter f xs := match xs with | [] => [] | x :: xs => match f x with | true => x :: filter f xs | false => filter f xs end end. Ltac2 rec filter_out f xs := filter (fun x => Bool.neg (f x)) xs. Ltac2 find_all (f : 'a -> bool) (ls : 'a list) := filter f ls. Ltac2 remove (eqb : 'a -> 'a -> bool) (x : 'a) (ls : 'a list) := filter_out (eqb x) ls. Ltac2 count_occ (eqb : 'a -> 'a -> bool) (x : 'a) (ls : 'a list) := length (filter (eqb x) ls). (* from the Coq stdlib *) Ltac2 rec list_power (ls1 : 'a list) (ls2 : 'b list) := match ls1 with | [] => [] :: [] | x :: t => flat_map (fun f => map (fun y => (x, y) :: f) ls2) (list_power t ls2) end. Ltac2 rec partition (f : 'a -> bool) (l : 'a list) := match l with | [] => ([], []) | x :: tl => let (g, d) := partition f tl in match f x with | true => ((x::g), d) | false => (g, (x::d)) end end. (* from the Coq stdlib *) (** [list_prod] has the same signature as [combine], but unlike [combine], it adds every possible pairs, not only those at the same position. *) Ltac2 rec list_prod (ls1 : 'a list) (ls2 : 'b list) := match ls1 with | [] => [] | x :: t => append (map (fun y => (x, y)) ls2) (list_prod t ls2) end. Ltac2 rec firstn (n : int) (ls : 'a list) := Control.assert_valid_argument "List.firstn" (Int.ge n 0); match Int.equal n 0 with | true => [] | false => match ls with | [] => Control.throw_out_of_bounds "List.firstn" | x :: xs => x :: firstn (Int.sub n 1) xs end end. Ltac2 rec skipn (n : int) (ls : 'a list) := Control.assert_valid_argument "List.skipn" (Int.ge n 0); match Int.equal n 0 with | true => ls | false => match ls with | [] => Control.throw_out_of_bounds "List.skipn" | x :: xs => skipn (Int.sub n 1) xs end end. Ltac2 lastn (n : int) (ls : 'a list) := let l := length ls in Control.assert_valid_argument "List.lastn" (Int.ge n 0); Control.assert_bounds "List.lastn" (Int.le n l); skipn (Int.sub l n). Ltac2 rec nodup (eqb : 'a -> 'a -> bool) (ls : 'a list) := match ls with | [] => [] | x :: xs => match mem eqb x xs with | true => nodup eqb xs | false => x :: nodup eqb xs end end. (* seq start 1 last = start :: start + 1 :: ... :: (last - 1) *) Ltac2 rec seq (start : int) (step : int) (last : int) := match Int.lt (Int.sub last start) step with | true => [] | false => start :: seq (Int.add start step) step last end. Ltac2 init (len : int) (f : int -> 'a) := Control.assert_valid_argument "List.init" (Int.ge len 0); map f (seq 0 1 len). Ltac2 repeat (x : 'a) (n : 'int) := init n (fun _ => x). Ltac2 assoc (eqk : 'k -> 'k -> bool) (k : 'k) (l : ('k * 'v) list) := let eq_key kv := let (k', _) := kv in eqk k k' in let (_, v) := find eq_key l in v. Ltac2 assoc_opt (eqk : 'k -> 'k -> bool) (k : 'k) (l : ('k * 'v) list) := let eq_key kv := let (k', _) := kv in eqk k k' in match find_opt eq_key l with | Some kv => let (_, v) := kv in Some v | None => None end. Ltac2 mem_assoc (eqk : 'k -> 'k -> bool) (k : 'k) (l : ('k * 'v) list) := let eq_key kv := let (k', _) := kv in eqk k k' in exist eq_key l. Ltac2 remove_assoc (eqk : 'k -> 'k -> bool) (k : 'k) (l : ('k * 'v) list) := let eq_key kv := let (k', _) := kv in eqk k k' in filter_out eq_key l. Ltac2 rec split (ls : ('a * 'b) list) := match ls with | [] => ([], []) | xy :: tl => let (x, y) := xy in let (left, right) := split tl in ((x::left), (y::right)) end. 
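(** Illustrative sketches of the helpers above (expected results in the
    comments). *)
Ltac2 Eval assoc_opt Int.equal 2 [(1, "a"); (2, "b")].   (* Some "b" *)
Ltac2 Eval partition (fun x => Int.le x 2) [1; 2; 3; 4]. (* ([1; 2], [3; 4]) *)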
Ltac2 rec combine (ls1 : 'a list) (ls2 : 'b list) := match ls1 with | [] => match ls2 with | [] => [] | _ :: _ => Control.throw_invalid_argument "List.combine" end | x :: xs => match ls2 with | y :: ys => (x, y) :: combine xs ys | [] => Control.throw_invalid_argument "List.combine" end end. Ltac2 enumerate (ls : 'a list) := combine (seq 0 1 (length ls)) ls. (* from Coq stdlib *) Ltac2 rec merge (cmp : 'a -> 'a -> int) (l1 : 'a list) (l2 : 'b list) := let rec merge_aux l2 := match l1 with | [] => l2 | a1 :: l1' => match l2 with | [] => l1 | a2 :: l2' => match Int.le (cmp a1 a2) 0 with | true => a1 :: merge cmp l1' l2 | false => a2 :: merge_aux l2' end end end in merge_aux l2. Ltac2 rec merge_list_to_stack cmp stack l := match stack with | [] => [Some l] | l' :: stack' => match l' with | None => Some l :: stack' | Some l' => None :: merge_list_to_stack cmp stack' (merge cmp l' l) end end. Ltac2 rec merge_stack cmp stack := match stack with | [] => [] | l :: stack' => match l with | None => merge_stack cmp stack' | Some l => merge cmp l (merge_stack cmp stack') end end. Ltac2 rec iter_merge cmp stack l := match l with | [] => merge_stack cmp stack | a::l' => iter_merge cmp (merge_list_to_stack cmp stack [a]) l' end. Ltac2 sort cmp l := iter_merge cmp [] l. (* TODO: maybe replace this with a faster implementation *) Ltac2 sort_uniq (cmp : 'a -> 'a -> int) (l : 'a list) := let rec uniq l := match l with | [] => [] | x1 :: xs => match xs with | [] => x1 :: xs | x2 :: _ => match Int.equal (cmp x1 x2) 0 with | true => uniq xs | false => x1 :: uniq xs end end end in uniq (sort cmp l). coq-8.15.0/user-contrib/Ltac2/Ltac1.v000066400000000000000000000044421417001151100170760ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t := "ltac2" "ltac1_ref". (** Returns the Ltac1 definition with the given absolute name. *) Ltac2 @ external run : t -> unit := "ltac2" "ltac1_run". (** Runs an Ltac1 value, assuming it is a 'tactic', i.e. not returning anything. *) Ltac2 @ external lambda : (t -> t) -> t := "ltac2" "ltac1_lambda". (** Embed an Ltac2 function into Ltac1 values. Contrarily to the ltac1:(...) quotation, this function allows both to capture an Ltac2 context inside the closure and to return an Ltac1 value. Returning values in Ltac1 is a intrepid endeavour prone to weird runtime semantics. *) Ltac2 @ external apply : t -> t list -> (t -> unit) -> unit := "ltac2" "ltac1_apply". (** Applies an Ltac1 value to a list of arguments, and provides the result in CPS style. It does **not** run the returned value. *) (** Conversion functions *) Ltac2 @ external of_constr : constr -> t := "ltac2" "ltac1_of_constr". Ltac2 @ external to_constr : t -> constr option := "ltac2" "ltac1_to_constr". Ltac2 @ external of_ident : ident -> t := "ltac2" "ltac1_of_ident". Ltac2 @ external to_ident : t -> ident option := "ltac2" "ltac1_to_ident". Ltac2 @ external of_list : t list -> t := "ltac2" "ltac1_of_list". Ltac2 @ external to_list : t -> t list option := "ltac2" "ltac1_to_list". coq-8.15.0/user-contrib/Ltac2/Ltac2.v000066400000000000000000000021321417001151100170710ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit := "ltac2" "print". 
Ltac2 @ external of_string : string -> message := "ltac2" "message_of_string". Ltac2 @ external of_int : int -> message := "ltac2" "message_of_int". Ltac2 @ external of_ident : ident -> message := "ltac2" "message_of_ident". Ltac2 @ external of_constr : constr -> message := "ltac2" "message_of_constr". (** Panics if there is more than one goal under focus. *) Ltac2 @ external of_exn : exn -> message := "ltac2" "message_of_exn". (** Panics if there is more than one goal under focus. *) Ltac2 @ external concat : message -> message -> message := "ltac2" "message_concat". Module Format. (** Only for internal use. *) Ltac2 @ external stop : unit -> ('a, 'b, 'c, 'a) format := "ltac2" "format_stop". Ltac2 @ external string : ('a, 'b, 'c, 'd) format -> (string -> 'a, 'b, 'c, 'd) format := "ltac2" "format_string". Ltac2 @ external int : ('a, 'b, 'c, 'd) format -> (int -> 'a, 'b, 'c, 'd) format := "ltac2" "format_int". Ltac2 @ external constr : ('a, 'b, 'c, 'd) format -> (constr -> 'a, 'b, 'c, 'd) format := "ltac2" "format_constr". Ltac2 @ external ident : ('a, 'b, 'c, 'd) format -> (ident -> 'a, 'b, 'c, 'd) format := "ltac2" "format_ident". Ltac2 @ external literal : string -> ('a, 'b, 'c, 'd) format -> ('a, 'b, 'c, 'd) format := "ltac2" "format_literal". Ltac2 @ external alpha : ('a, 'b, 'c, 'd) format -> (('b -> 'r -> 'c) -> 'r -> 'a, 'b, 'c, 'd) format := "ltac2" "format_alpha". Ltac2 @ external kfprintf : (message -> 'r) -> ('a, unit, message, 'r) format -> 'a := "ltac2" "format_kfprintf". End Format. coq-8.15.0/user-contrib/Ltac2/Notations.v000066400000000000000000000422511417001151100201100ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* f e | Val ans => let (x, k) := ans in Control.plus (fun _ => x) k end. Ltac2 ifcatch t s f := match Control.case t with | Err e => f e | Val ans => let (x, k) := ans in Control.plus (fun _ => s x) (fun e => s (k e)) end. Ltac2 fail0 (_ : unit) := Control.enter (fun _ => Control.zero (Tactic_failure None)). Ltac2 Notation fail := fail0 (). Ltac2 try0 t := Control.enter (fun _ => orelse t (fun _ => ())). Ltac2 Notation try := try0. Ltac2 rec repeat0 (t : unit -> unit) := Control.enter (fun () => ifcatch (fun _ => Control.progress t) (fun _ => Control.check_interrupt (); repeat0 t) (fun _ => ())). Ltac2 Notation repeat := repeat0. Ltac2 dispatch0 t (head, tail) := match tail with | None => Control.enter (fun _ => t (); Control.dispatch head) | Some tacs => let (def, rem) := tacs in Control.enter (fun _ => t (); Control.extend head def rem) end. Ltac2 Notation t(thunk(self)) ">" "[" l(dispatch) "]" : 4 := dispatch0 t l. Ltac2 do0 n t := let rec aux n t := match Int.equal n 0 with | true => () | false => t (); aux (Int.sub n 1) t end in aux (n ()) t. Ltac2 Notation do := do0. Ltac2 Notation once := Control.once. Ltac2 progress0 tac := Control.enter (fun _ => Control.progress tac). Ltac2 Notation progress := progress0. Ltac2 rec first0 tacs := match tacs with | [] => Control.zero (Tactic_failure None) | tac :: tacs => Control.enter (fun _ => orelse tac (fun _ => first0 tacs)) end. Ltac2 Notation "first" "[" tacs(list0(thunk(tactic(6)), "|")) "]" := first0 tacs. Ltac2 complete tac := let ans := tac () in Control.enter (fun () => Control.zero (Tactic_failure None)); ans. 
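(** A sketch of how the combinators above compose: retry a thunked tactic up
    to [n] times, stopping at the first success.  The name [attempt] is only
    for illustration and is not part of the original API. *)
Ltac2 rec attempt (n : int) (tac : unit -> unit) := (* illustrative helper *)
  match Int.equal n 0 with
  | true => Control.zero (Tactic_failure None)
  | false => orelse tac (fun _ => attempt (Int.sub n 1) tac)
  end.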
Ltac2 rec solve0 tacs := match tacs with | [] => Control.zero (Tactic_failure None) | tac :: tacs => Control.enter (fun _ => orelse (fun _ => complete tac) (fun _ => solve0 tacs)) end. Ltac2 Notation "solve" "[" tacs(list0(thunk(tactic(6)), "|")) "]" := solve0 tacs. Ltac2 time0 tac := Control.time None tac. Ltac2 Notation time := time0. Ltac2 abstract0 tac := Control.abstract None tac. Ltac2 Notation abstract := abstract0. (** Base tactics *) (** Note that we redeclare notations that can be parsed as mere identifiers as abbreviations, so that it allows to parse them as function arguments without having to write them within parentheses. *) (** Enter and check evar resolution *) Ltac2 enter_h ev f arg := match ev with | true => Control.enter (fun () => f ev (arg ())) | false => Control.enter (fun () => Control.with_holes arg (fun x => f ev x)) end. Ltac2 intros0 ev p := Control.enter (fun () => Std.intros ev p). Ltac2 Notation "intros" p(intropatterns) := intros0 false p. Ltac2 Notation intros := intros. Ltac2 Notation "eintros" p(intropatterns) := intros0 true p. Ltac2 Notation eintros := eintros. Ltac2 split0 ev bnd := enter_h ev Std.split bnd. Ltac2 Notation "split" bnd(thunk(with_bindings)) := split0 false bnd. Ltac2 Notation split := split. Ltac2 Notation "esplit" bnd(thunk(with_bindings)) := split0 true bnd. Ltac2 Notation esplit := esplit. Ltac2 exists0 ev bnds := match bnds with | [] => split0 ev (fun () => Std.NoBindings) | _ => let rec aux bnds := match bnds with | [] => () | bnd :: bnds => split0 ev bnd; aux bnds end in aux bnds end. Ltac2 Notation "exists" bnd(list0(thunk(bindings), ",")) := exists0 false bnd. (* Ltac2 Notation exists := exists. *) Ltac2 Notation "eexists" bnd(list0(thunk(bindings), ",")) := exists0 true bnd. Ltac2 Notation eexists := eexists. Ltac2 left0 ev bnd := enter_h ev Std.left bnd. Ltac2 Notation "left" bnd(thunk(with_bindings)) := left0 false bnd. Ltac2 Notation left := left. Ltac2 Notation "eleft" bnd(thunk(with_bindings)) := left0 true bnd. Ltac2 Notation eleft := eleft. Ltac2 right0 ev bnd := enter_h ev Std.right bnd. Ltac2 Notation "right" bnd(thunk(with_bindings)) := right0 false bnd. Ltac2 Notation right := right. Ltac2 Notation "eright" bnd(thunk(with_bindings)) := right0 true bnd. Ltac2 Notation eright := eright. Ltac2 constructor0 ev n bnd := enter_h ev (fun ev bnd => Std.constructor_n ev n bnd) bnd. Ltac2 Notation "constructor" := Control.enter (fun () => Std.constructor false). Ltac2 Notation constructor := constructor. Ltac2 Notation "constructor" n(tactic) bnd(thunk(with_bindings)) := constructor0 false n bnd. Ltac2 Notation "econstructor" := Control.enter (fun () => Std.constructor true). Ltac2 Notation econstructor := econstructor. Ltac2 Notation "econstructor" n(tactic) bnd(thunk(with_bindings)) := constructor0 true n bnd. Ltac2 specialize0 c pat := enter_h false (fun _ c => Std.specialize c pat) c. Ltac2 Notation "specialize" c(thunk(seq(constr, with_bindings))) ipat(opt(seq("as", intropattern))) := specialize0 c ipat. Ltac2 elim0 ev c bnd use := let f ev (c, bnd, use) := Std.elim ev (c, bnd) use in enter_h ev f (fun () => c (), bnd (), use ()). Ltac2 Notation "elim" c(thunk(constr)) bnd(thunk(with_bindings)) use(thunk(opt(seq("using", constr, with_bindings)))) := elim0 false c bnd use. Ltac2 Notation "eelim" c(thunk(constr)) bnd(thunk(with_bindings)) use(thunk(opt(seq("using", constr, with_bindings)))) := elim0 true c bnd use. Ltac2 apply0 adv ev cb cl := Std.apply adv ev cb cl. 
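(** A sketch of using the helpers above from plain Ltac2 code rather than
    through notations: an [eexists]-like tactic providing a single witness.
    The name [eexists_constr] is only for illustration. *)
Ltac2 eexists_constr (c : constr) := (* illustrative helper *)
  split0 true (fun () => Std.ImplicitBindings [c]).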
Ltac2 Notation "eapply" cb(list1(thunk(seq(constr, with_bindings)), ",")) cl(opt(seq("in", ident, opt(seq("as", intropattern))))) := apply0 true true cb cl. Ltac2 Notation "apply" cb(list1(thunk(seq(constr, with_bindings)), ",")) cl(opt(seq("in", ident, opt(seq("as", intropattern))))) := apply0 true false cb cl. Ltac2 default_on_concl cl := match cl with | None => { Std.on_hyps := Some []; Std.on_concl := Std.AllOccurrences } | Some cl => cl end. Ltac2 pose0 ev p := enter_h ev (fun ev (na, p) => Std.pose na p) p. Ltac2 Notation "pose" p(thunk(pose)) := pose0 false p. Ltac2 Notation "epose" p(thunk(pose)) := pose0 true p. Ltac2 Notation "set" p(thunk(pose)) cl(opt(clause)) := Std.set false p (default_on_concl cl). Ltac2 Notation "eset" p(thunk(pose)) cl(opt(clause)) := Std.set true p (default_on_concl cl). Ltac2 assert0 ev ast := enter_h ev (fun _ ast => Std.assert ast) ast. Ltac2 Notation "assert" ast(thunk(assert)) := assert0 false ast. Ltac2 Notation "eassert" ast(thunk(assert)) := assert0 true ast. Ltac2 enough_from_assertion(a : Std.assertion) := match a with | Std.AssertType ip_opt term tac_opt => Std.enough term (Some tac_opt) ip_opt | Std.AssertValue ident constr => Std.pose (Some ident) constr end. Ltac2 enough0 ev ast := enter_h ev (fun _ ast => enough_from_assertion ast) ast. Ltac2 Notation "enough" ast(thunk(assert)) := enough0 false ast. Ltac2 Notation "eenough" ast(thunk(assert)) := enough0 true ast. Ltac2 default_everywhere cl := match cl with | None => { Std.on_hyps := None; Std.on_concl := Std.AllOccurrences } | Some cl => cl end. Ltac2 Notation "remember" c(thunk(open_constr)) na(opt(seq("as", ident))) pat(opt(seq("eqn", ":", intropattern))) cl(opt(clause)) := Std.remember false na c pat (default_everywhere cl). Ltac2 Notation "eremember" c(thunk(open_constr)) na(opt(seq("as", ident))) pat(opt(seq("eqn", ":", intropattern))) cl(opt(clause)) := Std.remember true na c pat (default_everywhere cl). Ltac2 induction0 ev ic use := let f ev use := Std.induction ev ic use in enter_h ev f use. Ltac2 Notation "induction" ic(list1(induction_clause, ",")) use(thunk(opt(seq("using", constr, with_bindings)))) := induction0 false ic use. Ltac2 Notation "einduction" ic(list1(induction_clause, ",")) use(thunk(opt(seq("using", constr, with_bindings)))) := induction0 true ic use. Ltac2 generalize0 gen := enter_h false (fun _ gen => Std.generalize gen) gen. Ltac2 Notation "generalize" gen(thunk(list1(seq (open_constr, occurrences, opt(seq("as", ident))), ","))) := generalize0 gen. Ltac2 destruct0 ev ic use := let f ev use := Std.destruct ev ic use in enter_h ev f use. Ltac2 Notation "destruct" ic(list1(induction_clause, ",")) use(thunk(opt(seq("using", constr, with_bindings)))) := destruct0 false ic use. Ltac2 Notation "edestruct" ic(list1(induction_clause, ",")) use(thunk(opt(seq("using", constr, with_bindings)))) := destruct0 true ic use. Ltac2 Notation "simple" "inversion" arg(destruction_arg) pat(opt(seq("as", intropattern))) ids(opt(seq("in", list1(ident)))) := Std.inversion Std.SimpleInversion arg pat ids. Ltac2 Notation "inversion" arg(destruction_arg) pat(opt(seq("as", intropattern))) ids(opt(seq("in", list1(ident)))) := Std.inversion Std.FullInversion arg pat ids. Ltac2 Notation "inversion_clear" arg(destruction_arg) pat(opt(seq("as", intropattern))) ids(opt(seq("in", list1(ident)))) := Std.inversion Std.FullInversionClear arg pat ids. Ltac2 Notation "red" cl(opt(clause)) := Std.red (default_on_concl cl). Ltac2 Notation red := red. 
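(** A sketch of composing the wrappers above with primitives from [Std]:
    introduce a local definition and immediately forget its body, roughly
    [pose] followed by [clearbody].  The name [pose_opaque] is only for
    illustration. *)
Ltac2 pose_opaque (na : ident) (c : constr) := (* illustrative helper *)
  pose0 false (fun () => (Some na, c)); Std.clearbody [na].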
Ltac2 Notation "hnf" cl(opt(clause)) := Std.hnf (default_on_concl cl). Ltac2 Notation hnf := hnf. Ltac2 Notation "simpl" s(strategy) pl(opt(seq(pattern, occurrences))) cl(opt(clause)) := Std.simpl s pl (default_on_concl cl). Ltac2 Notation simpl := simpl. Ltac2 Notation "cbv" s(strategy) cl(opt(clause)) := Std.cbv s (default_on_concl cl). Ltac2 Notation cbv := cbv. Ltac2 Notation "cbn" s(strategy) cl(opt(clause)) := Std.cbn s (default_on_concl cl). Ltac2 Notation cbn := cbn. Ltac2 Notation "lazy" s(strategy) cl(opt(clause)) := Std.lazy s (default_on_concl cl). Ltac2 Notation lazy := lazy. Ltac2 Notation "unfold" pl(list1(seq(reference, occurrences), ",")) cl(opt(clause)) := Std.unfold pl (default_on_concl cl). Ltac2 fold0 pl cl := let cl := default_on_concl cl in Control.enter (fun () => Control.with_holes pl (fun pl => Std.fold pl cl)). Ltac2 Notation "fold" pl(thunk(list1(open_constr))) cl(opt(clause)) := fold0 pl cl. Ltac2 Notation "pattern" pl(list1(seq(constr, occurrences), ",")) cl(opt(clause)) := Std.pattern pl (default_on_concl cl). Ltac2 Notation "vm_compute" pl(opt(seq(pattern, occurrences))) cl(opt(clause)) := Std.vm pl (default_on_concl cl). Ltac2 Notation vm_compute := vm_compute. Ltac2 Notation "native_compute" pl(opt(seq(pattern, occurrences))) cl(opt(clause)) := Std.native pl (default_on_concl cl). Ltac2 Notation native_compute := native_compute. Ltac2 Notation "eval" "red" "in" c(constr) := Std.eval_red c. Ltac2 Notation "eval" "hnf" "in" c(constr) := Std.eval_hnf c. Ltac2 Notation "eval" "simpl" s(strategy) pl(opt(seq(pattern, occurrences))) "in" c(constr) := Std.eval_simpl s pl c. Ltac2 Notation "eval" "cbv" s(strategy) "in" c(constr) := Std.eval_cbv s c. Ltac2 Notation "eval" "cbn" s(strategy) "in" c(constr) := Std.eval_cbn s c. Ltac2 Notation "eval" "lazy" s(strategy) "in" c(constr) := Std.eval_lazy s c. Ltac2 Notation "eval" "unfold" pl(list1(seq(reference, occurrences), ",")) "in" c(constr) := Std.eval_unfold pl c. Ltac2 Notation "eval" "fold" pl(thunk(list1(open_constr))) "in" c(constr) := Std.eval_fold (pl ()) c. Ltac2 Notation "eval" "pattern" pl(list1(seq(constr, occurrences), ",")) "in" c(constr) := Std.eval_pattern pl c. Ltac2 Notation "eval" "vm_compute" pl(opt(seq(pattern, occurrences))) "in" c(constr) := Std.eval_vm pl c. Ltac2 Notation "eval" "native_compute" pl(opt(seq(pattern, occurrences))) "in" c(constr) := Std.eval_native pl c. Ltac2 change0 p cl := let (pat, c) := p in Std.change pat c (default_on_concl cl). Ltac2 Notation "change" c(conversion) cl(opt(clause)) := change0 c cl. Ltac2 rewrite0 ev rw cl tac := let cl := default_on_concl cl in Std.rewrite ev rw cl tac. Ltac2 Notation "rewrite" rw(list1(rewriting, ",")) cl(opt(clause)) tac(opt(seq("by", thunk(tactic)))) := rewrite0 false rw cl tac. Ltac2 Notation "erewrite" rw(list1(rewriting, ",")) cl(opt(clause)) tac(opt(seq("by", thunk(tactic)))) := rewrite0 true rw cl tac. (** coretactics *) Ltac2 exact0 ev c := Control.enter (fun _ => match ev with | true => let c := c () in Control.refine (fun _ => c) | false => Control.with_holes c (fun c => Control.refine (fun _ => c)) end ). Ltac2 Notation "exact" c(thunk(open_constr)) := exact0 false c. Ltac2 Notation "eexact" c(thunk(open_constr)) := exact0 true c. Ltac2 Notation "intro" id(opt(ident)) mv(opt(move_location)) := Std.intro id mv. Ltac2 Notation intro := intro. Ltac2 Notation "move" id(ident) mv(move_location) := Std.move id mv. Ltac2 Notation reflexivity := Std.reflexivity (). Ltac2 symmetry0 cl := Std.symmetry (default_on_concl cl). 
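(** Illustrative sketch of the [eval] notations above.  This assumes the
    standard prelude is loaded at this point so that [2 + 2] is a well-typed
    term; the expression is expected to reduce to the constr [4]. *)
Ltac2 Eval (eval cbv in (2 + 2)).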
Ltac2 Notation "symmetry" cl(opt(clause)) := symmetry0 cl. Ltac2 Notation symmetry := symmetry. Ltac2 Notation "revert" ids(list1(ident)) := Std.revert ids. Ltac2 Notation assumption := Std.assumption (). Ltac2 Notation etransitivity := Std.etransitivity (). Ltac2 Notation admit := Std.admit (). Ltac2 clear0 ids := match ids with | [] => Std.keep [] | _ => Std.clear ids end. Ltac2 Notation "clear" ids(list0(ident)) := clear0 ids. Ltac2 Notation "clear" "-" ids(list1(ident)) := Std.keep ids. Ltac2 Notation clear := clear. Ltac2 Notation refine := Control.refine. (** extratactics *) Ltac2 absurd0 c := Control.enter (fun _ => Std.absurd (c ())). Ltac2 Notation "absurd" c(thunk(open_constr)) := absurd0 c. Ltac2 subst0 ids := match ids with | [] => Std.subst_all () | _ => Std.subst ids end. Ltac2 Notation "subst" ids(list0(ident)) := subst0 ids. Ltac2 Notation subst := subst. Ltac2 Notation "discriminate" arg(opt(destruction_arg)) := Std.discriminate false arg. Ltac2 Notation discriminate := discriminate. Ltac2 Notation "ediscriminate" arg(opt(destruction_arg)) := Std.discriminate true arg. Ltac2 Notation ediscriminate := ediscriminate. Ltac2 Notation "injection" arg(opt(destruction_arg)) ipat(opt(seq("as", intropatterns))):= Std.injection false ipat arg. Ltac2 Notation "einjection" arg(opt(destruction_arg)) ipat(opt(seq("as", intropatterns))):= Std.injection true ipat arg. (** Auto *) Ltac2 default_db dbs := match dbs with | None => Some [] | Some dbs => match dbs with | None => None | Some l => Some l end end. Ltac2 default_list use := match use with | None => [] | Some use => use end. Ltac2 trivial0 use dbs := let dbs := default_db dbs in let use := default_list use in Std.trivial Std.Off use dbs. Ltac2 Notation "trivial" use(opt(seq("using", list1(thunk(constr), ",")))) dbs(opt(seq("with", hintdb))) := trivial0 use dbs. Ltac2 Notation trivial := trivial. Ltac2 auto0 n use dbs := let dbs := default_db dbs in let use := default_list use in Std.auto Std.Off n use dbs. Ltac2 Notation "auto" n(opt(tactic(0))) use(opt(seq("using", list1(thunk(constr), ",")))) dbs(opt(seq("with", hintdb))) := auto0 n use dbs. Ltac2 Notation auto := auto. Ltac2 eauto0 n p use dbs := let dbs := default_db dbs in let use := default_list use in Std.eauto Std.Off n p use dbs. Ltac2 Notation "eauto" n(opt(tactic(0))) p(opt(tactic(0))) use(opt(seq("using", list1(thunk(constr), ",")))) dbs(opt(seq("with", hintdb))) := eauto0 n p use dbs. Ltac2 Notation eauto := eauto. Ltac2 Notation "typeclasses_eauto" n(opt(tactic(0))) dbs(opt(seq("with", list1(ident)))) := Std.typeclasses_eauto None n dbs. Ltac2 Notation "typeclasses_eauto" "bfs" n(opt(tactic(0))) dbs(opt(seq("with", list1(ident)))) := Std.typeclasses_eauto (Some Std.BFS) n dbs. Ltac2 Notation typeclasses_eauto := typeclasses_eauto. Ltac2 Notation "unify" x(constr) y(constr) := Std.unify x y. (** Congruence *) Ltac2 f_equal0 () := ltac1:(f_equal). Ltac2 Notation f_equal := f_equal0 (). (** now *) Ltac2 now0 t := t (); ltac1:(easy). Ltac2 Notation "now" t(thunk(self)) : 6 := now0 t. coq-8.15.0/user-contrib/Ltac2/Option.v000066400000000000000000000031531417001151100174000ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit) (ov : 'a option) := match ov with | Some v => f v | None => () end. Ltac2 map (f : 'a -> 'b) (ov : 'a option) := match ov with | Some v => Some (f v) | None => None end. 
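(** Illustrative sketch of [map] above; the expected result is shown in the
    comment. *)
Ltac2 Eval map (fun x => (x, x)) (Some 0). (* Some (0, 0) *)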
Ltac2 default (def : 'a) (ov : 'a option) := match ov with | Some v => v | None => def end. Ltac2 map_default (f : 'a -> 'b) (def : 'b) (ov : 'a option) := match ov with | Some v => f v | None => def end. Ltac2 get (ov : 'a option) := match ov with | Some v => v | None => Control.throw No_value end. Ltac2 get_bt (ov : 'a option) := match ov with | Some v => v | None => Control.zero No_value end. Ltac2 bind (x : 'a option) (f : 'a -> 'b option) := match x with | Some x => f x | None => None end. Ltac2 ret (x : 'a) := Some x. Ltac2 lift (f : 'a -> 'b) (x : 'a option) := map f x. coq-8.15.0/user-contrib/Ltac2/Pattern.v000066400000000000000000000126251417001151100175510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* context := "ltac2" "pattern_empty_context". (** A trivial context only made of the hole. *) Ltac2 @ external matches : t -> constr -> (ident * constr) list := "ltac2" "pattern_matches". (** If the term matches the pattern, returns the bound variables. If it doesn't, fail with [Match_failure]. Panics if not focused. *) Ltac2 @ external matches_subterm : t -> constr -> context * ((ident * constr) list) := "ltac2" "pattern_matches_subterm". (** Returns a stream of results corresponding to all of the subterms of the term that matches the pattern as in [matches]. The stream is encoded as a backtracking value whose last exception is [Match_failure]. The additional value compared to [matches] is the context of the match, to be filled with the instantiate function. *) Ltac2 @ external matches_vect : t -> constr -> constr array := "ltac2" "pattern_matches_vect". (** Internal version of [matches] that does not return the identifiers. *) Ltac2 @ external matches_subterm_vect : t -> constr -> context * constr array := "ltac2" "pattern_matches_subterm_vect". (** Internal version of [matches_subterms] that does not return the identifiers. *) Ltac2 @ external matches_goal : bool -> (match_kind * t) list -> (match_kind * t) -> ident array * context array * constr array * context := "ltac2" "pattern_matches_goal". (** Given a list of patterns [hpats] for hypotheses and one pattern [cpat] for the conclusion, [matches_goal rev hpats cpat] produces (a stream of) tuples of: - An array of idents, whose size is the length of [hpats], corresponding to the name of matched hypotheses. - An array of contexts, whose size is the length of [hpats], corresponding to the contexts matched for every hypothesis pattern. In case the match kind of a hypothesis was [MatchPattern], the corresponding context is ensured to be empty. - An array of terms, whose size is the total number of pattern variables without duplicates. Terms are ordered by identifier order, e.g. ?a comes before ?b. - A context corresponding to the conclusion, which is ensured to be empty if the kind of [cpat] was [MatchPattern]. This produces a backtracking stream of results containing all the possible result combinations. The order of considered hypotheses is reversed if [rev] is true. *) Ltac2 @ external instantiate : context -> constr -> constr := "ltac2" "pattern_instantiate". (** Fill the hole of a context with the given term. 
*) (** Implementation of Ltac matching over terms and goals *) Ltac2 lazy_match0 t pats := let rec interp m := match m with | [] => Control.zero Match_failure | p :: m => let next _ := interp m in let (knd, pat, f) := p in let p := match knd with | MatchPattern => (fun _ => let context := empty_context () in let bind := matches_vect pat t in fun _ => f context bind) | MatchContext => (fun _ => let (context, bind) := matches_subterm_vect pat t in fun _ => f context bind) end in Control.plus p next end in Control.once (fun () => interp pats) (). Ltac2 multi_match0 t pats := let rec interp m := match m with | [] => Control.zero Match_failure | p :: m => let next _ := interp m in let (knd, pat, f) := p in let p := match knd with | MatchPattern => (fun _ => let context := empty_context () in let bind := matches_vect pat t in f context bind) | MatchContext => (fun _ => let (context, bind) := matches_subterm_vect pat t in f context bind) end in Control.plus p next end in interp pats. Ltac2 one_match0 t m := Control.once (fun _ => multi_match0 t m). Ltac2 lazy_goal_match0 rev pats := let rec interp m := match m with | [] => Control.zero Match_failure | p :: m => let next _ := interp m in let (pat, f) := p in let (phyps, pconcl) := pat in let cur _ := let (hids, hctx, subst, cctx) := matches_goal rev phyps pconcl in fun _ => f hids hctx subst cctx in Control.plus cur next end in Control.once (fun () => interp pats) (). Ltac2 multi_goal_match0 rev pats := let rec interp m := match m with | [] => Control.zero Match_failure | p :: m => let next _ := interp m in let (pat, f) := p in let (phyps, pconcl) := pat in let cur _ := let (hids, hctx, subst, cctx) := matches_goal rev phyps pconcl in f hids hctx subst cctx in Control.plus cur next end in interp pats. Ltac2 one_goal_match0 rev pats := Control.once (fun _ => multi_goal_match0 rev pats). coq-8.15.0/user-contrib/Ltac2/Printf.v000066400000000000000000000046331417001151100173760ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* > where the type value defines which kind of arguments will be accepted and how they will be printed. They can take the following values. - << i >>: takes an argument of type int and behaves as Message.of_int - << I >>: takes an argument of type ident and behaves as Message.of_ident - << s >>: takes an argument of type string and behaves as Message.of_string - << t >>: takes an argument of type constr and behaves as Message.of_constr - << a >>: takes two arguments << f >> of type << (unit -> 'a -> message) >> and << x >> of type << 'a >> and behaves as << f () x >> - << % >>: outputs << % >> verbatim TODO: add printing modifiers. *) Ltac2 printf fmt := Format.kfprintf print fmt. Ltac2 fprintf fmt := Format.kfprintf (fun x => x) fmt. (** The two following notations are made available when this module is imported. - printf will parse a format and generate a function taking the corresponding arguments ant printing the resulting message as per Message.print. In particular when fully applied it has type unit. - fprintf behaves similarly but return the message as a value instead of printing it. *) Ltac2 Notation "printf" fmt(format) := printf fmt. Ltac2 Notation "fprintf" fmt(format) := fprintf fmt. 
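(** Illustrative usage sketch of the notations above; the expected output of
    each command is shown in the comment next to it. *)
Ltac2 Eval printf "%s = %i" "x" 42.   (* prints: x = 42 *)
Ltac2 Eval print (fprintf "%i%%" 99). (* prints: 99% *)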
coq-8.15.0/user-contrib/Ltac2/Std.v000066400000000000000000000242611417001151100166650ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* constr), intro_pattern) | IntroRewrite (bool) ] with or_and_intro_pattern := [ | IntroOrPattern (intro_pattern list list) | IntroAndPattern (intro_pattern list) ]. Ltac2 Type destruction_arg := [ | ElimOnConstr (unit -> constr_with_bindings) | ElimOnIdent (ident) | ElimOnAnonHyp (int) ]. Ltac2 Type induction_clause := { indcl_arg : destruction_arg; indcl_eqn : intro_pattern_naming option; indcl_as : or_and_intro_pattern option; indcl_in : clause option; }. Ltac2 Type assertion := [ | AssertType (intro_pattern option, constr, (unit -> unit) option) | AssertValue (ident, constr) ]. Ltac2 Type repeat := [ | Precisely (int) | UpTo (int) | RepeatStar | RepeatPlus ]. Ltac2 Type orientation := [ LTR | RTL ]. Ltac2 Type rewriting := { rew_orient : orientation option; rew_repeat : repeat; rew_equatn : (unit -> constr_with_bindings); }. Ltac2 Type evar_flag := bool. Ltac2 Type advanced_flag := bool. Ltac2 Type move_location := [ | MoveAfter (ident) | MoveBefore (ident) | MoveFirst | MoveLast ]. Ltac2 Type inversion_kind := [ | SimpleInversion | FullInversion | FullInversionClear ]. (** Standard, built-in tactics. See Ltac1 for documentation. *) Ltac2 @ external intros : evar_flag -> intro_pattern list -> unit := "ltac2" "tac_intros". Ltac2 @ external apply : advanced_flag -> evar_flag -> (unit -> constr_with_bindings) list -> (ident * (intro_pattern option)) option -> unit := "ltac2" "tac_apply". Ltac2 @ external elim : evar_flag -> constr_with_bindings -> constr_with_bindings option -> unit := "ltac2" "tac_elim". Ltac2 @ external case : evar_flag -> constr_with_bindings -> unit := "ltac2" "tac_case". Ltac2 @ external generalize : (constr * occurrences * ident option) list -> unit := "ltac2" "tac_generalize". Ltac2 @ external assert : assertion -> unit := "ltac2" "tac_assert". Ltac2 @ external enough : constr -> (unit -> unit) option option -> intro_pattern option -> unit := "ltac2" "tac_enough". Ltac2 @ external pose : ident option -> constr -> unit := "ltac2" "tac_pose". Ltac2 @ external set : evar_flag -> (unit -> ident option * constr) -> clause -> unit := "ltac2" "tac_set". Ltac2 @ external remember : evar_flag -> ident option -> (unit -> constr) -> intro_pattern option -> clause -> unit := "ltac2" "tac_remember". Ltac2 @ external destruct : evar_flag -> induction_clause list -> constr_with_bindings option -> unit := "ltac2" "tac_destruct". Ltac2 @ external induction : evar_flag -> induction_clause list -> constr_with_bindings option -> unit := "ltac2" "tac_induction". Ltac2 @ external red : clause -> unit := "ltac2" "tac_red". Ltac2 @ external hnf : clause -> unit := "ltac2" "tac_hnf". Ltac2 @ external simpl : red_flags -> (pattern * occurrences) option -> clause -> unit := "ltac2" "tac_simpl". Ltac2 @ external cbv : red_flags -> clause -> unit := "ltac2" "tac_cbv". Ltac2 @ external cbn : red_flags -> clause -> unit := "ltac2" "tac_cbn". Ltac2 @ external lazy : red_flags -> clause -> unit := "ltac2" "tac_lazy". Ltac2 @ external unfold : (reference * occurrences) list -> clause -> unit := "ltac2" "tac_unfold". Ltac2 @ external fold : constr list -> clause -> unit := "ltac2" "tac_fold". Ltac2 @ external pattern : (constr * occurrences) list -> clause -> unit := "ltac2" "tac_pattern". 
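(** A sketch of calling these primitives directly, with an explicit [clause],
    rather than through the notations of [Ltac2.Notations]: reduce the
    conclusion only, touching no hypothesis.  The name [red_in_concl] is only
    for illustration. *)
Ltac2 red_in_concl () := (* illustrative helper *)
  red { on_hyps := Some []; on_concl := AllOccurrences }.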
Ltac2 @ external vm : (pattern * occurrences) option -> clause -> unit := "ltac2" "tac_vm". Ltac2 @ external native : (pattern * occurrences) option -> clause -> unit := "ltac2" "tac_native". Ltac2 @ external eval_red : constr -> constr := "ltac2" "eval_red". Ltac2 @ external eval_hnf : constr -> constr := "ltac2" "eval_hnf". Ltac2 @ external eval_red : constr -> constr := "ltac2" "eval_red". Ltac2 @ external eval_simpl : red_flags -> (pattern * occurrences) option -> constr -> constr := "ltac2" "eval_simpl". Ltac2 @ external eval_cbv : red_flags -> constr -> constr := "ltac2" "eval_cbv". Ltac2 @ external eval_cbn : red_flags -> constr -> constr := "ltac2" "eval_cbn". Ltac2 @ external eval_lazy : red_flags -> constr -> constr := "ltac2" "eval_lazy". Ltac2 @ external eval_unfold : (reference * occurrences) list -> constr -> constr := "ltac2" "eval_unfold". Ltac2 @ external eval_fold : constr list -> constr -> constr := "ltac2" "eval_fold". Ltac2 @ external eval_pattern : (constr * occurrences) list -> constr -> constr := "ltac2" "eval_pattern". Ltac2 @ external eval_vm : (pattern * occurrences) option -> constr -> constr := "ltac2" "eval_vm". Ltac2 @ external eval_native : (pattern * occurrences) option -> constr -> constr := "ltac2" "eval_native". Ltac2 @ external change : pattern option -> (constr array -> constr) -> clause -> unit := "ltac2" "tac_change". Ltac2 @ external rewrite : evar_flag -> rewriting list -> clause -> (unit -> unit) option -> unit := "ltac2" "tac_rewrite". Ltac2 @ external reflexivity : unit -> unit := "ltac2" "tac_reflexivity". Ltac2 @ external assumption : unit -> unit := "ltac2" "tac_assumption". Ltac2 @ external transitivity : constr -> unit := "ltac2" "tac_transitivity". Ltac2 @ external etransitivity : unit -> unit := "ltac2" "tac_etransitivity". Ltac2 @ external cut : constr -> unit := "ltac2" "tac_cut". Ltac2 @ external left : evar_flag -> bindings -> unit := "ltac2" "tac_left". Ltac2 @ external right : evar_flag -> bindings -> unit := "ltac2" "tac_right". Ltac2 @ external constructor : evar_flag -> unit := "ltac2" "tac_constructor". Ltac2 @ external split : evar_flag -> bindings -> unit := "ltac2" "tac_split". Ltac2 @ external constructor_n : evar_flag -> int -> bindings -> unit := "ltac2" "tac_constructorn". Ltac2 @ external intros_until : hypothesis -> unit := "ltac2" "tac_introsuntil". Ltac2 @ external symmetry : clause -> unit := "ltac2" "tac_symmetry". Ltac2 @ external rename : (ident * ident) list -> unit := "ltac2" "tac_rename". Ltac2 @ external revert : ident list -> unit := "ltac2" "tac_revert". Ltac2 @ external admit : unit -> unit := "ltac2" "tac_admit". Ltac2 @ external fix_ : ident option -> int -> unit := "ltac2" "tac_fix". Ltac2 @ external cofix_ : ident option -> unit := "ltac2" "tac_cofix". Ltac2 @ external clear : ident list -> unit := "ltac2" "tac_clear". Ltac2 @ external keep : ident list -> unit := "ltac2" "tac_keep". Ltac2 @ external clearbody : ident list -> unit := "ltac2" "tac_clearbody". Ltac2 @ external exact_no_check : constr -> unit := "ltac2" "tac_exactnocheck". Ltac2 @ external vm_cast_no_check : constr -> unit := "ltac2" "tac_vmcastnocheck". Ltac2 @ external native_cast_no_check : constr -> unit := "ltac2" "tac_nativecastnocheck". Ltac2 @ external inversion : inversion_kind -> destruction_arg -> intro_pattern option -> ident list option -> unit := "ltac2" "tac_inversion". (** coretactics *) Ltac2 @ external move : ident -> move_location -> unit := "ltac2" "tac_move". 
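(** A sketch relating the primitives above: applying the first constructor
    with no bindings, which is roughly what the [left] tactic does on a
    two-constructor inductive.  The name [first_constructor] is only for
    illustration. *)
Ltac2 first_constructor () := (* illustrative helper *)
  constructor_n false 1 NoBindings.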
Ltac2 @ external intro : ident option -> move_location option -> unit := "ltac2" "tac_intro". Ltac2 @ external specialize : constr_with_bindings -> intro_pattern option -> unit := "ltac2" "tac_specialize". (** extratactics *) Ltac2 @ external discriminate : evar_flag -> destruction_arg option -> unit := "ltac2" "tac_discriminate". Ltac2 @ external injection : evar_flag -> intro_pattern list option -> destruction_arg option -> unit := "ltac2" "tac_injection". Ltac2 @ external absurd : constr -> unit := "ltac2" "tac_absurd". Ltac2 @ external contradiction : constr_with_bindings option -> unit := "ltac2" "tac_contradiction". Ltac2 @ external autorewrite : bool -> (unit -> unit) option -> ident list -> clause -> unit := "ltac2" "tac_autorewrite". Ltac2 @ external subst : ident list -> unit := "ltac2" "tac_subst". Ltac2 @ external subst_all : unit -> unit := "ltac2" "tac_substall". (** auto *) Ltac2 Type debug := [ Off | Info | Debug ]. Ltac2 Type strategy := [ BFS | DFS ]. Ltac2 @ external trivial : debug -> (unit -> constr) list -> ident list option -> unit := "ltac2" "tac_trivial". Ltac2 @ external auto : debug -> int option -> (unit -> constr) list -> ident list option -> unit := "ltac2" "tac_auto". Ltac2 @ external eauto : debug -> int option -> int option -> (unit -> constr) list -> ident list option -> unit := "ltac2" "tac_eauto". Ltac2 @ external typeclasses_eauto : strategy option -> int option -> ident list option -> unit := "ltac2" "tac_typeclasses_eauto". Ltac2 @ external unify : constr -> constr -> unit := "ltac2" "tac_unify". coq-8.15.0/user-contrib/Ltac2/String.v000066400000000000000000000017341417001151100174010ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* char -> string := "ltac2" "string_make". Ltac2 @external length : string -> int := "ltac2" "string_length". Ltac2 @external get : string -> int -> char := "ltac2" "string_get". Ltac2 @external set : string -> int -> char -> unit := "ltac2" "string_set". coq-8.15.0/user-contrib/Ltac2/dune000066400000000000000000000002501417001151100166120ustar00rootroot00000000000000(coq.theory (name Ltac2) (package coq-stdlib) (synopsis "Ltac2 tactic language") (flags -w -deprecated-native-compiler-option) (libraries coq-core.plugins.ltac2)) coq-8.15.0/vernac/000077500000000000000000000000001417001151100136345ustar00rootroot00000000000000coq-8.15.0/vernac/assumptions.ml000066400000000000000000000374101417001151100165600ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* raise Not_found | (l, SFBmodule mb) :: _ when Label.equal l lab -> mb | _ :: fields -> search_mod_label lab fields let rec search_cst_label lab = function | [] -> raise Not_found | (l, SFBconst cb) :: _ when Label.equal l lab -> cb | _ :: fields -> search_cst_label lab fields let rec search_mind_label lab = function | [] -> raise Not_found | (l, SFBmind mind) :: _ when Label.equal l lab -> mind | _ :: fields -> search_mind_label lab fields (* TODO: using [empty_delta_resolver] below is probably slightly incorrect. But: a) I don't see currently what should be used instead b) this shouldn't be critical for Print Assumption. 
At worse some constants will have a canonical name which is non-canonical, leading to failures in [Global.lookup_constant], but our own [lookup_constant] should work. *) let rec fields_of_functor f subs mp0 args = function |NoFunctor a -> f subs mp0 args a |MoreFunctor (mbid,_,e) -> match args with | [] -> assert false (* we should only encounter applied functors *) | mpa :: args -> let subs = join (map_mbid mbid mpa empty_delta_resolver (*TODO*)) subs in fields_of_functor f subs mp0 args e let rec lookup_module_in_impl mp = match mp with | MPfile _ -> Global.lookup_module mp | MPbound _ -> Global.lookup_module mp | MPdot (mp',lab') -> if ModPath.equal mp' (Global.current_modpath ()) then Global.lookup_module mp else let fields = memoize_fields_of_mp mp' in search_mod_label lab' fields and memoize_fields_of_mp mp = try MPmap.find mp !modcache with Not_found -> let l = fields_of_mp mp in modcache := MPmap.add mp l !modcache; l and fields_of_mp mp = let mb = lookup_module_in_impl mp in let fields,inner_mp,subs = fields_of_mb empty_subst mb [] in let subs = if ModPath.equal inner_mp mp then subs else add_mp inner_mp mp mb.mod_delta subs in Modops.subst_structure subs fields and fields_of_mb subs mb args = match mb.mod_expr with |Algebraic expr -> fields_of_expression subs mb.mod_mp args expr |Struct sign -> fields_of_signature subs mb.mod_mp args sign |Abstract|FullStruct -> fields_of_signature subs mb.mod_mp args mb.mod_type (** The Abstract case above corresponds to [Declare Module] *) and fields_of_signature x = fields_of_functor (fun subs mp0 args struc -> assert (List.is_empty args); (struc, mp0, subs)) x and fields_of_expr subs mp0 args = function |MEident mp -> let mb = lookup_module_in_impl (subst_mp subs mp) in fields_of_mb subs mb args |MEapply (me1,mp2) -> fields_of_expr subs mp0 (mp2::args) me1 |MEwith _ -> assert false (* no 'with' in [mod_expr] *) and fields_of_expression x = fields_of_functor fields_of_expr x let lookup_constant_in_impl cst fallback = try let mp,lab = KerName.repr (Constant.canonical cst) in let fields = memoize_fields_of_mp mp in (* A module found this way is necessarily closed, in particular our constant cannot be in an opened section : *) search_cst_label lab fields with Not_found -> (* Either: - The module part of the constant isn't registered yet : we're still in it, so the [constant_body] found earlier (if any) was a true axiom. - The label has not been found in the structure. 
This is an error *) match fallback with | Some cb -> cb | None -> anomaly (str "Print Assumption: unknown constant " ++ Constant.print cst ++ str ".") let lookup_constant cst = let env = Global.env() in if not (Environ.mem_constant cst env) then lookup_constant_in_impl cst None else let cb = Environ.lookup_constant cst env in if Declareops.constant_has_body cb then cb else lookup_constant_in_impl cst (Some cb) let lookup_mind_in_impl mind = try let mp,lab = KerName.repr (MutInd.canonical mind) in let fields = memoize_fields_of_mp mp in search_mind_label lab fields with Not_found -> anomaly (str "Print Assumption: unknown inductive " ++ MutInd.print mind ++ str ".") let lookup_mind mind = let env = Global.env() in if Environ.mem_mind mind env then Environ.lookup_mind mind env else lookup_mind_in_impl mind (** Graph traversal of an object, collecting on the way the dependencies of traversed objects *) let label_of = let open GlobRef in function | ConstRef kn -> Constant.label kn | IndRef (kn,_) | ConstructRef ((kn,_),_) -> MutInd.label kn | VarRef id -> Label.of_id id let fold_with_full_binders g f n acc c = let open Context.Rel.Declaration in match kind c with | Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _ | Construct _ | Int _ | Float _ -> acc | Cast (c,_, t) -> f n (f n acc c) t | Prod (na,t,c) -> f (g (LocalAssum (na,t)) n) (f n acc t) c | Lambda (na,t,c) -> f (g (LocalAssum (na,t)) n) (f n acc t) c | LetIn (na,b,t,c) -> f (g (LocalDef (na,b,t)) n) (f n (f n acc b) t) c | App (c,l) -> Array.fold_left (f n) (f n acc c) l | Proj (_,c) -> f n acc c | Evar (_,l) -> List.fold_left (f n) acc l | Case (ci, u, pms, p, iv, c, bl) -> let mib = lookup_mind (fst ci.ci_ind) in let (ci, p, iv, c, bl) = Inductive.expand_case_specif mib (ci, u, pms, p, iv, c, bl) in Array.fold_left (f n) (f n (fold_invert (f n) (f n acc p) iv) c) bl | Fix (_,(lna,tl,bl)) -> let n' = CArray.fold_left2_i (fun i c n t -> g (LocalAssum (n,lift i t)) c) n lna tl in let fd = Array.map2 (fun t b -> (t,b)) tl bl in Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd | CoFix (_,(lna,tl,bl)) -> let n' = CArray.fold_left2_i (fun i c n t -> g (LocalAssum (n,lift i t)) c) n lna tl in let fd = Array.map2 (fun t b -> (t,b)) tl bl in Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd | Array(_u,t,def,ty) -> f n (f n (Array.fold_left (f n) acc t) def) ty let get_constant_body kn = let cb = lookup_constant kn in let access = Library.indirect_accessor in match cb.const_body with | Undef _ | Primitive _ -> None | Def c -> Some c | OpaqueDef o -> match Global.force_proof access o with | c, _ -> Some c | exception _ -> None (* missing delayed body, e.g. 
in vok mode *) let rec traverse current ctx accu t = let open GlobRef in match Constr.kind t with | Var id -> let body () = id |> Global.lookup_named |> NamedDecl.get_value in traverse_object accu body (VarRef id) | Const (kn, _) -> let body () = get_constant_body kn in traverse_object accu body (ConstRef kn) | Ind ((mind, _) as ind, _) -> traverse_inductive accu mind (IndRef ind) | Construct (((mind, _), _) as cst, _) -> traverse_inductive accu mind (ConstructRef cst) | Meta _ | Evar _ -> assert false | Case (_, _, _, ([|_|], oty), _, c, [||]) when Vars.noccurn 1 oty -> (* non dependent match on an inductive with no constructors *) begin match Constr.kind c with | Const (kn, _) when not (Declareops.constant_has_body (lookup_constant kn)) -> let (curr, data, ax2ty) = accu in let obj = ConstRef kn in let already_in = GlobRef.Map_env.mem obj data in let data = if not already_in then GlobRef.Map_env.add obj GlobRef.Set_env.empty data else data in let ty = (current, ctx, Vars.subst1 mkProp oty) in let ax2ty = try let l = GlobRef.Map_env.find obj ax2ty in GlobRef.Map_env.add obj (ty::l) ax2ty with Not_found -> GlobRef.Map_env.add obj [ty] ax2ty in (GlobRef.Set_env.add obj curr, data, ax2ty) | _ -> fold_with_full_binders Context.Rel.add (traverse current) ctx accu t end | _ -> fold_with_full_binders Context.Rel.add (traverse current) ctx accu t and traverse_object (curr, data, ax2ty) body obj = let data, ax2ty = let already_in = GlobRef.Map_env.mem obj data in if already_in then data, ax2ty else match body () (* Beware: this can be very costly *) with | None -> GlobRef.Map_env.add obj GlobRef.Set_env.empty data, ax2ty | Some body -> let contents,data,ax2ty = traverse (label_of obj) Context.Rel.empty (GlobRef.Set_env.empty,data,ax2ty) body in GlobRef.Map_env.add obj contents data, ax2ty in (GlobRef.Set_env.add obj curr, data, ax2ty) (** Collects the references occurring in the declaration of mutual inductive definitions. All the constructors and names of a mutual inductive definition share exactly the same dependencies. Also, there is no explicit dependency between mutually defined inductives and constructors. *) and traverse_inductive (curr, data, ax2ty) mind obj = let firstind_ref = (GlobRef.IndRef (mind, 0)) in let label = label_of obj in let data, ax2ty = (* Invariant : I_0 \in data iff I_i \in data iff c_ij \in data where I_0, I_1, ... are in the same mutual definition and c_ij are all their constructors. 
*) if (* recursive call: *) GlobRef.Set_env.mem firstind_ref curr || (* already in: *) GlobRef.Map_env.mem firstind_ref data then data, ax2ty else (* Take into account potential recursivity of ind in itself *) let curr = GlobRef.Set_env.add firstind_ref GlobRef.Set_env.empty in let accu = (curr, data, ax2ty) in let mib = lookup_mind mind in (* Collects references of parameters *) let param_ctx = mib.mind_params_ctxt in let nparam = List.length param_ctx in let accu = traverse_context label Context.Rel.empty accu param_ctx in (* For each inductive, collects references in their arity and in the type of constructors*) let (contents, data, ax2ty) = Array.fold_left (fun accu oib -> let arity_wo_param = List.rev (List.skipn nparam (List.rev oib.mind_arity_ctxt)) in let accu = traverse_context label param_ctx accu arity_wo_param in Array.fold_left (fun accu cst_typ -> let param_ctx, cst_typ_wo_param = Term.decompose_prod_n_assum nparam cst_typ in traverse label param_ctx accu cst_typ_wo_param) accu oib.mind_user_lc) accu mib.mind_packets in (* Maps all these dependencies to inductives and constructors*) let data = let contents = GlobRef.Set_env.remove firstind_ref contents in Array.fold_left_i (fun n data oib -> let ind = (mind, n) in let data = GlobRef.Map_env.add (GlobRef.IndRef ind) contents data in Array.fold_left_i (fun k data _ -> GlobRef.Map_env.add (GlobRef.ConstructRef (ind, k+1)) contents data ) data oib.mind_consnames) data mib.mind_packets in (data, ax2ty) in (GlobRef.Set_env.add obj curr, data, ax2ty) (** Collects references in a rel_context. *) and traverse_context current ctx accu ctxt = snd (Context.Rel.fold_outside (fun decl (ctx, accu) -> match decl with | Context.Rel.Declaration.LocalDef (_,c,t) -> let accu = traverse current ctx (traverse current ctx accu t) c in let ctx = Context.Rel.add decl ctx in ctx, accu | Context.Rel.Declaration.LocalAssum (_,t) -> let accu = traverse current ctx accu t in let ctx = Context.Rel.add decl ctx in ctx, accu) ctxt ~init:(ctx, accu)) let traverse current t = let () = modcache := MPmap.empty in traverse current Context.Rel.empty (GlobRef.Set_env.empty, GlobRef.Map_env.empty, GlobRef.Map_env.empty) t (** Hopefully bullet-proof function to recover the type of a constant. It just ignores all the universe stuff. There are many issues that can arise when considering terms out of any valid environment, so use with caution. 
*) let type_of_constant cb = cb.Declarations.const_type let uses_uip mib = Array.exists (fun mip -> mip.mind_relevance == Sorts.Irrelevant && Array.length mip.mind_nf_lc = 1 && List.length (fst mip.mind_nf_lc.(0)) = List.length mib.mind_params_ctxt) mib.mind_packets let assumptions ?(add_opaque=false) ?(add_transparent=false) st gr t = (* Only keep the transitive dependencies *) let (_, graph, ax2ty) = traverse (label_of gr) t in let open GlobRef in let fold obj _ accu = match obj with | VarRef id -> let decl = Global.lookup_named id in if is_local_assum decl then let t = Context.Named.Declaration.get_type decl in ContextObjectMap.add (Variable id) t accu else accu | ConstRef kn -> let cb = lookup_constant kn in let accu = if cb.const_typing_flags.check_guarded then accu else let l = try GlobRef.Map_env.find obj ax2ty with Not_found -> [] in ContextObjectMap.add (Axiom (Guarded obj, l)) Constr.mkProp accu in let accu = if cb.const_typing_flags.check_universes then accu else let l = try GlobRef.Map_env.find obj ax2ty with Not_found -> [] in ContextObjectMap.add (Axiom (TypeInType obj, l)) Constr.mkProp accu in if not (Declareops.constant_has_body cb) then let t = type_of_constant cb in let l = try GlobRef.Map_env.find obj ax2ty with Not_found -> [] in ContextObjectMap.add (Axiom (Constant kn,l)) t accu else if add_opaque && (Declareops.is_opaque cb || not (TransparentState.is_transparent_constant st kn)) then let t = type_of_constant cb in ContextObjectMap.add (Opaque kn) t accu else if add_transparent then let t = type_of_constant cb in ContextObjectMap.add (Transparent kn) t accu else accu | IndRef (m,_) | ConstructRef ((m,_),_) -> let mind = lookup_mind m in let accu = if mind.mind_typing_flags.check_positive then accu else let l = try GlobRef.Map_env.find obj ax2ty with Not_found -> [] in ContextObjectMap.add (Axiom (Positive m, l)) Constr.mkProp accu in let accu = if mind.mind_typing_flags.check_guarded then accu else let l = try GlobRef.Map_env.find obj ax2ty with Not_found -> [] in ContextObjectMap.add (Axiom (Guarded obj, l)) Constr.mkProp accu in let accu = if mind.mind_typing_flags.check_universes then accu else let l = try GlobRef.Map_env.find obj ax2ty with Not_found -> [] in ContextObjectMap.add (Axiom (TypeInType obj, l)) Constr.mkProp accu in let accu = if not (uses_uip mind) then accu else let l = try GlobRef.Map_env.find obj ax2ty with Not_found -> [] in ContextObjectMap.add (Axiom (UIP m, l)) Constr.mkProp accu in accu in GlobRef.Map_env.fold fold graph ContextObjectMap.empty coq-8.15.0/vernac/assumptions.mli000066400000000000000000000031611417001151100167250ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* constr -> (GlobRef.Set_env.t * GlobRef.Set_env.t GlobRef.Map_env.t * (Label.t * Constr.rel_context * types) list GlobRef.Map_env.t) (** Collects all the assumptions (optionally including opaque definitions) on which a term relies (together with their type). The above warning of {!traverse} also applies. 
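    A minimal usage sketch, where the name [axioms_of] is illustrative and
    not part of this interface:

      let axioms_of st gr c =
        ContextObjectMap.fold (fun obj ty acc -> (obj, ty) :: acc)
          (assumptions st gr c) []

    which lists every context object the term [c] (typically the body or
    type of [gr]) depends on, together with its type; opaque and
    transparent constants are included only when the corresponding
    optional flags are set.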
*) val assumptions : ?add_opaque:bool -> ?add_transparent:bool -> TransparentState.t -> GlobRef.t -> constr -> types ContextObjectMap.t coq-8.15.0/vernac/attributes.ml000066400000000000000000000304721417001151100163620ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Pp.str b | FlagString s -> Pp.(quote (str s)) let rec pr_vernac_flag_value = let open Pp in function | VernacFlagEmpty -> mt () | VernacFlagLeaf l -> str "=" ++ pr_vernac_flag_leaf l | VernacFlagList s -> surround (prlist_with_sep pr_comma pr_vernac_flag s) and pr_vernac_flag_r (s, arguments) = let open Pp in str s ++ (pr_vernac_flag_value arguments) and pr_vernac_flag {CAst.v} = pr_vernac_flag_r v let warn_unsupported_attributes = CWarnings.create ~name:"unsupported-attributes" ~category:"parsing" ~default:CWarnings.AsError (fun atts -> let keys = List.map (fun x -> fst x.CAst.v) atts in let keys = List.sort_uniq String.compare keys in let conj = match keys with [_] -> "this attribute: " | _ -> "these attributes: " in Pp.(str "This command does not support " ++ str conj ++ prlist str keys ++ str".")) let unsupported_attributes = function | [] -> () | atts -> let loc = List.fold_left (fun loc att -> Loc.merge_opt loc att.CAst.loc) None atts in warn_unsupported_attributes ?loc atts type 'a key_parser = ?loc:Loc.t -> 'a option -> vernac_flag_value -> 'a type 'a attribute = vernac_flags -> vernac_flags * 'a let parse_with_extra (p:'a attribute) (atts:vernac_flags) : vernac_flags * 'a = p atts let parse_drop_extra att atts = snd (parse_with_extra att atts) let parse (p:'a attribute) atts : 'a = let extra, v = parse_with_extra p atts in unsupported_attributes extra; v let make_attribute x = x module Notations = struct type 'a t = 'a attribute let return x = fun atts -> atts, x let (>>=) att f = fun atts -> let atts, v = att atts in f v atts let (>>) p1 p2 = fun atts -> let atts, () = p1 atts in p2 atts let map f att = fun atts -> let atts, v = att atts in atts, f v let (++) (p1:'a attribute) (p2:'b attribute) : ('a*'b) attribute = fun atts -> let atts, v1 = p1 atts in let atts, v2 = p2 atts in atts, (v1, v2) end open Notations let assert_empty ?loc k v = if v <> VernacFlagEmpty then user_err ?loc Pp.(str "Attribute " ++ str k ++ str " does not accept arguments") let error_twice ?loc ~name : 'a = user_err ?loc Pp.(str "Attribute for " ++ str name ++ str " specified twice.") let assert_once ?loc ~name prev = if Option.has_some prev then error_twice ?loc ~name let attribute_of_list (l:(string * 'a key_parser) list) : 'a option attribute = let rec p extra v = function | [] -> List.rev extra, v | ({CAst.v=key, attv; loc} as att) :: rem -> (match CList.assoc_f String.equal key l with | exception Not_found -> p (att::extra) v rem | parser -> let v = Some (parser ?loc v attv) in p extra v rem) in p [] None let single_key_parser ~name ~key v ?loc prev args = assert_empty ?loc key args; assert_once ?loc ~name prev; v let pr_possible_values ~values = Pp.(str "{" ++ prlist_with_sep pr_comma str (List.map fst values) ++ str "}") (** [key_value_attribute ~key ~default ~values] parses a attribute [key=value] with possible [key] [value] in [values], [default] is for compatibility for users doing [qualif(key)] which is parsed as [qualif(key=default)] *) let key_value_attribute ~key ~default ~(values : (string * 'a) list) : 'a option attribute = let parser ?loc = function | Some v -> 
CErrors.user_err ?loc Pp.(str "key '" ++ str key ++ str "' has been already set.") | None -> begin function | VernacFlagLeaf (FlagIdent b) -> begin match CList.assoc_f String.equal b values with | exception Not_found -> CErrors.user_err ?loc Pp.(str "Invalid value '" ++ str b ++ str "' for key " ++ str key ++ fnl () ++ str "use one of " ++ pr_possible_values ~values) | value -> value end | VernacFlagEmpty -> default | err -> CErrors.user_err ?loc Pp.(str "Invalid syntax " ++ pr_vernac_flag_r (key, err) ++ str ", try " ++ str key ++ str "=" ++ pr_possible_values ~values ++ str " instead.") end in attribute_of_list [key, parser] let bool_attribute ~name : bool option attribute = let values = ["yes", true; "no", false] in key_value_attribute ~key:name ~default:true ~values (* Variant of the [bool] attribute with only two values (bool has three). *) let get_bool_value ?loc ~key ~default = function | VernacFlagEmpty -> default | VernacFlagLeaf (FlagIdent "yes") -> true | VernacFlagLeaf (FlagIdent "no") -> false | _ -> user_err ?loc Pp.(str "Attribute " ++ str key ++ str " only accepts boolean values.") let enable_attribute ~key ~default : bool attribute = fun atts -> let this, extra = List.partition (fun {CAst.v=k, _} -> String.equal key k) atts in extra, match this with | [] -> default () | [ {CAst.v=_, value; loc} ] -> get_bool_value ?loc ~key ~default:true value | _ :: {CAst.loc} :: _ -> (* We report the location of the 2nd item *) error_twice ?loc ~name:key let qualify_attribute qual (parser:'a attribute) : 'a attribute = fun atts -> let rec extract extra qualified = function | [] -> List.rev extra, List.flatten (List.rev qualified) | {CAst.v=key,attv; loc} :: rem when String.equal key qual -> (match attv with | VernacFlagEmpty | VernacFlagLeaf _ -> CErrors.user_err ?loc Pp.(str "Malformed attribute " ++ str qual ++ str ": attribute list expected.") | VernacFlagList atts -> extract extra (atts::qualified) rem) | att :: rem -> extract (att::extra) qualified rem in let extra, qualified = extract [] [] atts in let rem, v = parser qualified in let rem = List.rev_map (fun rem -> CAst.make ?loc:rem.CAst.loc (qual, VernacFlagList [rem])) rem in let extra = List.rev_append rem extra in extra, v (** [program_mode] tells that Program mode has been activated, either globally via [Set Program] or locally via the Program command prefix. 
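    The flag itself is the [program_mode] reference declared just below,
    registered as the Program Mode boolean option; the [program] attribute
    defined afterwards reads it as its default value, so a command can
    check it with, e.g., [parse program atts] on its raw flags
    (illustrative call, the command supplies [atts]).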
*) let program_mode_option_name = ["Program";"Mode"] let program_mode = ref false let () = let open Goptions in declare_bool_option { optdepr = false; optkey = program_mode_option_name; optread = (fun () -> !program_mode); optwrite = (fun b -> program_mode:=b) } let program = enable_attribute ~key:"program" ~default:(fun () -> !program_mode) (* This is a bit complex as the grammar in g_vernac.mlg doesn't distingish between the boolean and ternary case.*) let option_locality_parser = let name = "Locality" in attribute_of_list [ ("local", single_key_parser ~name ~key:"local" Goptions.OptLocal); ("global", single_key_parser ~name ~key:"global" Goptions.OptGlobal); ("export", single_key_parser ~name ~key:"export" Goptions.OptExport); ] let option_locality = option_locality_parser >>= function | None -> return Goptions.OptDefault | Some l -> return l let hint_locality ~default = let open Hints in let name = "Locality" in attribute_of_list [ ("local", single_key_parser ~name ~key:"local" Local); ("global", single_key_parser ~name ~key:"global" SuperGlobal); ("export", single_key_parser ~name ~key:"export" Export); ] >>= function | Some v -> return v | None -> let v = default () in return v let really_hint_locality = hint_locality ~default:Hints.default_hint_locality (* locality is supposed to be true when local, false when global *) let locality = let name = "Locality" in attribute_of_list [ ("local", single_key_parser ~name ~key:"local" true); ("global", single_key_parser ~name ~key:"global" false); ] let ukey = "universes" let universe_polymorphism_option_name = ["Universe"; "Polymorphism"] let is_universe_polymorphism = let b = ref false in let () = let open Goptions in declare_bool_option { optdepr = false; optkey = universe_polymorphism_option_name; optread = (fun () -> !b); optwrite = ((:=) b) } in fun () -> !b let polymorphic = qualify_attribute ukey (bool_attribute ~name:"polymorphic") >>= function | Some b -> return b | None -> return (is_universe_polymorphism()) let template = qualify_attribute ukey (bool_attribute ~name:"template") let deprecation_parser : Deprecation.t key_parser = fun ?loc orig args -> assert_once ?loc ~name:"deprecation" orig; match args with | VernacFlagList [ {CAst.v="since", VernacFlagLeaf (FlagString since)}; {CAst.v="note", VernacFlagLeaf (FlagString note)} ] | VernacFlagList [ {CAst.v="note", VernacFlagLeaf (FlagString note)}; {CAst.v="since", VernacFlagLeaf (FlagString since)} ] -> Deprecation.make ~since ~note () | VernacFlagList [ {CAst.v="since", VernacFlagLeaf (FlagString since)} ] -> Deprecation.make ~since () | VernacFlagList [ {CAst.v="note", VernacFlagLeaf (FlagString note)} ] -> Deprecation.make ~note () | _ -> CErrors.user_err ?loc (Pp.str "Ill formed “deprecated” attribute") let deprecation = attribute_of_list ["deprecated",deprecation_parser] let only_locality atts = parse locality atts let only_polymorphism atts = parse polymorphic atts let vernac_polymorphic_flag loc = CAst.make ?loc (ukey, VernacFlagList [CAst.make ?loc ("polymorphic", VernacFlagEmpty)]) let vernac_monomorphic_flag loc = CAst.make ?loc (ukey, VernacFlagList [CAst.make ?loc ("polymorphic", VernacFlagLeaf (FlagIdent "no"))]) let canonical_field = enable_attribute ~key:"canonical" ~default:(fun () -> true) let canonical_instance = enable_attribute ~key:"canonical" ~default:(fun () -> false) let uses_parser : string key_parser = fun ?loc orig args -> assert_once ?loc ~name:"using" orig; match args with | VernacFlagLeaf (FlagString str) -> str | _ -> CErrors.user_err ?loc (Pp.str 
"Ill formed \"using\" attribute") let using = attribute_of_list ["using",uses_parser] let process_typing_att ?loc ~typing_flags att disable = let enable = not disable in match att with | "universes" -> { typing_flags with Declarations.check_universes = enable } | "guard" -> { typing_flags with Declarations.check_guarded = enable } | "positivity" -> { typing_flags with Declarations.check_positive = enable } | att -> CErrors.user_err ?loc Pp.(str "Unknown “typing” attribute: " ++ str att) let process_typing_disable ?loc ~key = function | VernacFlagEmpty | VernacFlagLeaf (FlagIdent "yes") -> true | VernacFlagLeaf (FlagIdent "no") -> false | _ -> CErrors.user_err ?loc Pp.(str "Ill-formed attribute value, must be " ++ str key ++ str "={yes, no}") let typing_flags_parser : Declarations.typing_flags key_parser = fun ?loc orig args -> let rec flag_parser typing_flags = function | [] -> typing_flags | {CAst.v=typing_att, disable; loc} :: rest -> let disable = process_typing_disable ?loc ~key:typing_att disable in let typing_flags = process_typing_att ?loc ~typing_flags typing_att disable in flag_parser typing_flags rest in match args with | VernacFlagList atts -> let typing_flags = Global.typing_flags () in flag_parser typing_flags atts | att -> CErrors.user_err ?loc Pp.(str "Ill-formed “typing” attribute: " ++ pr_vernac_flag_value att) let typing_flags = attribute_of_list ["bypass_check", typing_flags_parser] let raw_attributes : _ attribute = fun flags -> [], flags coq-8.15.0/vernac/attributes.mli000066400000000000000000000125131417001151100165270ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Pp.t type +'a attribute (** The type of attributes. When parsing attributes if an ['a attribute] is present then an ['a] value will be produced. In the most general case, an attribute transforms the raw flags along with its value. *) val parse : 'a attribute -> vernac_flags -> 'a (** Errors on unsupported attributes. *) val unsupported_attributes : vernac_flags -> unit (** Errors if the list of flags is nonempty. *) module Notations : sig (** Notations to combine attributes. *) include Monad.Def with type 'a t = 'a attribute (** Attributes form a monad. [a1 >>= f] means [f] will be run on the flags transformed by [a1] and using the value produced by [a1]. The trivial attribute [return x] does no action on the flags. *) val (++) : 'a attribute -> 'b attribute -> ('a * 'b) attribute (** Combine 2 attributes. If any keys are in common an error will be raised. *) end (** Definitions for some standard attributes. *) val raw_attributes : vernac_flags attribute val polymorphic : bool attribute val program : bool attribute val template : bool option attribute val locality : bool option attribute val option_locality : Goptions.option_locality attribute val deprecation : Deprecation.t option attribute val canonical_field : bool attribute val canonical_instance : bool attribute val using : string option attribute val hint_locality : default:(unit -> Hints.hint_locality) -> Hints.hint_locality attribute (** With the warning for Hint (and not for Instance etc) *) val really_hint_locality : Hints.hint_locality attribute (** Enable/Disable universe checking *) val typing_flags : Declarations.typing_flags option attribute val program_mode_option_name : string list (** For internal use when messing with the global option. 
*) val only_locality : vernac_flags -> bool option (** Parse attributes allowing only locality. *) val only_polymorphism : vernac_flags -> bool (** Parse attributes allowing only polymorphism. Uses the global flag for the default value. *) val parse_drop_extra : 'a attribute -> vernac_flags -> 'a (** Ignores unsupported attributes. *) val parse_with_extra : 'a attribute -> vernac_flags -> vernac_flags * 'a (** Returns unsupported attributes. *) (** * Defining attributes. *) type 'a key_parser = ?loc:Loc.t -> 'a option -> vernac_flag_value -> 'a (** A parser for some key in an attribute. It is given a nonempty ['a option] when the attribute is multiply set for some command. eg in [#[polymorphic] Monomorphic Definition foo := ...], when parsing [Monomorphic] it will be given [Some true]. *) val attribute_of_list : (string * 'a key_parser) list -> 'a option attribute (** Make an attribute from a list of key parsers together with their associated key. *) (** Define boolean attribute [name], of the form [name={yes,no}]. The attribute may only be set once for a command. *) val bool_attribute : name:string -> bool option attribute val qualify_attribute : string -> 'a attribute -> 'a attribute (** [qualified_attribute qual att] treats [#[qual(atts)]] like [att] treats [atts]. *) (** Combinators to help define your own parsers. See the implementation of [bool_attribute] for practical use. *) val assert_empty : ?loc:Loc.t -> string -> vernac_flag_value -> unit (** [assert_empty key v] errors if [v] is not empty. [key] is used in the error message as the name of the attribute. *) val assert_once : ?loc:Loc.t -> name:string -> 'a option -> unit (** [assert_once ~name v] errors if [v] is not empty. [name] is used in the error message as the name of the attribute. Used to ensure that a given attribute is not reapeated. *) val single_key_parser : name:string -> key:string -> 'a -> 'a key_parser (** [single_key_parser ~name ~key v] makes a parser for attribute [name] giving the constant value [v] for key [key] taking no arguments. [name] may only be given once. *) val make_attribute : (vernac_flags -> vernac_flags * 'a) -> 'a attribute (** Make an attribute using the internal representation, thus with access to the full power of attributes. Unstable. *) (** Compatibility values for parsing [Polymorphic]. *) val vernac_polymorphic_flag : Loc.t option -> vernac_flag val vernac_monomorphic_flag : Loc.t option -> vernac_flag (** For internal use. 
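    These expose the Universe Polymorphism option: its key and a function
    reading its current value, which [polymorphic] uses as its default.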
*) val universe_polymorphism_option_name : string list val is_universe_polymorphism : unit -> bool coq-8.15.0/vernac/auto_ind_decl.ml000066400000000000000000001247251417001151100167720ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* [] | t::q -> t::(kick_last q) | [] -> failwith "kick_last" and aux = function | (0,l') -> l' | (n,h::t) -> aux (n-1,t) | _ -> failwith "quick_chop" in if n > (List.length l) then failwith "quick_chop args" else kick_last (aux (n,l) ) let deconstruct_type t = let l,r = decompose_prod t in (List.rev_map snd l)@[r] exception EqNotFound of inductive * inductive exception EqUnknown of string exception UndefinedCst of string exception InductiveWithProduct exception InductiveWithSort exception ParameterWithoutEquality of GlobRef.t exception NonSingletonProp of inductive exception DecidabilityMutualNotSupported exception NoDecidabilityCoInductive exception ConstructorWithNonParametricInductiveType of inductive exception DecidabilityIndicesNotSupported (* Some pre declaration of constant we are going to use *) let andb_prop = fun _ -> UnivGen.constr_of_monomorphic_global (Global.env ()) (Coqlib.lib_ref "core.bool.andb_prop") let andb_true_intro = fun _ -> UnivGen.constr_of_monomorphic_global (Global.env ()) (Coqlib.lib_ref "core.bool.andb_true_intro") (* We avoid to use lazy as the binding of constants can change *) let bb () = UnivGen.constr_of_monomorphic_global (Global.env ()) (Coqlib.lib_ref "core.bool.type") let tt () = UnivGen.constr_of_monomorphic_global (Global.env ()) (Coqlib.lib_ref "core.bool.true") let ff () = UnivGen.constr_of_monomorphic_global (Global.env ()) (Coqlib.lib_ref "core.bool.false") let eq () = UnivGen.constr_of_monomorphic_global (Global.env ()) (Coqlib.lib_ref "core.eq.type") let sumbool () = UnivGen.constr_of_monomorphic_global (Global.env ()) (Coqlib.lib_ref "core.sumbool.type") let andb = fun _ -> UnivGen.constr_of_monomorphic_global (Global.env ()) (Coqlib.lib_ref "core.bool.andb") let induct_on c = induction false None c None None let destruct_on c = destruct false None c None None let destruct_on_using c id = destruct false None c (Some (CAst.make @@ IntroOrPattern [[CAst.make @@ IntroNaming IntroAnonymous]; [CAst.make @@ IntroNaming (IntroIdentifier id)]])) None let destruct_on_as c l = destruct false None c (Some (CAst.make l)) None let inj_flags = Some { Equality.keep_proof_equalities = true; (* necessary *) Equality.injection_pattern_l2r_order = true; (* does not matter here *) } let my_discr_tac = Equality.discr_tac false None let my_inj_tac x = Equality.inj inj_flags None false None (EConstr.mkVar x,NoBindings) (* reconstruct the inductive with the correct de Bruijn indexes *) let mkFullInd env (ind,u) n = let mib = Environ.lookup_mind (fst ind) env in let nparams = mib.mind_nparams in let nparrec = mib.mind_nparams_rec in (* params context divided *) let lnonparrec,lnamesparrec = context_chop (nparams-nparrec) mib.mind_params_ctxt in if nparrec > 0 then mkApp (mkIndU (ind,u), Array.of_list(Context.Rel.instance_list mkRel (nparrec+n) lnamesparrec)) else mkIndU (ind,u) let check_bool_is_defined () = if not (Coqlib.has_ref "core.bool.type") then raise (UndefinedCst "bool") let check_no_indices mib = if Array.exists (fun mip -> mip.mind_nrealargs <> 0) mib.mind_packets then raise DecidabilityIndicesNotSupported let get_scheme handle k ind = match local_lookup_scheme 
handle k ind with | None -> assert false | Some c -> c let beq_scheme_kind_aux = ref (fun _ -> failwith "Undefined") let get_inductive_deps env kn = (* fetching the mutual inductive body *) let mib = Environ.lookup_mind kn env in (* number of inductives in the mutual *) let nb_ind = Array.length mib.mind_packets in (* number of params in the type *) let nparrec = mib.mind_nparams_rec in check_no_indices mib; let make_one_eq accu i = (* This function is only trying to recursively compute the inductive types appearing as arguments of the constructors. This is done to support equality decision over hereditarily first-order types. It could be perfomed in a much cleaner way, e.g. using the kernel normal form of constructor types and kernel whd_all for the argument types. *) let rec aux accu c = let (c,a) = Reductionops.whd_betaiota_stack env Evd.empty EConstr.(of_constr c) in let (c,a) = EConstr.Unsafe.(to_constr c, List.map to_constr a) in match Constr.kind c with | Cast (x,_,_) -> aux accu (Term.applist (x,a)) | App _ -> assert false | Ind ((kn', _), _) -> if Environ.QMutInd.equal env kn kn' then accu else List.fold_left aux (kn' :: accu) a | Const (kn, u) -> (match Environ.constant_opt_value_in env (kn, u) with | Some c -> aux accu (Term.applist (c,a)) | None -> accu) | Rel _ | Var _ | Sort _ | Prod _ | Lambda _ | LetIn _ | Proj _ | Construct _ | Case _ | CoFix _ | Fix _ | Meta _ | Evar _ | Int _ | Float _ | Array _ -> accu in let u = Univ.Instance.empty in let constrs n = get_constructors env (make_ind_family (((kn, i), u), Context.Rel.instance_list mkRel (n+nb_ind-1) mib.mind_params_ctxt)) in let constrsi = constrs (3+nparrec) in let fold i accu arg = let fold accu c = aux accu (RelDecl.get_type c) in List.fold_left fold accu arg.cs_args in Array.fold_left_i fold accu constrsi in Array.fold_left_i (fun i accu _ -> make_one_eq accu i) [] mib.mind_packets let build_beq_scheme_deps env kn = let inds = get_inductive_deps env kn in List.map (fun ind -> SchemeMutualDep (ind, !beq_scheme_kind_aux ())) inds let build_beq_scheme env handle kn = check_bool_is_defined (); (* fetching the mutual inductive body *) let mib = Environ.lookup_mind kn env in (* number of inductives in the mutual *) let nb_ind = Array.length mib.mind_packets in (* number of params in the type *) let nparams = mib.mind_nparams in let nparrec = mib.mind_nparams_rec in check_no_indices mib; (* params context divided *) let lnonparrec,lnamesparrec = context_chop (nparams-nparrec) mib.mind_params_ctxt in (* predef coq's boolean type *) (* rec name *) let rec_name i =(Id.to_string (Array.get mib.mind_packets i).mind_typename)^ "_eqrec" in (* construct the "fun A B ... N, eqA eqB eqC ... 
N => fixpoint" part *) let create_input c = let myArrow u v = mkArrow u Sorts.Relevant (lift 1 v) and eqName = function | Name s -> Id.of_string ("eq_"^(Id.to_string s)) | Anonymous -> Id.of_string "eq_A" in let ext_rel_list = Context.Rel.instance_list mkRel 0 lnamesparrec in let lift_cnt = ref 0 in let eqs_typ = List.map (fun aa -> let a = lift !lift_cnt aa in incr lift_cnt; myArrow a (myArrow a (bb ())) ) ext_rel_list in let eq_input = List.fold_left2 ( fun a b decl -> (* mkLambda(n,b,a) ) *) (* here I leave the Naming thingy so that the type of the function is more readable for the user *) mkNamedLambda (map_annot eqName (RelDecl.get_annot decl)) b a ) c (List.rev eqs_typ) lnamesparrec in List.fold_left (fun a decl ->(* mkLambda(n,t,a)) eq_input rel_list *) (* Same here , hoping the auto renaming will do something good ;) *) let x = map_annot (function Name s -> s | Anonymous -> Id.of_string "A") (RelDecl.get_annot decl) in mkNamedLambda x (RelDecl.get_type decl) a) eq_input lnamesparrec in let make_one_eq cur = let u = Univ.Instance.empty in let ind = (kn,cur),u (* FIXME *) in (* current inductive we are working on *) let cur_packet = mib.mind_packets.(snd (fst ind)) in (* Inductive toto : [rettyp] := *) let rettyp = Inductive.type_of_inductive ((mib,cur_packet),u) in (* split rettyp in a list without the non rec params and the last -> e.g. Inductive vec (A:Set) : nat -> Set := ... will do [nat] *) let rettyp_l = quick_chop nparrec (deconstruct_type rettyp) in (* give a type A, this function tries to find the equality on A declared previously *) (* nlist = the number of args (A , B , ... ) eqA = the de Bruijn index of the first eq param ndx = how much to translate due to the 2nd Case *) let compute_A_equality rel_list nlist eqA ndx t = let lifti = ndx in let rec aux c = let (c,a) = Reductionops.whd_betaiota_stack env Evd.empty EConstr.(of_constr c) in let (c,a) = EConstr.Unsafe.(to_constr c, List.map to_constr a) in match Constr.kind c with | Rel x -> mkRel (x-nlist+ndx) | Var x -> (* Support for working in a context with "eq_x : x -> x -> bool" *) let eid = Id.of_string ("eq_"^(Id.to_string x)) in let () = try ignore (Environ.lookup_named eid env) with Not_found -> raise (ParameterWithoutEquality (GlobRef.VarRef x)) in mkVar eid | Cast (x,_,_) -> aux (Term.applist (x,a)) | App _ -> assert false | Ind ((kn',i as ind'),u) (*FIXME: universes *) -> if Environ.QMutInd.equal env kn kn' then mkRel(eqA-nlist-i+nb_ind-1) else begin try let c = get_scheme handle (!beq_scheme_kind_aux()) ind' in let eq = mkConst c in let eqa = Array.of_list @@ List.map aux a in let args = Array.append (Array.of_list (List.map (fun x -> lift lifti x) a)) eqa in if Int.equal (Array.length args) 0 then eq else mkApp (eq, args) with Not_found -> raise(EqNotFound (ind', fst ind)) end | Sort _ -> raise InductiveWithSort | Prod _ -> raise InductiveWithProduct | Lambda _-> raise (EqUnknown "abstraction") | LetIn _ -> raise (EqUnknown "let-in") | Const (kn, u) -> (match Environ.constant_opt_value_in env (kn, u) with | Some c -> aux (Term.applist (c,a)) | None -> (* Support for working in a context with "eq_x : x -> x -> bool" *) (* Needs Hints, see test suite *) let eq_lbl = Label.make ("eq_" ^ Label.to_string (Constant.label kn)) in let kneq = Constant.change_label kn eq_lbl in if Environ.mem_constant kneq env then let _ = Environ.constant_opt_value_in env (kneq, u) in Term.applist (mkConst kneq,a) else raise (ParameterWithoutEquality (GlobRef.ConstRef kn))) | Proj _ -> raise (EqUnknown "projection") | Construct _ -> 
raise (EqUnknown "constructor") | Case _ -> raise (EqUnknown "match") | CoFix _ -> raise (EqUnknown "cofix") | Fix _ -> raise (EqUnknown "fix") | Meta _ -> raise (EqUnknown "meta-variable") | Evar _ -> raise (EqUnknown "existential variable") | Int _ -> raise (EqUnknown "int") | Float _ -> raise (EqUnknown "float") | Array _ -> raise (EqUnknown "array") in aux t in (* construct the predicate for the Case part*) let do_predicate rel_list n = List.fold_left (fun a b -> mkLambda(make_annot Anonymous Sorts.Relevant,b,a)) (mkLambda (make_annot Anonymous Sorts.Relevant, mkFullInd env ind (n+3+(List.length rettyp_l)+nb_ind-1), (bb ()))) (List.rev rettyp_l) in (* make_one_eq *) (* do the [| C1 ... => match Y with ... end ... Cn => match Y with ... end |] part *) let rci = Sorts.Relevant in (* TODO relevance *) let ci = make_case_info env (fst ind) rci MatchStyle in let constrs n = let params = Context.Rel.instance_list mkRel (n+nb_ind-1) mib.mind_params_ctxt in get_constructors env (make_ind_family (ind, params)) in let constrsi = constrs (3+nparrec) in let n = Array.length constrsi in let ar = Array.init n (fun i -> let nb_cstr_args = List.length constrsi.(i).cs_args in let constrsj = constrs (3+nparrec+nb_cstr_args) in let ar2 = Array.init n (fun j -> if Int.equal i j then let cc = match nb_cstr_args with | 0 -> tt () | _ -> let eqs = Array.init nb_cstr_args (fun ndx -> let cc = RelDecl.get_type (List.nth constrsi.(i).cs_args ndx) in let eqA = compute_A_equality rel_list nparrec (nparrec+3+2*nb_cstr_args) (nb_cstr_args+ndx+1) cc in mkApp (eqA, [|mkRel (ndx+1+nb_cstr_args);mkRel (ndx+1)|])) in Array.fold_left (fun a b -> mkApp (andb(),[|b;a|])) eqs.(0) (Array.sub eqs 1 (nb_cstr_args - 1)) in List.fold_left (fun a decl -> mkLambda (RelDecl.get_annot decl, RelDecl.get_type decl, a)) cc constrsj.(j).cs_args else List.fold_left (fun a decl -> mkLambda (RelDecl.get_annot decl, RelDecl.get_type decl, a)) (ff ()) (constrsj.(j).cs_args)) in let pred = EConstr.of_constr (do_predicate rel_list nb_cstr_args) in let case = simple_make_case_or_project env (Evd.from_env env) ci pred NoInvert (EConstr.mkVar (Id.of_string "Y")) (EConstr.of_constr_array ar2) in List.fold_left (fun a decl -> mkLambda (RelDecl.get_annot decl, RelDecl.get_type decl, a)) (EConstr.Unsafe.to_constr case) (constrsi.(i).cs_args)) in let pred = EConstr.of_constr (do_predicate rel_list 0) in let case = simple_make_case_or_project env (Evd.from_env env) ci pred NoInvert (EConstr.mkVar (Id.of_string "X")) (EConstr.of_constr_array ar) in mkNamedLambda (make_annot (Id.of_string "X") Sorts.Relevant) (mkFullInd env ind (nb_ind-1+1)) ( mkNamedLambda (make_annot (Id.of_string "Y") Sorts.Relevant) (mkFullInd env ind (nb_ind-1+2)) ( (EConstr.Unsafe.to_constr case))) in (* build_beq_scheme *) let names = Array.make nb_ind (make_annot Anonymous Sorts.Relevant) and types = Array.make nb_ind mkSet and cores = Array.make nb_ind mkSet in let u = Univ.Instance.empty in for i=0 to (nb_ind-1) do names.(i) <- make_annot (Name (Id.of_string (rec_name i))) Sorts.Relevant; types.(i) <- mkArrow (mkFullInd env ((kn,i),u) 0) Sorts.Relevant (mkArrow (mkFullInd env ((kn,i),u) 1) Sorts.Relevant (bb ())); let c = make_one_eq i in cores.(i) <- c; done; let res = Array.init nb_ind (fun i -> let kelim = Inductive.elim_sort (mib,mib.mind_packets.(i)) in if not (Sorts.family_leq InSet kelim) then raise (NonSingletonProp (kn,i)); let fix = match mib.mind_finite with | CoFinite -> raise NoDecidabilityCoInductive; | Finite -> mkFix (((Array.make nb_ind 
0),i),(names,types,cores)) | BiFinite -> (* If the inductive type is not recursive, the fixpoint is not used, so let's replace it with garbage *) let subst = List.init nb_ind (fun _ -> mkProp) in Vars.substl subst cores.(i) in create_input fix) in res, UState.from_env env let beq_scheme_kind = declare_mutual_scheme_object "_beq" ~deps:build_beq_scheme_deps build_beq_scheme let _ = beq_scheme_kind_aux := fun () -> beq_scheme_kind (* This function tryies to get the [inductive] between a constr the constr should be Ind i or App(Ind i,[|args|]) *) let destruct_ind env sigma c = let open EConstr in let (c,v) = Reductionops.whd_all_stack env sigma c in destInd sigma c, Array.of_list v let bl_scheme_kind_aux = ref (fun () -> failwith "Undefined") let lb_scheme_kind_aux = ref (fun () -> failwith "Undefined") (* In the following, avoid is the list of names to avoid. If the args of the Inductive type are A1 ... An then avoid should be [| lb_An ... lb _A1 (resp. bl_An ... bl_A1) eq_An .... eq_A1 An ... A1 |] so from Ai we can find the correct eq_Ai bl_ai or lb_ai *) (* used in the leib -> bool side*) let do_replace_lb handle aavoid narg p q = let open EConstr in let avoid = Array.of_list aavoid in let do_arg env sigma hd v offset = match kind sigma v with | Var s -> let x = narg*offset in let n = Array.length avoid in let rec find i = if Id.equal avoid.(n-i) s then avoid.(n-i-x) else (if i (* Works in specific situations where the args have to be already declared as a Parameter (see example "J" in test file SchemeEquality.v) *) let lbl = Label.to_string (Constant.label cst) in let newlbl = if Int.equal offset 1 then ("eq_" ^ lbl) else (lbl ^ "_lb") in let newcst = Constant.change_label cst (Label.make newlbl) in if Environ.mem_constant newcst env then mkConst newcst else raise (ConstructorWithNonParametricInductiveType (fst hd)) | _ -> raise (ConstructorWithNonParametricInductiveType (fst hd)) in Proofview.Goal.enter begin fun gl -> let type_of_pq = Tacmach.pf_get_type_of gl p in let sigma = Tacmach.project gl in let env = Tacmach.pf_env gl in let u,v = destruct_ind env sigma type_of_pq in let c = get_scheme handle (!lb_scheme_kind_aux ()) (fst u) in let lb_type_of_p = mkConst c in Proofview.tclEVARMAP >>= fun sigma -> let lb_args = Array.append (Array.append v (Array.Smart.map (fun x -> do_arg env sigma u x 1) v)) (Array.Smart.map (fun x -> do_arg env sigma u x 2) v) in let app = if Array.is_empty lb_args then lb_type_of_p else mkApp (lb_type_of_p,lb_args) in Tacticals.tclTHENLIST [ Equality.replace p q ; apply app ; Auto.default_auto] end (* used in the bool -> leb side *) let do_replace_bl handle (ind,u as indu) aavoid narg lft rgt = let open EConstr in let avoid = Array.of_list aavoid in let do_arg env sigma hd v offset = match kind sigma v with | Var s -> let x = narg*offset in let n = Array.length avoid in let rec find i = if Id.equal avoid.(n-i) s then avoid.(n-i-x) else (if i (* Works in specific situations where the args have to be already declared as a Parameter (see example "J" in test file SchemeEquality.v) *) let lbl = Label.to_string (Constant.label cst) in let newlbl = if Int.equal offset 1 then ("eq_" ^ lbl) else (lbl ^ "_bl") in let newcst = Constant.change_label cst (Label.make newlbl) in if Environ.mem_constant newcst env then mkConst newcst else raise (ConstructorWithNonParametricInductiveType (fst hd)) | _ -> raise (ConstructorWithNonParametricInductiveType (fst hd)) in let rec aux l1 l2 = match (l1,l2) with | (t1::q1,t2::q2) -> Proofview.Goal.enter begin fun gl -> let sigma = 
Tacmach.project gl in let env = Tacmach.pf_env gl in if EConstr.eq_constr sigma t1 t2 then aux q1 q2 else ( let tt1 = Tacmach.pf_get_type_of gl t1 in let u,v = try destruct_ind env sigma tt1 (* trick so that the good sequence is returned*) with e when CErrors.noncritical e -> indu,[||] in if Ind.CanOrd.equal (fst u) ind then Tacticals.tclTHENLIST [Equality.replace t1 t2; Auto.default_auto ; aux q1 q2 ] else ( let c = get_scheme handle (!bl_scheme_kind_aux ()) (fst u) in let bl_t1 = mkConst c in let bl_args = Array.append (Array.append v (Array.Smart.map (fun x -> do_arg env sigma u x 1) v)) (Array.Smart.map (fun x -> do_arg env sigma u x 2) v ) in let app = if Array.is_empty bl_args then bl_t1 else mkApp (bl_t1,bl_args) in Tacticals.tclTHENLIST [ Equality.replace_by t1 t2 (Tacticals.tclTHEN (apply app) (Auto.default_auto)) ; aux q1 q2 ] ) ) end | ([],[]) -> Proofview.tclUNIT () | _ -> Tacticals.tclZEROMSG (str "Both side of the equality must have the same arity.") in Proofview.tclEVARMAP >>= fun sigma -> begin try Proofview.tclUNIT (destApp sigma lft) with DestKO -> Tacticals.tclZEROMSG (str "replace failed.") end >>= fun (ind1,ca1) -> begin try Proofview.tclUNIT (destApp sigma rgt) with DestKO -> Tacticals.tclZEROMSG (str "replace failed.") end >>= fun (ind2,ca2) -> begin try Proofview.tclUNIT (fst (destInd sigma ind1)) with DestKO -> begin try Proofview.tclUNIT (fst (fst (destConstruct sigma ind1))) with DestKO -> Tacticals.tclZEROMSG (str "The expected type is an inductive one.") end end >>= fun (sp1,i1) -> begin try Proofview.tclUNIT (fst (destInd sigma ind2)) with DestKO -> begin try Proofview.tclUNIT (fst (fst (destConstruct sigma ind2))) with DestKO -> Tacticals.tclZEROMSG (str "The expected type is an inductive one.") end end >>= fun (sp2,i2) -> Proofview.tclENV >>= fun env -> if not (Environ.QMutInd.equal env sp1 sp2) || not (Int.equal i1 i2) then Tacticals.tclZEROMSG (str "Eq should be on the same type") else aux (Array.to_list ca1) (Array.to_list ca2) (* create, from a list of ids [i1,i2,...,in] the list [(in,eq_in,in_bl,in_al),,...,(i1,eq_i1,i1_bl_i1_al )] *) let list_id l = List.fold_left ( fun a decl -> let s' = match RelDecl.get_name decl with Name s -> Id.to_string s | Anonymous -> "A" in (Id.of_string s',Id.of_string ("eq_"^s'), Id.of_string (s'^"_bl"), Id.of_string (s'^"_lb")) ::a ) [] l let avoid_of_list_id list_id = List.fold_left (fun avoid (s,seq,sbl,slb) -> List.fold_left (fun avoid id -> Id.Set.add id avoid) avoid [s;seq;sbl;slb]) Id.Set.empty list_id (* build the right eq_I A B.. N eq_A .. 
eq_N *) let eqI handle ind list_id = let eA = Array.of_list((List.map (fun (s,_,_,_) -> mkVar s) list_id)@ (List.map (fun (_,seq,_,_)-> mkVar seq) list_id )) and e = mkConst (get_scheme handle beq_scheme_kind ind) in mkApp(e,eA) (**********************************************************************) (* Boolean->Leibniz *) open Namegen let compute_bl_goal env handle ind lnamesparrec nparrec = let list_id = list_id lnamesparrec in let eqI = eqI handle ind list_id in let avoid = avoid_of_list_id list_id in let x = next_ident_away (Id.of_string "x") avoid in let y = next_ident_away (Id.of_string "y") (Id.Set.add x avoid) in let create_input c = let bl_typ = List.map (fun (s,seq,_,_) -> mkNamedProd (make_annot x Sorts.Relevant) (mkVar s) ( mkNamedProd (make_annot y Sorts.Relevant) (mkVar s) ( mkArrow ( mkApp(eq (),[|bb (); mkApp(mkVar seq,[|mkVar x;mkVar y|]);tt () |])) Sorts.Relevant ( mkApp(eq (),[|mkVar s;mkVar x;mkVar y|])) )) ) list_id in let bl_input = List.fold_left2 ( fun a (s,_,sbl,_) b -> mkNamedProd (make_annot sbl Sorts.Relevant) b a ) c (List.rev list_id) (List.rev bl_typ) in let eqs_typ = List.map (fun (s,_,_,_) -> mkProd(make_annot Anonymous Sorts.Relevant,mkVar s,mkProd(make_annot Anonymous Sorts.Relevant,mkVar s,(bb ()))) ) list_id in let eq_input = List.fold_left2 ( fun a (s,seq,_,_) b -> mkNamedProd (make_annot seq Sorts.Relevant) b a ) bl_input (List.rev list_id) (List.rev eqs_typ) in List.fold_left (fun a decl -> let x = map_annot (function Name s -> s | Anonymous -> next_ident_away (Id.of_string "A") avoid) (RelDecl.get_annot decl) in mkNamedProd x (RelDecl.get_type decl) a) eq_input lnamesparrec in let u = Univ.Instance.empty in create_input ( mkNamedProd (make_annot x Sorts.Relevant) (mkFullInd env (ind,u) nparrec) ( mkNamedProd (make_annot y Sorts.Relevant) (mkFullInd env (ind,u) (nparrec+1)) ( mkArrow (mkApp(eq (),[|bb ();mkApp(eqI,[|mkVar x;mkVar y|]);tt ()|])) Sorts.Relevant (mkApp(eq (),[|mkFullInd env (ind,u) (nparrec+3);mkVar x;mkVar y|])) ))) let compute_bl_tact handle ind lnamesparrec nparrec = let list_id = list_id lnamesparrec in let first_intros = ( List.map (fun (s,_,_,_) -> s ) list_id ) @ ( List.map (fun (_,seq,_,_ ) -> seq) list_id ) @ ( List.map (fun (_,_,sbl,_ ) -> sbl) list_id ) in intros_using_then first_intros begin fun fresh_first_intros -> Tacticals.tclTHENLIST [ intro_using_then (Id.of_string "x") (fun freshn -> induct_on (EConstr.mkVar freshn)); intro_using_then (Id.of_string "y") (fun freshm -> destruct_on (EConstr.mkVar freshm)); intro_using_then (Id.of_string "Z") begin fun freshz -> Tacticals.tclTHENLIST [ intros; Tacticals.tclTRY ( Tacticals.tclORELSE reflexivity my_discr_tac ); simpl_in_hyp (freshz,Locus.InHyp); (* repeat ( apply andb_prop in z;let z1:= fresh "Z" in destruct z as [z1 z]). *) Tacticals.tclREPEAT ( Tacticals.tclTHENLIST [ Simple.apply_in freshz (EConstr.of_constr (andb_prop())); destruct_on_as (EConstr.mkVar freshz) (IntroOrPattern [[CAst.make @@ IntroNaming (IntroFresh (Id.of_string "Z")); CAst.make @@ IntroNaming (IntroIdentifier freshz)]]) ]); (* Ci a1 ... an = Ci b1 ... 
bn replace bi with ai; auto || replace bi with ai by apply typeofbi_prod ; auto *) Proofview.Goal.enter begin fun gl -> let concl = Proofview.Goal.concl gl in let sigma = Tacmach.project gl in match EConstr.kind sigma concl with | App (c,ca) -> ( match EConstr.kind sigma c with | Ind (indeq, u) -> if GlobRef.equal (GlobRef.IndRef indeq) Coqlib.(lib_ref "core.eq.type") then Tacticals.tclTHEN (do_replace_bl handle ind (List.rev fresh_first_intros) nparrec (ca.(2)) (ca.(1))) Auto.default_auto else Tacticals.tclZEROMSG (str "Failure while solving Boolean->Leibniz.") | _ -> Tacticals.tclZEROMSG (str" Failure while solving Boolean->Leibniz.") ) | _ -> Tacticals.tclZEROMSG (str "Failure while solving Boolean->Leibniz.") end ] end ] end let make_bl_scheme env handle mind = let mib = Environ.lookup_mind mind env in if not (Int.equal (Array.length mib.mind_packets) 1) then user_err (str "Automatic building of boolean->Leibniz lemmas not supported"); let ind = (mind,0) in let nparams = mib.mind_nparams in let nparrec = mib.mind_nparams_rec in let lnonparrec,lnamesparrec = (* TODO subst *) context_chop (nparams-nparrec) mib.mind_params_ctxt in let bl_goal = compute_bl_goal env handle ind lnamesparrec nparrec in let uctx = UState.from_env env in let bl_goal = EConstr.of_constr bl_goal in let (ans, _, _, _, ctx) = Declare.build_by_tactic ~poly:false ~side_eff:false env ~uctx ~typ:bl_goal (compute_bl_tact handle (ind, EConstr.EInstance.empty) lnamesparrec nparrec) in ([|ans|], ctx) let make_bl_scheme_deps env ind = let inds = get_inductive_deps env ind in let map ind = SchemeMutualDep (ind, !bl_scheme_kind_aux ()) in SchemeMutualDep (ind, beq_scheme_kind) :: List.map map inds let bl_scheme_kind = declare_mutual_scheme_object "_dec_bl" ~deps:make_bl_scheme_deps make_bl_scheme let _ = bl_scheme_kind_aux := fun () -> bl_scheme_kind (**********************************************************************) (* Leibniz->Boolean *) let compute_lb_goal env handle ind lnamesparrec nparrec = let list_id = list_id lnamesparrec in let eq = eq () and tt = tt () and bb = bb () in let avoid = avoid_of_list_id list_id in let eqI = eqI handle ind list_id in let x = next_ident_away (Id.of_string "x") avoid in let y = next_ident_away (Id.of_string "y") (Id.Set.add x avoid) in let create_input c = let lb_typ = List.map (fun (s,seq,_,_) -> mkNamedProd (make_annot x Sorts.Relevant) (mkVar s) ( mkNamedProd (make_annot y Sorts.Relevant) (mkVar s) ( mkArrow ( mkApp(eq,[|mkVar s;mkVar x;mkVar y|])) Sorts.Relevant ( mkApp(eq,[|bb;mkApp(mkVar seq,[|mkVar x;mkVar y|]);tt|])) )) ) list_id in let lb_input = List.fold_left2 ( fun a (s,_,_,slb) b -> mkNamedProd (make_annot slb Sorts.Relevant) b a ) c (List.rev list_id) (List.rev lb_typ) in let eqs_typ = List.map (fun (s,_,_,_) -> mkProd(make_annot Anonymous Sorts.Relevant,mkVar s, mkProd(make_annot Anonymous Sorts.Relevant,mkVar s,bb)) ) list_id in let eq_input = List.fold_left2 ( fun a (s,seq,_,_) b -> mkNamedProd (make_annot seq Sorts.Relevant) b a ) lb_input (List.rev list_id) (List.rev eqs_typ) in List.fold_left (fun a decl -> let x = map_annot (function Name s -> s | Anonymous -> Id.of_string "A") (RelDecl.get_annot decl) in mkNamedProd x (RelDecl.get_type decl) a) eq_input lnamesparrec in let u = Univ.Instance.empty in create_input ( mkNamedProd (make_annot x Sorts.Relevant) (mkFullInd env (ind,u) nparrec) ( mkNamedProd (make_annot y Sorts.Relevant) (mkFullInd env (ind,u) (nparrec+1)) ( mkArrow (mkApp(eq,[|mkFullInd env (ind,u) (nparrec+2);mkVar x;mkVar y|])) Sorts.Relevant 
(mkApp(eq,[|bb;mkApp(eqI,[|mkVar x;mkVar y|]);tt|])) ))) let compute_lb_tact handle ind lnamesparrec nparrec = let list_id = list_id lnamesparrec in let first_intros = ( List.map (fun (s,_,_,_) -> s ) list_id ) @ ( List.map (fun (_,seq,_,_) -> seq) list_id ) @ ( List.map (fun (_,_,_,slb) -> slb) list_id ) in intros_using_then first_intros begin fun fresh_first_intros -> Tacticals.tclTHENLIST [ intro_using_then (Id.of_string "x") (fun freshn -> induct_on (EConstr.mkVar freshn)); intro_using_then (Id.of_string "y") (fun freshm -> destruct_on (EConstr.mkVar freshm)); intro_using_then (Id.of_string "Z") begin fun freshz -> Tacticals.tclTHENLIST [ intros; Tacticals.tclTRY ( Tacticals.tclORELSE reflexivity my_discr_tac ); my_inj_tac freshz; intros; simpl_in_concl; Auto.default_auto; Tacticals.tclREPEAT ( Tacticals.tclTHENLIST [apply (EConstr.of_constr (andb_true_intro())); simplest_split ;Auto.default_auto ] ); Proofview.Goal.enter begin fun gls -> let concl = Proofview.Goal.concl gls in let sigma = Tacmach.project gls in (* assume the goal to be eq (eq_type ...) = true *) match EConstr.kind sigma concl with | App(c,ca) -> (match (EConstr.kind sigma ca.(1)) with | App(c',ca') -> let n = Array.length ca' in do_replace_lb handle (List.rev fresh_first_intros) nparrec ca'.(n-2) ca'.(n-1) | _ -> Tacticals.tclZEROMSG (str "Failure while solving Leibniz->Boolean.") ) | _ -> Tacticals.tclZEROMSG (str "Failure while solving Leibniz->Boolean.") end ] end ] end let make_lb_scheme env handle mind = let mib = Environ.lookup_mind mind env in if not (Int.equal (Array.length mib.mind_packets) 1) then user_err (str "Automatic building of Leibniz->boolean lemmas not supported"); let ind = (mind,0) in let nparams = mib.mind_nparams in let nparrec = mib.mind_nparams_rec in let lnonparrec,lnamesparrec = context_chop (nparams-nparrec) mib.mind_params_ctxt in let lb_goal = compute_lb_goal env handle ind lnamesparrec nparrec in let uctx = UState.from_env env in let lb_goal = EConstr.of_constr lb_goal in let (ans, _, _, _, ctx) = Declare.build_by_tactic ~poly:false ~side_eff:false env ~uctx ~typ:lb_goal (compute_lb_tact handle ind lnamesparrec nparrec) in ([|ans|], ctx) let make_lb_scheme_deps env ind = let inds = get_inductive_deps env ind in let map ind = SchemeMutualDep (ind, !lb_scheme_kind_aux ()) in SchemeMutualDep (ind, beq_scheme_kind) :: List.map map inds let lb_scheme_kind = declare_mutual_scheme_object "_dec_lb" ~deps:make_lb_scheme_deps make_lb_scheme let _ = lb_scheme_kind_aux := fun () -> lb_scheme_kind (**********************************************************************) (* Decidable equality *) let check_not_is_defined () = if not (Coqlib.has_ref "core.not.type") then raise (UndefinedCst "not") (* {n=m}+{n<>m} part *) let compute_dec_goal env ind lnamesparrec nparrec = check_not_is_defined (); let eq = eq () and tt = tt () and bb = bb () in let list_id = list_id lnamesparrec in let avoid = avoid_of_list_id list_id in let x = next_ident_away (Id.of_string "x") avoid in let y = next_ident_away (Id.of_string "y") (Id.Set.add x avoid) in let create_input c = let lb_typ = List.map (fun (s,seq,_,_) -> mkNamedProd (make_annot x Sorts.Relevant) (mkVar s) ( mkNamedProd (make_annot y Sorts.Relevant) (mkVar s) ( mkArrow ( mkApp(eq,[|mkVar s;mkVar x;mkVar y|])) Sorts.Relevant ( mkApp(eq,[|bb;mkApp(mkVar seq,[|mkVar x;mkVar y|]);tt|])) )) ) list_id in let bl_typ = List.map (fun (s,seq,_,_) -> mkNamedProd (make_annot x Sorts.Relevant) (mkVar s) ( mkNamedProd (make_annot y Sorts.Relevant) (mkVar s) ( mkArrow ( 
mkApp(eq,[|bb;mkApp(mkVar seq,[|mkVar x;mkVar y|]);tt|])) Sorts.Relevant ( mkApp(eq,[|mkVar s;mkVar x;mkVar y|])) )) ) list_id in let lb_input = List.fold_left2 ( fun a (s,_,_,slb) b -> mkNamedProd (make_annot slb Sorts.Relevant) b a ) c (List.rev list_id) (List.rev lb_typ) in let bl_input = List.fold_left2 ( fun a (s,_,sbl,_) b -> mkNamedProd (make_annot sbl Sorts.Relevant) b a ) lb_input (List.rev list_id) (List.rev bl_typ) in let eqs_typ = List.map (fun (s,_,_,_) -> mkProd(make_annot Anonymous Sorts.Relevant,mkVar s, mkProd(make_annot Anonymous Sorts.Relevant,mkVar s,bb)) ) list_id in let eq_input = List.fold_left2 ( fun a (s,seq,_,_) b -> mkNamedProd (make_annot seq Sorts.Relevant) b a ) bl_input (List.rev list_id) (List.rev eqs_typ) in List.fold_left (fun a decl -> let x = map_annot (function Name s -> s | Anonymous -> Id.of_string "A") (RelDecl.get_annot decl) in mkNamedProd x (RelDecl.get_type decl) a) eq_input lnamesparrec in let eqnm = mkApp(eq,[|mkFullInd env ind (2*nparrec+2);mkVar x;mkVar y|]) in create_input ( mkNamedProd (make_annot x Sorts.Relevant) (mkFullInd env ind (2*nparrec)) ( mkNamedProd (make_annot y Sorts.Relevant) (mkFullInd env ind (2*nparrec+1)) ( mkApp(sumbool(),[|eqnm;mkApp (UnivGen.constr_of_monomorphic_global (Global.env ()) @@ Coqlib.lib_ref "core.not.type",[|eqnm|])|]) ) ) ) let compute_dec_tact handle ind lnamesparrec nparrec = let eq = eq () and tt = tt () and ff = ff () and bb = bb () in let list_id = list_id lnamesparrec in let _ = get_scheme handle beq_scheme_kind ind in (* This is just an assertion? *) let _non_fresh_eqI = eqI handle ind list_id in let eqtrue x = mkApp(eq,[|bb;x;tt|]) in let eqfalse x = mkApp(eq,[|bb;x;ff|]) in let first_intros = ( List.map (fun (s,_,_,_) -> s ) list_id ) @ ( List.map (fun (_,seq,_,_) -> seq) list_id ) @ ( List.map (fun (_,_,sbl,_) -> sbl) list_id ) @ ( List.map (fun (_,_,_,slb) -> slb) list_id ) in let fresh_id s gl = fresh_id_in_env (Id.Set.empty) s (Proofview.Goal.env gl) in intros_using_then first_intros begin fun fresh_first_intros -> let eqI = let a = Array.of_list fresh_first_intros in let n = List.length list_id in assert (Int.equal (Array.length a) (4 * n)); let fresh_list_id = List.init n (fun i -> (Array.get a i, Array.get a (i+n), Array.get a (i+2*n), Array.get a (i+3*n))) in eqI handle ind fresh_list_id in intro_using_then (Id.of_string "x") begin fun freshn -> intro_using_then (Id.of_string "y") begin fun freshm -> Proofview.Goal.enter begin fun gl -> let freshH = fresh_id (Id.of_string "H") gl in let eqbnm = mkApp(eqI,[|mkVar freshn;mkVar freshm|]) in let arfresh = Array.of_list fresh_first_intros in let xargs = Array.sub arfresh 0 (2*nparrec) in let c = get_scheme handle bl_scheme_kind ind in let blI = mkConst c in let c = get_scheme handle lb_scheme_kind ind in let lbI = mkConst c in Tacticals.tclTHENLIST [ (*we do this so we don't have to prove the same goal twice *) assert_by (Name freshH) (EConstr.of_constr ( mkApp(sumbool(),[|eqtrue eqbnm; eqfalse eqbnm|]) )) (Tacticals.tclTHEN (destruct_on (EConstr.of_constr eqbnm)) Auto.default_auto); Proofview.Goal.enter begin fun gl -> let freshH2 = fresh_id (Id.of_string "H") gl in Tacticals.tclTHENS (destruct_on_using (EConstr.mkVar freshH) freshH2) [ (* left *) Tacticals.tclTHENLIST [ simplest_left; apply (EConstr.of_constr (mkApp(blI,Array.map mkVar xargs))); Auto.default_auto ] ; (*right *) Proofview.Goal.enter begin fun gl -> let freshH3 = fresh_id (Id.of_string "H") gl in Tacticals.tclTHENLIST [ simplest_right ; unfold_constr (Coqlib.lib_ref 
"core.not.type"); intro; Equality.subst_all (); assert_by (Name freshH3) (EConstr.of_constr (mkApp(eq,[|bb;mkApp(eqI,[|mkVar freshm;mkVar freshm|]);tt|]))) (Tacticals.tclTHENLIST [ apply (EConstr.of_constr (mkApp(lbI,Array.map mkVar xargs))); Auto.default_auto ]); Equality.general_rewrite ~where:(Some freshH3) ~l2r:true Locus.AllOccurrences ~freeze:true ~dep:false ~with_evars:true ((EConstr.mkVar freshH2), NoBindings ) ; my_discr_tac ] end ] end ] end end end end let make_eq_decidability env handle mind = let mib = Environ.lookup_mind mind env in if not (Int.equal (Array.length mib.mind_packets) 1) then raise DecidabilityMutualNotSupported; let ind = (mind,0) in let nparams = mib.mind_nparams in let nparrec = mib.mind_nparams_rec in let u = Univ.Instance.empty in let uctx = UState.from_env env in let lnonparrec,lnamesparrec = context_chop (nparams-nparrec) mib.mind_params_ctxt in let (ans, _, _, _, ctx) = Declare.build_by_tactic ~poly:false ~side_eff:false env ~uctx ~typ:(EConstr.of_constr (compute_dec_goal env (ind,u) lnamesparrec nparrec)) (compute_dec_tact handle ind lnamesparrec nparrec) in ([|ans|], ctx) let eq_dec_scheme_kind = declare_mutual_scheme_object "_eq_dec" ~deps:(fun _ ind -> [SchemeMutualDep (ind, bl_scheme_kind); SchemeMutualDep (ind, lb_scheme_kind)]) make_eq_decidability (* The eq_dec_scheme proofs depend on the equality and discr tactics but the inj tactics, that comes with discr, depends on the eq_dec_scheme... *) let _ = Equality.set_eq_dec_scheme_kind eq_dec_scheme_kind coq-8.15.0/vernac/auto_ind_decl.mli000066400000000000000000000031461417001151100171340ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* obj = declare_object {(default_object "CANONICAL-STRUCTURE") with open_function = simple_open ~cat:canon_cat open_canonical_structure; cache_function = cache_canonical_structure; subst_function = (fun (subst,(c,local)) -> Instance.subst subst c, local); classify_function = (fun x -> Substitute x); discharge_function = discharge_canonical_structure } let add_canonical_structure x = Lib.add_anonymous_leaf (inCanonStruc x) let declare_canonical_structure ?(local=false) ref = let env = Global.env () in let sigma = Evd.from_env env in add_canonical_structure (Instance.make env sigma ref, local) coq-8.15.0/vernac/canonical.mli000066400000000000000000000013621417001151100162700ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* GlobRef.t -> unit coq-8.15.0/vernac/classes.ml000066400000000000000000000627071417001151100156370ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* strbrk "The default value for Typeclasses Opaque and Typeclasses \ Transparent locality is currently \"local\" in a section and \ \"global\" otherwise, but is scheduled to change in a future \ release. For the time being, adding typeclass transparency hints outside of \ sections without specifying an explicit locality attribute is \ therefore deprecated. It is recommended to use \"export\" whenever \ possible. Use the attributes #[local], #[global] and #[export] \ depending on your choice. 
For example: \"#[export] Typeclasses Transparent foo.\"") let default_tc_transparency_locality () = if Global.sections_are_opened () then Hints.Local else let () = warn_deprecated_tc_transparency_without_locality () in Hints.SuperGlobal let tc_transparency_locality = Attributes.hint_locality ~default:default_tc_transparency_locality let set_typeclass_transparency_com ~locality refs b = let refs = List.map (fun x -> Tacred.evaluable_of_global_reference (Global.env ()) (Smartlocate.global_with_alias x)) refs in set_typeclass_transparency ~locality refs b let add_instance_hint inst path ~locality info = Flags.silently (fun () -> Hints.add_hints ~locality [typeclasses_db] (Hints.HintsResolveEntry [info, false, Hints.PathHints path, inst])) () (* short names without opening all Hints *) type locality = Hints.hint_locality = Local | Export | SuperGlobal type instance_obj = { inst_class : GlobRef.t; inst_info: hint_info; (* Sections where the instance should be redeclared, None for discard, Some 0 for none. *) inst_global: Hints.hint_locality; inst_impl: GlobRef.t; } let add_instance_base inst = let locality = match inst.inst_global with | Local -> Local | SuperGlobal -> (* i.e. in a section, declare the hint as local since discharge is managed by rebuild_instance which calls again add_instance_hint; don't ask hints to take discharge into account itself *) if Global.sections_are_opened () then Local else SuperGlobal | Export -> (* Same as above for export *) if Global.sections_are_opened () then Local else Export in add_instance_hint (Hints.hint_globref inst.inst_impl) [inst.inst_impl] ~locality inst.inst_info (* * instances persistent object *) let perform_instance i = let i = { is_class = i.inst_class; is_info = i.inst_info; is_impl = i.inst_impl } in Typeclasses.load_instance i let cache_instance (_, inst) = perform_instance inst let load_instance _ (_, inst) = match inst.inst_global with | Local -> assert false | SuperGlobal -> perform_instance inst | Export -> () let open_instance i (_, inst) = match inst.inst_global with | Local -> assert false | SuperGlobal -> perform_instance inst | Export -> if Int.equal i 1 then perform_instance inst let subst_instance (subst, inst) = { inst with inst_class = fst (subst_global subst inst.inst_class); inst_impl = fst (subst_global subst inst.inst_impl) } let discharge_instance (_, inst) = match inst.inst_global with | Local -> None | SuperGlobal | Export -> assert (not (isVarRef inst.inst_impl)); Some inst let rebuild_instance inst = add_instance_base inst; inst let classify_instance inst = match inst.inst_global with | Local -> Dispose | SuperGlobal | Export -> Substitute inst let instance_input : instance_obj -> obj = declare_object { (default_object "type classes instances state") with cache_function = cache_instance; load_function = load_instance; open_function = simple_open ~cat:Hints.hint_cat open_instance; classify_function = classify_instance; discharge_function = discharge_instance; rebuild_function = rebuild_instance; subst_function = subst_instance } let warn_deprecated_instance_without_locality = let open Pp in CWarnings.create ~name:"deprecated-instance-without-locality" ~category:"deprecated" (fun () -> strbrk "The default value for instance locality is currently \ \"local\" in a section and \"global\" otherwise, but is scheduled to change \ in a future release. For the time being, adding instances outside of sections \ without specifying an explicit locality attribute is therefore deprecated. 
It is \ recommended to use \"export\" whenever possible. Use the attributes \ #[local], #[global] and #[export] depending on your choice. For example: \ \"#[export] Instance Foo : Bar := baz.\"") let default_locality () = if Global.sections_are_opened () then Local else let () = warn_deprecated_instance_without_locality () in SuperGlobal let instance_locality = Attributes.hint_locality ~default:default_locality let add_instance cl info global impl = let () = match global with | Local -> () | SuperGlobal -> if Global.sections_are_opened () && isVarRef impl then CErrors.user_err (Pp.str "Cannot set Global an instance referring to a section variable.") | Export -> if Global.sections_are_opened () && isVarRef impl then CErrors.user_err (Pp.str "The export attribute cannot be applied to an instance referring to a section variable.") in let i = { inst_class = cl.cl_impl; inst_info = info ; inst_global = global ; inst_impl = impl; } in Lib.add_anonymous_leaf (instance_input i); add_instance_base i let warning_not_a_class = let name = "not-a-class" in let category = "typeclasses" in CWarnings.create ~name ~category (fun (n, ty) -> let env = Global.env () in let evd = Evd.from_env env in Pp.(str "Ignored instance declaration for “" ++ Nametab.pr_global_env Id.Set.empty n ++ str "”: “" ++ Termops.Internal.print_constr_env env evd (EConstr.of_constr ty) ++ str "” is not a class") ) let declare_instance ?(warn = false) env sigma info local glob = let ty, _ = Typeops.type_of_global_in_context env glob in let info = Option.default {hint_priority = None; hint_pattern = None} info in match class_of_constr env sigma (EConstr.of_constr ty) with | Some (rels, ((tc,_), args) as _cl) -> add_instance tc info local glob | None -> if warn then warning_not_a_class (glob, ty) (* * classes persistent object *) let cache_class (_,c) = load_class c let subst_class (subst,cl) = let do_subst_con c = Mod_subst.subst_constant subst c and do_subst c = Mod_subst.subst_mps subst c and do_subst_gr gr = fst (subst_global subst gr) in let do_subst_ctx = List.Smart.map (RelDecl.map_constr do_subst) in let do_subst_meth m = let c = Option.Smart.map do_subst_con m.meth_const in if c == m.meth_const then m else { meth_name = m.meth_name; meth_info = m.meth_info; meth_const = c; } in let do_subst_projs projs = List.Smart.map do_subst_meth projs in { cl_univs = cl.cl_univs; cl_impl = do_subst_gr cl.cl_impl; cl_context = do_subst_ctx cl.cl_context; cl_props = do_subst_ctx cl.cl_props; cl_projs = do_subst_projs cl.cl_projs; cl_strict = cl.cl_strict; cl_unique = cl.cl_unique } let discharge_class (_,cl) = let open CVars in let repl = Lib.replacement_context () in let rel_of_variable_context ctx = List.fold_right ( fun decl (ctx', subst) -> let decl' = decl |> NamedDecl.map_constr (substn_vars 1 subst) |> NamedDecl.to_rel_decl in (decl' :: ctx', NamedDecl.get_id decl :: subst) ) ctx ([], []) in let discharge_rel_context (subst, usubst) n rel = let rel = Context.Rel.map (Cooking.expmod_constr repl) rel in let fold decl (ctx, k) = let map c = subst_univs_level_constr usubst (substn_vars k subst c) in RelDecl.map_constr map decl :: ctx, succ k in let ctx, _ = List.fold_right fold rel ([], n) in ctx in let abs_context cl = let open GlobRef in match cl.cl_impl with | VarRef _ | ConstructRef _ -> assert false | ConstRef cst -> Lib.section_segment_of_constant cst | IndRef (ind,_) -> Lib.section_segment_of_mutual_inductive ind in let discharge_context ctx' subst ctx = discharge_rel_context subst 1 ctx @ ctx' in try let info = abs_context cl in 
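    (* [info] is the section segment the class implementation was abstracted
       over: the named context of section variables together with its
       abstract universe context.  The bindings below turn that named
       context into a rel context and use the resulting substitution to
       re-express the class context, its fields and its universes outside
       the section. *)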
let ctx = info.Declarations.abstr_ctx in let ctx, subst = rel_of_variable_context ctx in let usubst, cl_univs' = Lib.discharge_abstract_universe_context info cl.cl_univs in let context = discharge_context ctx (subst, usubst) cl.cl_context in let props = discharge_rel_context (subst, usubst) (succ (List.length cl.cl_context)) cl.cl_props in let discharge_proj x = x in { cl_univs = cl_univs'; cl_impl = cl.cl_impl; cl_context = context; cl_props = props; cl_projs = List.Smart.map discharge_proj cl.cl_projs; cl_strict = cl.cl_strict; cl_unique = cl.cl_unique } with Not_found -> (* not defined in the current section *) cl let rebuild_class cl = try let cst = Tacred.evaluable_of_global_reference (Global.env ()) cl.cl_impl in set_typeclass_transparency ~locality:Hints.Local [cst] false; cl with e when CErrors.noncritical e -> cl let class_input : typeclass -> obj = declare_object { (default_object "type classes state") with cache_function = cache_class; load_function = (fun _ -> cache_class); classify_function = (fun x -> Substitute x); discharge_function = (fun a -> Some (discharge_class a)); rebuild_function = rebuild_class; subst_function = subst_class } let add_class cl = Lib.add_anonymous_leaf (class_input cl) let add_class env sigma cl = add_class cl; List.iter (fun m -> match m.meth_info with | Some info -> (match m.meth_const with | None -> CErrors.user_err Pp.(str "Non-definable projection can not be declared as a subinstance") | Some b -> declare_instance ~warn:true env sigma (Some info) SuperGlobal (GlobRef.ConstRef b)) | _ -> ()) cl.cl_projs let intern_info {hint_priority;hint_pattern} = let env = Global.env() in let sigma = Evd.from_env env in let hint_pattern = Option.map (Constrintern.intern_constr_pattern env sigma) hint_pattern in {hint_priority;hint_pattern} (** TODO: add subinstances *) let existing_instance glob g info = let c = Nametab.global g in let info = Option.default Hints.empty_hint_info info in let info = intern_info info in let env = Global.env() in let sigma = Evd.from_env env in let instance, _ = Typeops.type_of_global_in_context env c in let ctx, r = Term.decompose_prod_assum instance in match class_of_constr (Environ.push_rel_context ctx env) sigma (EConstr.of_constr r) with | Some (_, ((tc,u), _)) -> add_instance tc info glob c | None -> user_err ?loc:g.CAst.loc (Pp.str "Constant does not build instances of a declared type class.") (* Declare everything in the parameters as implicit, and the class instance as well *) let type_ctx_instance ~program_mode env sigma ctx inst subst = let open Vars in let rec aux (sigma, subst, instctx) l = function decl :: ctx -> let t' = substl subst (RelDecl.get_type decl) in let (sigma, c'), l = match decl with | LocalAssum _ -> interp_casted_constr_evars ~program_mode env sigma (List.hd l) t', List.tl l | LocalDef (_,b,_) -> (sigma, substl subst b), l in let d = RelDecl.get_name decl, Some c', t' in aux (sigma, c' :: subst, d :: instctx) l ctx | [] -> sigma, subst in aux (sigma, subst, []) inst (List.rev ctx) let id_of_class cl = let open GlobRef in match cl.cl_impl with | ConstRef kn -> Label.to_id @@ Constant.label kn | IndRef (kn,i) -> let mip = (Environ.lookup_mind kn (Global.env ())).Declarations.mind_packets in mip.(0).Declarations.mind_typename | _ -> assert false let instance_hook info global ?hook cst = let info = intern_info info in let env = Global.env () in let sigma = Evd.from_env env in declare_instance env sigma (Some info) global cst; (match hook with Some h -> h cst | None -> ()) let declare_instance_constant 
iinfo global impargs ?hook name udecl poly sigma term termtype = let kind = Decls.(IsDefinition Instance) in let scope = Locality.Global Locality.ImportDefaultBehavior in let cinfo = Declare.CInfo.make ~name ~impargs ~typ:(Some termtype) () in let info = Declare.Info.make ~kind ~scope ~poly ~udecl () in let kn = Declare.declare_definition ~cinfo ~info ~opaque:false ~body:term sigma in instance_hook iinfo global ?hook kn let do_declare_instance sigma ~locality ~poly k u ctx ctx' pri udecl impargs subst name = let subst = List.fold_left2 (fun subst' s decl -> if is_local_assum decl then s :: subst' else subst') [] subst k.cl_context in let (_, ty_constr) = instance_constructor (k,u) subst in let termtype = it_mkProd_or_LetIn ty_constr (ctx' @ ctx) in let sigma, entry = Declare.prepare_parameter ~poly sigma ~udecl ~types:termtype in let cst = Declare.declare_constant ~name ~kind:Decls.(IsAssumption Logical) (Declare.ParameterEntry entry) in let cst = (GlobRef.ConstRef cst) in Impargs.maybe_declare_manual_implicits false cst impargs; instance_hook pri locality cst let declare_instance_program pm env sigma ~locality ~poly name pri impargs udecl term termtype = let hook { Declare.Hook.S.scope; dref; _ } = let cst = match dref with GlobRef.ConstRef kn -> kn | _ -> assert false in let pri = intern_info pri in let env = Global.env () in let sigma = Evd.from_env env in declare_instance env sigma (Some pri) locality (GlobRef.ConstRef cst) in let obls, _, term, typ = RetrieveObl.retrieve_obligations env name sigma 0 term termtype in let hook = Declare.Hook.make hook in let uctx = Evd.evar_universe_context sigma in let scope, kind = Locality.Global Locality.ImportDefaultBehavior, Decls.IsDefinition Decls.Instance in let cinfo = Declare.CInfo.make ~name ~typ ~impargs () in let info = Declare.Info.make ~udecl ~scope ~poly ~kind ~hook () in let pm, _ = Declare.Obls.add_definition ~pm ~cinfo ~info ~term ~uctx obls in pm let declare_instance_open sigma ?hook ~tac ~locality ~poly id pri impargs udecl ids term termtype = (* spiwack: it is hard to reorder the actions to do the pretyping after the proof has opened. As a consequence, we use the low-level primitives to code the refinement manually.*) let future_goals, sigma = Evd.pop_future_goals sigma in let gls = List.rev future_goals.Evd.FutureGoals.comb in let sigma = Evd.push_future_goals sigma in let kind = Decls.(IsDefinition Instance) in let hook = Declare.Hook.(make (fun { S.dref ; _ } -> instance_hook pri locality ?hook dref)) in let info = Declare.Info.make ~hook ~kind ~udecl ~poly () in (* XXX: We need to normalize the type, otherwise Admitted / Qed will fails! This is due to a bug in proof_global :( *) let termtype = Evarutil.nf_evar sigma termtype in let cinfo = Declare.CInfo.make ~name:id ~impargs ~typ:termtype () in let lemma = Declare.Proof.start ~cinfo ~info sigma in (* spiwack: I don't know what to do with the status here. 
*) let lemma = match term with | Some term -> let init_refine = Tacticals.tclTHENLIST [ Refine.refine ~typecheck:false (fun sigma -> sigma, term); Proofview.Unsafe.tclNEWGOALS (CList.map Proofview.with_empty_state gls); Tactics.New.reduce_after_refine; ] in let lemma, _ = Declare.Proof.by init_refine lemma in lemma | None -> let lemma, _ = Declare.Proof.by (Tactics.auto_intros_tac ids) lemma in lemma in match tac with | Some tac -> let lemma, _ = Declare.Proof.by tac lemma in lemma | None -> lemma let do_instance_subst_constructor_and_ty subst k u ctx = let subst = List.fold_left2 (fun subst' s decl -> if is_local_assum decl then s :: subst' else subst') [] subst (k.cl_props @ k.cl_context) in let (app, ty_constr) = instance_constructor (k,u) subst in let termtype = it_mkProd_or_LetIn ty_constr ctx in let term = it_mkLambda_or_LetIn (Option.get app) ctx in term, termtype let do_instance_resolve_TC termtype sigma env = let sigma = Evarutil.nf_evar_map sigma in let sigma = Typeclasses.resolve_typeclasses ~filter:Typeclasses.no_goals_or_obligations ~fail:true env sigma in (* Try resolving fields that are typeclasses automatically. *) let sigma = Typeclasses.resolve_typeclasses ~filter:Typeclasses.all_evars ~fail:false env sigma in let sigma = Evarutil.nf_evar_map_undefined sigma in (* Beware of this step, it is required as to minimize universes. *) let sigma = Evd.minimize_universes sigma in (* Check that the type is free of evars now. *) Pretyping.check_evars env sigma termtype; termtype, sigma let do_instance_type_ctx_instance props k env' ctx' sigma ~program_mode subst = let get_id qid = CAst.make ?loc:qid.CAst.loc @@ qualid_basename qid in let props, rest = List.fold_left (fun (props, rest) decl -> if is_local_assum decl then try let is_id (id', _) = match RelDecl.get_name decl, get_id id' with | Name id, {CAst.v=id'} -> Id.equal id id' | Anonymous, _ -> false in let (loc_mid, c) = List.find is_id rest in let rest' = List.filter (fun v -> not (is_id v)) rest in let {CAst.loc;v=mid} = get_id loc_mid in List.iter (fun m -> if Name.equal m.meth_name (Name mid) then Option.iter (fun x -> Dumpglob.add_glob ?loc (GlobRef.ConstRef x)) m.meth_const) k.cl_projs; c :: props, rest' with Not_found -> ((CAst.make @@ CHole (None(* Some Evar_kinds.GoalEvar *), Namegen.IntroAnonymous, None)) :: props), rest else props, rest) ([], props) k.cl_props in match rest with | (n, _) :: _ -> unbound_method env' sigma k.cl_impl (get_id n) | _ -> let kcl_props = List.map (Termops.map_rel_decl of_constr) k.cl_props in let sigma, res = type_ctx_instance ~program_mode (push_rel_context ctx' env') sigma kcl_props props subst in res, sigma let interp_props ~program_mode env' cty k u ctx ctx' subst sigma = function | (true, { CAst.v = CRecord fs; loc }) -> check_duplicate ?loc fs; let subst, sigma = do_instance_type_ctx_instance fs k env' ctx' sigma ~program_mode subst in let term, termtype = do_instance_subst_constructor_and_ty subst k u (ctx' @ ctx) in term, termtype, sigma | (_, term) -> let sigma, def = interp_casted_constr_evars ~program_mode env' sigma term cty in let termtype = it_mkProd_or_LetIn cty ctx in let term = it_mkLambda_or_LetIn def ctx in term, termtype, sigma let do_instance_interactive env env' sigma ?hook ~tac ~locality ~poly cty k u ctx ctx' pri decl imps subst id opt_props = let term, termtype, sigma = match opt_props with | Some props -> on_pi1 (fun x -> Some x) (interp_props ~program_mode:false env' cty k u ctx ctx' subst sigma props) | None -> let term, termtype = if List.is_empty k.cl_props 
then let term, termtype = do_instance_subst_constructor_and_ty subst k u (ctx' @ ctx) in Some term, termtype else None, it_mkProd_or_LetIn cty ctx in let termtype, sigma = do_instance_resolve_TC termtype sigma env in term, termtype, sigma in Flags.silently (fun () -> declare_instance_open sigma ?hook ~tac ~locality ~poly id pri imps decl (List.map RelDecl.get_name ctx) term termtype) () let do_instance env env' sigma ?hook ~locality ~poly cty k u ctx ctx' pri decl imps subst id props = let term, termtype, sigma = interp_props ~program_mode:false env' cty k u ctx ctx' subst sigma props in let termtype, sigma = do_instance_resolve_TC termtype sigma env in Pretyping.check_evars_are_solved ~program_mode:false env sigma; declare_instance_constant pri locality imps ?hook id decl poly sigma term termtype let do_instance_program ~pm env env' sigma ?hook ~locality ~poly cty k u ctx ctx' pri decl imps subst id opt_props = let term, termtype, sigma = match opt_props with | Some props -> interp_props ~program_mode:true env' cty k u ctx ctx' subst sigma props | None -> let subst, sigma = do_instance_type_ctx_instance [] k env' ctx' sigma ~program_mode:true subst in let term, termtype = do_instance_subst_constructor_and_ty subst k u (ctx' @ ctx) in term, termtype, sigma in let termtype, sigma = do_instance_resolve_TC termtype sigma env in if not (Evd.has_undefined sigma) && not (Option.is_empty opt_props) then let () = declare_instance_constant pri locality imps ?hook id decl poly sigma term termtype in pm else declare_instance_program pm env sigma ~locality ~poly id pri imps decl term termtype let interp_instance_context ~program_mode env ctx pl tclass = let sigma, decl = interp_univ_decl_opt env pl in let sigma, (impls, ((env', ctx), imps)) = interp_context_evars ~program_mode env sigma ctx in let flags = Pretyping.{ all_no_fail_flags with program_mode } in let sigma, (c', imps') = interp_type_evars_impls ~flags ~impls env' sigma tclass in let imps = imps @ imps' in let ctx', c = decompose_prod_assum sigma c' in let ctx'' = ctx' @ ctx in let (k, u), args = Typeclasses.dest_class_app (push_rel_context ctx'' env) sigma c in let u_s = EInstance.kind sigma u in let cl = Typeclasses.typeclass_univ_instance (k, u_s) in let args = List.map of_constr args in let cl_context = List.map (Termops.map_rel_decl of_constr) cl.cl_context in let _, args = List.fold_right (fun decl (args, args') -> match decl with | LocalAssum _ -> (List.tl args, List.hd args :: args') | LocalDef (_,b,_) -> (args, Vars.substl args' b :: args')) cl_context (args, []) in let sigma = Evarutil.nf_evar_map sigma in let sigma = resolve_typeclasses ~filter:Typeclasses.all_evars ~fail:true env sigma in sigma, cl, u, c', ctx', ctx, imps, args, decl let new_instance_common ~program_mode env instid ctx cl = let ({CAst.loc;v=instid}, pl) = instid in let sigma, k, u, cty, ctx', ctx, imps, subst, decl = interp_instance_context ~program_mode env ctx pl cl in (* The name generator should not be here *) let id = match instid with | Name id -> id | Anonymous -> let i = Nameops.add_suffix (id_of_class k) "_instance_0" in Namegen.next_global_ident_away i (Termops.vars_of_env env) in let env' = push_rel_context ctx env in id, env', sigma, k, u, cty, ctx', ctx, imps, subst, decl let new_instance_interactive ~locality ~poly instid ctx cl ?(tac:unit Proofview.tactic option) ?hook pri opt_props = let env = Global.env() in let id, env', sigma, k, u, cty, ctx', ctx, imps, subst, decl = new_instance_common ~program_mode:false env instid ctx cl in id, 
do_instance_interactive env env' sigma ?hook ~tac ~locality ~poly cty k u ctx ctx' pri decl imps subst id opt_props let new_instance_program ~locality ~pm ~poly instid ctx cl opt_props ?hook pri = let env = Global.env() in let id, env', sigma, k, u, cty, ctx', ctx, imps, subst, decl = new_instance_common ~program_mode:true env instid ctx cl in let pm = do_instance_program ~pm env env' sigma ?hook ~locality ~poly cty k u ctx ctx' pri decl imps subst id opt_props in pm, id let new_instance ~locality ~poly instid ctx cl props ?hook pri = let env = Global.env() in let id, env', sigma, k, u, cty, ctx', ctx, imps, subst, decl = new_instance_common ~program_mode:false env instid ctx cl in do_instance env env' sigma ?hook ~locality ~poly cty k u ctx ctx' pri decl imps subst id props; id let declare_new_instance ~locality ~program_mode ~poly instid ctx cl pri = let env = Global.env() in let ({CAst.loc;v=instid}, pl) = instid in let sigma, k, u, cty, ctx', ctx, imps, subst, decl = interp_instance_context ~program_mode env ctx pl cl in do_declare_instance sigma ~locality ~poly k u ctx ctx' pri decl imps subst instid let refine_att = let open Attributes in let open Notations in attribute_of_list ["refine",single_key_parser ~name:"refine" ~key:"refine" ()] >>= function | None -> return false | Some () -> return true module Internal = struct let add_instance cl info glob r = let glob = if glob then SuperGlobal else Local in add_instance cl info glob r end coq-8.15.0/vernac/classes.mli000066400000000000000000000060001417001151100157700ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* env -> Evd.evar_map -> hint_info option -> Hints.hint_locality -> GlobRef.t -> unit (** Declares the given global reference as an instance of its type. Does nothing — or emit a “not-a-class” warning if the [warn] argument is set — when said type is not a registered type class. 
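    A typical call, as used from [ComAssumption], is
    [declare_instance env sigma None Hints.SuperGlobal gr]; passing [None]
    for the [hint_info option] argument registers [gr] with the default
    priority and no hint pattern.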
*) val existing_instance : Hints.hint_locality -> qualid -> Vernacexpr.hint_info_expr option -> unit (** globality, reference, optional priority and pattern information *) val new_instance_interactive : locality:Hints.hint_locality -> poly:bool -> name_decl -> local_binder_expr list -> constr_expr -> ?tac:unit Proofview.tactic -> ?hook:(GlobRef.t -> unit) -> Vernacexpr.hint_info_expr -> (bool * constr_expr) option -> Id.t * Declare.Proof.t val new_instance : locality:Hints.hint_locality -> poly:bool -> name_decl -> local_binder_expr list -> constr_expr -> (bool * constr_expr) -> ?hook:(GlobRef.t -> unit) -> Vernacexpr.hint_info_expr -> Id.t val new_instance_program : locality:Hints.hint_locality -> pm:Declare.OblState.t -> poly:bool -> name_decl -> local_binder_expr list -> constr_expr -> (bool * constr_expr) option -> ?hook:(GlobRef.t -> unit) -> Vernacexpr.hint_info_expr -> Declare.OblState.t * Id.t val declare_new_instance : locality:Hints.hint_locality -> program_mode:bool -> poly:bool -> ident_decl -> local_binder_expr list -> constr_expr -> Vernacexpr.hint_info_expr -> unit val add_class : env -> Evd.evar_map -> typeclass -> unit (** Setting opacity *) val set_typeclass_transparency : locality:Hints.hint_locality -> Tacred.evaluable_global_reference list -> bool -> unit val tc_transparency_locality : Hints.hint_locality Attributes.attribute val set_typeclass_transparency_com : locality:Hints.hint_locality -> Libnames.qualid list -> bool -> unit (** For generation on names based on classes only *) val id_of_class : typeclass -> Id.t val refine_att : bool Attributes.attribute val instance_locality : Hints.hint_locality Attributes.attribute (** {6 Low level interface used by Add Morphism, do not use } *) module Internal : sig val add_instance : typeclass -> hint_info -> bool -> GlobRef.t -> unit end coq-8.15.0/vernac/comArguments.ml000066400000000000000000000270431417001151100166400ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Pretyping.clear_bidirectionality_hint gr | Some nargs -> Pretyping.add_bidirectionality_hint gr nargs let load_bidi_hints _ r = cache_bidi_hints r let subst_bidi_hints (subst, (gr, ohint as orig)) = let gr' = Globnames.subst_global_reference subst gr in if gr == gr' then orig else (gr', ohint) let discharge_bidi_hints (_name, (gr, ohint)) = if Globnames.isVarRef gr && Lib.is_in_section gr then None else let vars = Lib.variable_section_segment_of_reference gr in let n = List.length vars in Some (gr, Option.map ((+) n) ohint) let inBidiHints = let open Libobject in declare_object { (default_object "BIDIRECTIONALITY-HINTS" ) with load_function = load_bidi_hints; cache_function = cache_bidi_hints; classify_function = (fun o -> Substitute o); subst_function = subst_bidi_hints; discharge_function = discharge_bidi_hints; } let warn_arguments_assert = CWarnings.create ~name:"arguments-assert" ~category:"vernacular" Pp.(fun sr -> strbrk "This command is just asserting the names of arguments of " ++ Printer.pr_global sr ++ strbrk". If this is what you want, add " ++ strbrk "': assert' to silence the warning. If you want " ++ strbrk "to clear implicit arguments, add ': clear implicits'. 
" ++ strbrk "If you want to clear notation scopes, add ': clear scopes'") (* [nargs_for_red] is the number of arguments required to trigger reduction, [args] is the main list of arguments statuses, [more_implicits] is a list of extra lists of implicit statuses *) let vernac_arguments ~section_local reference args more_implicits flags = let env = Global.env () in let sigma = Evd.from_env env in let assert_flag = List.mem `Assert flags in let rename_flag = List.mem `Rename flags in let clear_scopes_flag = List.mem `ClearScopes flags in let extra_scopes_flag = List.mem `ExtraScopes flags in let clear_implicits_flag = List.mem `ClearImplicits flags in let default_implicits_flag = List.mem `DefaultImplicits flags in let never_unfold_flag = List.mem `ReductionNeverUnfold flags in let nomatch_flag = List.mem `ReductionDontExposeCase flags in let clear_bidi_hint = List.mem `ClearBidiHint flags in let err_incompat x y = CErrors.user_err Pp.(str ("Options \""^x^"\" and \""^y^"\" are incompatible.")) in if assert_flag && rename_flag then err_incompat "assert" "rename"; if clear_scopes_flag && extra_scopes_flag then err_incompat "clear scopes" "extra scopes"; if clear_implicits_flag && default_implicits_flag then err_incompat "clear implicits" "default implicits"; let args, nargs_for_red, nargs_before_bidi, _i = List.fold_left (fun (args,red,bidi,i) arg -> match arg with | RealArg arg -> (arg::args,red,bidi,i+1) | VolatileArg -> if Option.has_some red then CErrors.user_err Pp.(str "The \"/\" modifier may only occur once."); (args,Some i,bidi,i) | BidiArg -> if Option.has_some bidi then CErrors.user_err Pp.(str "The \"&\" modifier may only occur once."); (args,red,Some i,i)) ([],None,None,0) args in let args = List.rev args in let sr = smart_global reference in let inf_names = let ty, _ = Typeops.type_of_global_in_context env sr in List.map pi1 (Impargs.compute_implicits_names env sigma (EConstr.of_constr ty)) in let prev_names = try Arguments_renaming.arguments_names sr with Not_found -> inf_names in let num_args = List.length inf_names in assert (Int.equal num_args (List.length prev_names)); let names_of args = List.map (fun a -> a.name) args in (* Checks *) let err_extra_args names = CErrors.user_err Pp.(strbrk "Extra arguments: " ++ prlist_with_sep pr_comma Name.print names ++ str ".") in let err_missing_args names = CErrors.user_err Pp.(strbrk "The following arguments are not declared: " ++ prlist_with_sep pr_comma Name.print names ++ str ".") in let rec check_extra_args extra_args = match extra_args with | [] -> () | { notation_scope = None } :: _ -> CErrors.user_err Pp.(str"Extra arguments should specify a scope.") | { notation_scope = Some _ } :: args -> check_extra_args args in let args, scopes = let scopes = List.map (fun { notation_scope = s } -> s) args in if List.length args > num_args then let args, extra_args = List.chop num_args args in if extra_scopes_flag then (check_extra_args extra_args; (args, scopes)) else err_extra_args (names_of extra_args) else args, scopes in if Option.cata (fun n -> n > num_args) false nargs_for_red then CErrors.user_err Pp.(str "The \"/\" modifier should be put before any extra scope."); if Option.cata (fun n -> n > num_args) false nargs_before_bidi then CErrors.user_err Pp.(str "The \"&\" modifier should be put before any extra scope."); let scopes_specified = List.exists Option.has_some scopes in if scopes_specified && clear_scopes_flag then CErrors.user_err Pp.(str "The \"clear scopes\" flag is incompatible with scope annotations."); let names = List.map 
(fun { name } -> name) args in let names = names :: List.map (List.map fst) more_implicits in let rename_flag_required = ref false in let example_renaming = ref None in let save_example_renaming renaming = rename_flag_required := !rename_flag_required || not (Name.equal (fst renaming) Anonymous); if Option.is_empty !example_renaming then example_renaming := Some renaming in let rec names_union names1 names2 = match names1, names2 with | [], [] -> [] | _ :: _, [] -> names1 | [], _ :: _ -> names2 | (Name _ as name) :: names1, Anonymous :: names2 | Anonymous :: names1, (Name _ as name) :: names2 -> name :: names_union names1 names2 | name1 :: names1, name2 :: names2 -> if Name.equal name1 name2 then name1 :: names_union names1 names2 else CErrors.user_err Pp.(str "Argument lists should agree on the names they provide.") in let names = List.fold_left names_union [] names in let rec rename prev_names names = match prev_names, names with | [], [] -> [] | [], _ :: _ -> err_extra_args names | _ :: _, [] when assert_flag -> (* Error messages are expressed in terms of original names, not renamed ones. *) err_missing_args (List.lastn (List.length prev_names) inf_names) | _ :: _, [] -> prev_names | prev :: prev_names, Anonymous :: names -> prev :: rename prev_names names | prev :: prev_names, (Name id as name) :: names -> if not (Name.equal prev name) then save_example_renaming (prev,name); name :: rename prev_names names in let names = rename prev_names names in let renaming_specified = Option.has_some !example_renaming in if !rename_flag_required && not rename_flag then begin let msg = let open Pp in match !example_renaming with | None -> strbrk "To rename arguments the \"rename\" flag must be specified." | Some (o,n) -> strbrk "Flag \"rename\" expected to rename " ++ Name.print o ++ strbrk " into " ++ Name.print n ++ str "." in CErrors.user_err msg end; let implicits = List.map (fun { name; implicit_status = i } -> (name,i)) args in let implicits = implicits :: more_implicits in let implicits_specified = match implicits with | [l] -> List.exists (function _, Glob_term.Explicit -> false | _ -> true) l | _ -> true in if implicits_specified && clear_implicits_flag then CErrors.user_err Pp.(str "The \"clear implicits\" flag must be omitted if implicit annotations are given."); if implicits_specified && default_implicits_flag then CErrors.user_err Pp.(str "The \"default implicits\" flag is incompatible with implicit annotations."); let rargs = Util.List.map_filter (function (n, true) -> Some n | _ -> None) (Util.List.map_i (fun i { recarg_like = b } -> i, b) 0 args) in let red_behavior = let open Reductionops.ReductionBehaviour in match never_unfold_flag, nomatch_flag, rargs, nargs_for_red with | true, false, [], None -> Some NeverUnfold | true, true, _, _ -> err_incompat "simpl never" "simpl nomatch" | true, _, _::_, _ -> err_incompat "simpl never" "!" 
| true, _, _, Some _ -> err_incompat "simpl never" "/" | false, false, [], None -> None | false, false, _, _ -> Some (UnfoldWhen { nargs = nargs_for_red; recargs = rargs; }) | false, true, _, _ -> Some (UnfoldWhenNoMatch { nargs = nargs_for_red; recargs = rargs; }) in let red_modifiers_specified = Option.has_some red_behavior in let bidi_hint_specified = Option.has_some nargs_before_bidi in if bidi_hint_specified && clear_bidi_hint then err_incompat "clear bidirectionality hint" "&"; (* Actions *) if renaming_specified then begin Arguments_renaming.rename_arguments section_local sr names end; if scopes_specified || clear_scopes_flag then begin let scopes = List.map (Option.map (fun {loc;v=k} -> try ignore (Notation.find_scope k); k with CErrors.UserError _ -> Notation.find_delimiters_scope ?loc k)) scopes in Notation.declare_arguments_scope section_local (smart_global reference) scopes end; if implicits_specified || clear_implicits_flag then Impargs.set_implicits section_local (smart_global reference) implicits; if default_implicits_flag then Impargs.declare_implicits section_local (smart_global reference); if red_modifiers_specified then begin match sr with | GlobRef.ConstRef _ -> Reductionops.ReductionBehaviour.set ~local:section_local sr (Option.get red_behavior) | _ -> CErrors.user_err Pp.(strbrk "Modifiers of the behavior of the simpl tactic "++ strbrk "are relevant for constants only.") end; if bidi_hint_specified then begin let n = Option.get nargs_before_bidi in if section_local then Pretyping.add_bidirectionality_hint sr n else Lib.add_anonymous_leaf (inBidiHints (sr, Some n)) end; if clear_bidi_hint then begin if section_local then Pretyping.clear_bidirectionality_hint sr else Lib.add_anonymous_leaf (inBidiHints (sr, None)) end; if not (renaming_specified || implicits_specified || scopes_specified || red_modifiers_specified || bidi_hint_specified) && (List.is_empty flags) then warn_arguments_assert sr coq-8.15.0/vernac/comArguments.mli000066400000000000000000000016241417001151100170060ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Libnames.qualid Constrexpr.or_by_notation -> Vernacexpr.vernac_argument_status list -> (Names.Name.t * Glob_term.binding_kind) list list -> Vernacexpr.arguments_modifier list -> unit coq-8.15.0/vernac/comAssumption.ml000066400000000000000000000264341417001151100170400ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Univ.UContext.instance univs | UState.Monomorphic_entry _ -> Univ.Instance.empty let declare_axiom is_coe ~poly ~local ~kind typ (univs, ubinders) imps nl {CAst.v=name} = let do_instance = let open Decls in match kind with | Context -> true (* The typeclass behaviour of Variable and Context doesn't depend on section status *) | Definitional | Logical | Conjectural -> false in let inl = let open Declaremods in match nl with | NoInline -> None | DefaultInline -> Some (Flags.get_inline_level()) | InlineAt i -> Some i in let kind = Decls.IsAssumption kind in let entry = Declare.parameter_entry ~univs:(univs, ubinders) ?inline:inl typ in let decl = Declare.ParameterEntry entry in let kn = Declare.declare_constant ~name ~local ~kind decl in let gr = GlobRef.ConstRef kn in let () = maybe_declare_manual_implicits false gr 
imps in let () = Declare.assumption_message name in let env = Global.env () in let sigma = Evd.from_env env in let () = if do_instance then Classes.declare_instance env sigma None Hints.SuperGlobal gr in let local = match local with | Locality.ImportNeedQualified -> true | Locality.ImportDefaultBehavior -> false in let () = if is_coe then ComCoercion.try_add_new_coercion gr ~local ~poly in let inst = instance_of_univ_entry univs in (gr,inst) let interp_assumption ~program_mode env sigma impl_env bl c = let flags = { Pretyping.all_no_fail_flags with program_mode } in let sigma, (impls, ((env_bl, ctx), impls1)) = interp_context_evars ~program_mode ~impl_env env sigma bl in let sigma, (ty, impls2) = interp_type_evars_impls ~flags env_bl sigma ~impls c in let ty = EConstr.it_mkProd_or_LetIn ty ctx in sigma, ty, impls1@impls2 let empty_poly_univ_entry = UState.Polymorphic_entry Univ.UContext.empty, UnivNames.empty_binders let empty_mono_univ_entry = UState.Monomorphic_entry Univ.ContextSet.empty, UnivNames.empty_binders let empty_univ_entry ~poly = if poly then empty_poly_univ_entry else empty_mono_univ_entry (* When declarations are monomorphic (which is always the case in sections, even when universes are treated as polymorphic variables) the universe constraints and universe names are declared with the first declaration only. *) let clear_univs scope univ = match scope, univ with | Locality.Global _, (UState.Polymorphic_entry _, _ as univs) -> univs | _, (UState.Monomorphic_entry _, _) -> empty_univ_entry ~poly:false | Locality.Discharge, (UState.Polymorphic_entry _, _) -> empty_univ_entry ~poly:true let declare_assumptions ~poly ~scope ~kind univs nl l = let _, _ = List.fold_left (fun (subst,univs) ((is_coe,idl),typ,imps) -> (* NB: here univs are ignored when scope=Discharge *) let typ = replace_vars subst typ in let univs,subst' = List.fold_left_map (fun univs id -> let refu = match scope with | Locality.Discharge -> declare_variable is_coe ~kind typ univs imps Glob_term.Explicit id; GlobRef.VarRef id.CAst.v, Univ.Instance.empty | Locality.Global local -> declare_axiom is_coe ~local ~poly ~kind typ univs imps nl id in clear_univs scope univs, (id.CAst.v, Constr.mkRef refu)) univs idl in subst'@subst, clear_univs scope univs) ([], univs) l in () let maybe_error_many_udecls = function | ({CAst.loc;v=id}, Some _) -> user_err ?loc Pp.(str "When declaring multiple axioms in one command, " ++ str "only the first is allowed a universe binder " ++ str "(which will be shared by the whole block).") | (_, None) -> () let process_assumptions_udecls ~scope l = let udecl, first_id = match l with | (coe, ((id, udecl)::rest, c))::rest' -> List.iter maybe_error_many_udecls rest; List.iter (fun (coe, (idl, c)) -> List.iter maybe_error_many_udecls idl) rest'; udecl, id | (_, ([], _))::_ | [] -> assert false in let () = match scope, udecl with | Locality.Discharge, Some _ -> let loc = first_id.CAst.loc in let msg = Pp.str "Section variables cannot be polymorphic." in user_err ?loc msg | _ -> () in udecl, List.map (fun (coe, (idl, c)) -> coe, (List.map fst idl, c)) l let do_assumptions ~program_mode ~poly ~scope ~kind nl l = let open Context.Named.Declaration in let env = Global.env () in let udecl, l = process_assumptions_udecls ~scope l in let sigma, udecl = interp_univ_decl_opt env udecl in let l = if poly then (* Separate declarations so that A B : Type puts A and B in different levels. 
*) List.fold_right (fun (is_coe,(idl,c)) acc -> List.fold_right (fun id acc -> (is_coe, ([id], c)) :: acc) idl acc) l [] else l in (* We interpret all declarations in the same evar_map, i.e. as a telescope. *) let (sigma,_,_),l = List.fold_left_map (fun (sigma,env,ienv) (is_coe,(idl,c)) -> let sigma,t,imps = interp_assumption ~program_mode env sigma ienv [] c in let r = Retyping.relevance_of_type env sigma t in let env = EConstr.push_named_context (List.map (fun {CAst.v=id} -> LocalAssum (make_annot id r,t)) idl) env in let ienv = List.fold_right (fun {CAst.v=id} ienv -> let impls = compute_internalization_data env sigma id Variable t imps in Id.Map.add id impls ienv) idl ienv in ((sigma,env,ienv),((is_coe,idl),t,imps))) (sigma,env,empty_internalization_env) l in let sigma = solve_remaining_evars all_and_fail_flags env sigma in (* The universe constraints come from the whole telescope. *) let sigma = Evd.minimize_universes sigma in let nf_evar c = EConstr.to_constr sigma c in let uvars, l = List.fold_left_map (fun uvars (coe,t,imps) -> let t = nf_evar t in let uvars = Univ.Level.Set.union uvars (Vars.universes_of_constr t) in uvars, (coe,t,imps)) Univ.Level.Set.empty l in (* XXX: Using `Declare.prepare_parameter` here directly is not possible as we indeed declare several parameters; however, restrict_universe_context should be called in a centralized place IMO, thus I think we should adapt `prepare_parameter` to handle this case too. *) let sigma = Evd.restrict_universe_context sigma uvars in let univs = Evd.check_univ_decl ~poly sigma udecl in declare_assumptions ~poly ~scope ~kind univs nl l let context_subst subst (name,b,t,impl) = name, Option.map (Vars.substl subst) b, Vars.substl subst t, impl let context_insection sigma ~poly ctx = let uctx = Evd.evar_universe_context sigma in let univs = UState.univ_entry ~poly uctx in let fn i subst (name,_,_,_ as d) = let d = context_subst subst d in let univs = if i = 0 then univs else empty_univ_entry ~poly in let () = match d with | name, None, t, impl -> let kind = Decls.Context in declare_variable false ~kind t univs [] impl (CAst.make name) | name, Some b, t, impl -> let entry = Declare.definition_entry ~univs ~types:t b in (* XXX Fixme: Use Declare.prepare_definition *) let kind = Decls.(IsDefinition Definition) in let _ : GlobRef.t = Declare.declare_entry ~name ~scope:Locality.Discharge ~kind ~impargs:[] ~uctx entry in () in Constr.mkVar name :: subst in let _ : Vars.substl = List.fold_left_i fn 0 [] ctx in () let context_nosection sigma ~poly ctx = let (univ_entry,ubinders as univs) = Evd.univ_entry ~poly sigma in let fn i subst d = let (name,b,t,_impl) = context_subst subst d in let kind = Decls.(IsAssumption Logical) in let local = if Lib.is_modtype () then Locality.ImportDefaultBehavior else Locality.ImportNeedQualified in (* Multiple monomorphic axioms: declare universes only on the first declaration *) let univs = if i = 0 then univs else clear_univs (Locality.Global local) univs in let decl = match b with | None -> let entry = Declare.parameter_entry ~univs:(univ_entry, ubinders) t in Declare.ParameterEntry entry | Some b -> let entry = Declare.definition_entry ~univs ~types:t b in Declare.DefinitionEntry entry in let cst = Declare.declare_constant ~name ~kind ~local decl in let () = Declare.assumption_message name in let env = Global.env () in (* why local when is_modtype? 
*) let locality = if Lib.is_modtype () then Hints.Local else Hints.SuperGlobal in let () = if Lib.is_modtype() || Option.is_empty b then Classes.declare_instance env sigma None locality (GlobRef.ConstRef cst) in Constr.mkConstU (cst,instance_of_univ_entry univ_entry) :: subst in let _ : Vars.substl = List.fold_left_i fn 0 [] ctx in () let context ~poly l = let env = Global.env() in let sigma = Evd.from_env env in let sigma, (_, ((_env, ctx), impls)) = interp_context_evars ~program_mode:false env sigma l in (* Note, we must use the normalized evar from now on! *) let ce t = Pretyping.check_evars env sigma t in let () = List.iter (fun decl -> Context.Rel.Declaration.iter_constr ce decl) ctx in let sigma, ctx = Evarutil.finalize sigma (fun nf -> List.map (RelDecl.map_constr_het nf) ctx) in (* reorder, evar-normalize and add implicit status *) let ctx = List.rev_map (fun d -> let {binder_name=name}, b, t = RelDecl.to_tuple d in let name = match name with | Anonymous -> user_err Pp.(str "Anonymous variables not allowed in contexts.") | Name id -> id in let impl = let open Glob_term in let search x = match x.CAst.v with | Some (Name id',max) when Id.equal name id' -> Some (if max then MaxImplicit else NonMaxImplicit) | _ -> None in try CList.find_map search impls with Not_found -> Explicit in name,b,t,impl) ctx in if Global.sections_are_opened () then context_insection sigma ~poly ctx else context_nosection sigma ~poly ctx coq-8.15.0/vernac/comAssumption.mli000066400000000000000000000034041417001151100172010ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Environ.env -> Evd.evar_map -> Constrintern.internalization_env -> Constrexpr.local_binder_expr list -> constr_expr -> Evd.evar_map * EConstr.t * Impargs.manual_implicits val do_assumptions : program_mode:bool -> poly:bool -> scope:Locality.locality -> kind:Decls.assumption_object_kind -> Declaremods.inline -> (ident_decl list * constr_expr) with_coercion list -> unit val declare_variable : coercion_flag -> kind:Decls.assumption_object_kind -> Constr.types -> UState.named_universes_entry -> Impargs.manual_implicits -> Glob_term.binding_kind -> variable CAst.t -> unit val declare_axiom : coercion_flag -> poly:bool -> local:Locality.import_status -> kind:Decls.assumption_object_kind -> Constr.types -> UState.named_universes_entry -> Impargs.manual_implicits -> Declaremods.inline -> variable CAst.t -> GlobRef.t * Univ.Instance.t (** Context command *) val context : poly:bool -> local_binder_expr list -> unit coq-8.15.0/vernac/comCoercion.ml000066400000000000000000000314441417001151100164340ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* (Printer.pr_global g ++ str" is already a coercion") | NotAFunction -> (Printer.pr_global g ++ str" is not a function") | NoSource (Some cl) -> (str "Cannot recognize " ++ pr_class cl ++ str " as a source class of " ++ Printer.pr_global g) | NoSource None -> (str ": cannot find the source class of " ++ Printer.pr_global g) | ForbiddenSourceClass cl -> pr_class cl ++ str " cannot be a source class" | NoTarget -> (str"Cannot find the target class") | WrongTarget (clt,cl) -> (str"Found target class " ++ pr_class cl ++ str " instead of " ++ pr_class clt) | NotAClass ref -> (str "Type of " ++ 
Printer.pr_global ref ++ str " does not end with a sort") (* Verifications pour l'ajout d'une classe *) let check_reference_arity ref = let env = Global.env () in let c, _ = Typeops.type_of_global_in_context env ref in if not (Reductionops.is_arity env (Evd.from_env env) (EConstr.of_constr c)) (* FIXME *) then raise (CoercionError (NotAClass ref)) let check_arity = function | CL_FUN | CL_SORT -> () | CL_CONST cst -> check_reference_arity (GlobRef.ConstRef cst) | CL_PROJ p -> check_reference_arity (GlobRef.ConstRef (Projection.Repr.constant p)) | CL_SECVAR id -> check_reference_arity (GlobRef.VarRef id) | CL_IND kn -> check_reference_arity (GlobRef.IndRef kn) (* Coercions *) (* check that the computed target is the provided one *) let check_target clt = function | Some cl when not (cl_typ_eq cl clt) -> raise (CoercionError (WrongTarget(clt,cl))) | _ -> () (* condition d'heritage uniforme *) let uniform_cond sigma ctx lt = List.for_all2eq (EConstr.eq_constr sigma) lt (Context.Rel.instance_list EConstr.mkRel 0 ctx) let class_of_global = function | GlobRef.ConstRef sp -> (match Structures.PrimitiveProjections.find_opt sp with | Some p -> CL_PROJ p | None -> CL_CONST sp) | GlobRef.IndRef sp -> CL_IND sp | GlobRef.VarRef id -> CL_SECVAR id | GlobRef.ConstructRef _ as c -> user_err (str "Constructors, such as " ++ Printer.pr_global c ++ str ", cannot be used as a class.") (* lp est la liste (inverse'e) des arguments de la coercion ids est le nom de la classe source sps_opt est le sp de la classe source dans le cas des structures retourne: la classe source nbre d'arguments de la classe le constr de la class la liste des variables dont depend la classe source l'indice de la classe source dans la liste lp *) let get_source env lp source = let open Context.Rel.Declaration in match source with | None -> (* Take the latest non let-in argument *) let rec aux = function | [] -> raise Not_found | LocalDef _ :: lt -> aux lt | LocalAssum (_,t1) :: lt -> let cl1,u1,lv1 = find_class_type (push_rel_context lt env) Evd.empty (EConstr.of_constr t1) in cl1,lt,lv1,1 in aux lp | Some cl -> (* Take the first argument that matches *) let rec aux env acc = function | [] -> raise Not_found | LocalDef _ as decl :: lt -> aux (push_rel decl env) (decl::acc) lt | LocalAssum (_,t1) as decl :: lt -> try let cl1,u1,lv1 = find_class_type env Evd.empty (EConstr.of_constr t1) in if cl_typ_eq cl cl1 then cl1,acc,lv1,Context.Rel.nhyps lt+1 else raise Not_found with Not_found -> aux (push_rel decl env) (decl::acc) lt in aux env [] (List.rev lp) let get_target env lp t ind = if (ind > 1) then CL_FUN else match pi1 (find_class_type (push_rel_context lp env) Evd.empty (EConstr.of_constr t)) with | CL_CONST p when Structures.PrimitiveProjections.mem p -> CL_PROJ (Option.get @@ Structures.PrimitiveProjections.find_opt p) | x -> x let strength_of_cl = function | CL_CONST kn -> `GLOBAL | CL_SECVAR id -> `LOCAL | _ -> `GLOBAL let strength_of_global = function | GlobRef.VarRef _ -> `LOCAL | _ -> `GLOBAL let get_strength stre ref cls clt = let stres = strength_of_cl cls in let stret = strength_of_cl clt in let stref = strength_of_global ref in strength_min [stre;stres;stret;stref] let ident_key_of_class = function | CL_FUN -> "Funclass" | CL_SORT -> "Sortclass" | CL_CONST sp -> Label.to_string (Constant.label sp) | CL_PROJ sp -> Label.to_string (Projection.Repr.label sp) | CL_IND (sp,_) -> Label.to_string (MutInd.label sp) | CL_SECVAR id -> Id.to_string id (* Identity coercion *) let error_not_transparent source = user_err (pr_class source 
++ str " must be a transparent constant.") let build_id_coercion idf_opt source poly = let env = Global.env () in let sigma = Evd.from_env env in let sigma, vs = match source with | CL_CONST sp -> Evd.fresh_global env sigma (GlobRef.ConstRef sp) | _ -> error_not_transparent source in let vs = EConstr.Unsafe.to_constr vs in let c = match constant_opt_value_in env (destConst vs) with | Some c -> c | None -> error_not_transparent source in let lams,t = decompose_lam_assum c in let val_f = it_mkLambda_or_LetIn (mkLambda (make_annot (Name Namegen.default_dependent_ident) Sorts.Relevant, applistc vs (Context.Rel.instance_list mkRel 0 lams), mkRel 1)) lams in let typ_f = List.fold_left (fun d c -> Term.mkProd_wo_LetIn c d) (mkProd (make_annot Anonymous Sorts.Relevant, applistc vs (Context.Rel.instance_list mkRel 0 lams), lift 1 t)) lams in (* juste pour verification *) let sigma, val_t = Typing.type_of env sigma (EConstr.of_constr val_f) in let () = if not (Reductionops.is_conv_leq env sigma val_t (EConstr.of_constr typ_f)) then user_err (strbrk "Cannot be defined as coercion (maybe a bad number of arguments).") in let name = match idf_opt with | Some idf -> idf | None -> let cl,u,_ = find_class_type env sigma (EConstr.of_constr t) in Id.of_string ("Id_"^(ident_key_of_class source)^"_"^ (ident_key_of_class cl)) in let univs = Evd.univ_entry ~poly sigma in let constr_entry = (* Cast is necessary to express [val_f] is identity *) DefinitionEntry (definition_entry ~types:typ_f ~univs ~inline:true (mkCast (val_f, DEFAULTcast, typ_f))) in let kind = Decls.(IsDefinition IdentityCoercion) in let kn = declare_constant ~name ~kind constr_entry in GlobRef.ConstRef kn let check_source = function | Some (CL_FUN as s) -> raise (CoercionError (ForbiddenSourceClass s)) | _ -> () let cache_coercion (_,c) = let env = Global.env () in let sigma = Evd.from_env env in Coercionops.declare_coercion env sigma c let open_coercion i o = if Int.equal i 1 then cache_coercion o let discharge_coercion (_, c) = if c.coe_local then None else let n = try let ins = Lib.section_instance c.coe_value in Array.length (snd ins) with Not_found -> 0 in let nc = { c with coe_param = n + c.coe_param; coe_is_projection = Option.map Lib.discharge_proj_repr c.coe_is_projection; } in Some nc let rebuild_coercion c = { c with coe_typ = fst (Typeops.type_of_global_in_context (Global.env ()) c.coe_value) } let classify_coercion obj = if obj.coe_local then Dispose else Substitute obj let coe_cat = create_category "coercions" let inCoercion : coe_info_typ -> obj = declare_object {(default_object "COERCION") with open_function = simple_open ~cat:coe_cat open_coercion; cache_function = cache_coercion; subst_function = (fun (subst,c) -> subst_coercion subst c); classify_function = classify_coercion; discharge_function = discharge_coercion; rebuild_function = rebuild_coercion } let declare_coercion coef typ ?(local = false) ~isid ~src:cls ~target:clt ~params:ps () = let isproj = match coef with | GlobRef.ConstRef c -> Structures.PrimitiveProjections.find_opt c | _ -> None in let c = { coe_value = coef; coe_typ = typ; coe_local = local; coe_is_identity = isid; coe_is_projection = isproj; coe_source = cls; coe_target = clt; coe_param = ps; } in Lib.add_anonymous_leaf (inCoercion c) (* nom de la fonction coercion strength de f nom de la classe source (optionnel) sp de la classe source (dans le cas des structures) nom de la classe target (optionnel) booleen "coercion identite'?" lorque source est None alors target est None aussi. 
*) let warn_uniform_inheritance = CWarnings.create ~name:"uniform-inheritance" ~category:"typechecker" (fun g -> Printer.pr_global g ++ strbrk" does not respect the uniform inheritance condition") let add_new_coercion_core coef stre poly source target isid : unit = check_source source; let env = Global.env () in let t, _ = Typeops.type_of_global_in_context env coef in if coercion_exists coef then raise (CoercionError AlreadyExists); let lp,tg = decompose_prod_assum t in let llp = List.length lp in if Int.equal llp 0 then raise (CoercionError NotAFunction); let (cls,ctx,lvs,ind) = try get_source env lp source with Not_found -> raise (CoercionError (NoSource source)) in check_source (Some cls); if not (uniform_cond Evd.empty (* FIXME - for when possibly called with unresolved evars in the future *) ctx lvs) then warn_uniform_inheritance coef; let clt = try get_target env lp tg ind with Not_found -> raise (CoercionError NoTarget) in check_target clt target; check_arity cls; check_arity clt; let local = match get_strength stre coef cls clt with | `LOCAL -> true | `GLOBAL -> false in declare_coercion coef t ~local ~isid ~src:cls ~target:clt ~params:(List.length lvs) () let try_add_new_coercion_core ref ~local c d e f = try add_new_coercion_core ref (loc_of_bool local) c d e f with CoercionError e -> user_err (explain_coercion_error ref e ++ str ".") let try_add_new_coercion ref ~local ~poly = try_add_new_coercion_core ref ~local poly None None false let try_add_new_coercion_subclass cl ~local ~poly = let coe_ref = build_id_coercion None cl poly in try_add_new_coercion_core coe_ref ~local poly (Some cl) None true let try_add_new_coercion_with_target ref ~local ~poly ~source ~target = try_add_new_coercion_core ref ~local poly (Some source) (Some target) false let try_add_new_identity_coercion id ~local ~poly ~source ~target = let ref = build_id_coercion (Some id) source poly in try_add_new_coercion_core ref ~local poly (Some source) (Some target) true let try_add_new_coercion_with_source ref ~local ~poly ~source = try_add_new_coercion_core ref ~local poly (Some source) None false let add_coercion_hook poly { Declare.Hook.S.scope; dref; _ } = let open Locality in let local = match scope with | Discharge -> assert false (* Local Coercion in section behaves like Local Definition *) | Global ImportNeedQualified -> true | Global ImportDefaultBehavior -> false in let () = try_add_new_coercion dref ~local ~poly in let msg = Nametab.pr_global_env Id.Set.empty dref ++ str " is now a coercion" in Flags.if_verbose Feedback.msg_info msg let add_coercion_hook ~poly = Declare.Hook.make (add_coercion_hook poly) let add_subclass_hook ~poly { Declare.Hook.S.scope; dref; _ } = let open Locality in let stre = match scope with | Discharge -> assert false (* Local Subclass in section behaves like Local Definition *) | Global ImportNeedQualified -> true | Global ImportDefaultBehavior -> false in let cl = class_of_global dref in try_add_new_coercion_subclass cl ~local:stre ~poly let add_subclass_hook ~poly = Declare.Hook.make (add_subclass_hook ~poly) coq-8.15.0/vernac/comCoercion.mli000066400000000000000000000042101417001151100165740ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* local:bool -> poly:bool -> source:cl_typ -> target:cl_typ -> unit (** [try_add_new_coercion ref s] declares [ref], assumed to be of type [(x1:T1)...(xn:Tn)src->tg], as a 
coercion from [src] to [tg] *) val try_add_new_coercion : GlobRef.t -> local:bool -> poly:bool -> unit (** [try_add_new_coercion_subclass cst s] expects that [cst] denotes a transparent constant which unfolds to some class [tg]; it declares an identity coercion from [cst] to [tg], named something like ["Id_cst_tg"] *) val try_add_new_coercion_subclass : cl_typ -> local:bool -> poly:bool -> unit (** [try_add_new_coercion_with_source ref s src] declares [ref] as a coercion from [src] to [tg] where the target is inferred from the type of [ref] *) val try_add_new_coercion_with_source : GlobRef.t -> local:bool -> poly:bool -> source:cl_typ -> unit (** [try_add_new_identity_coercion id s src tg] enriches the environment with a new definition of name [id] declared as an identity coercion from [src] to [tg] *) val try_add_new_identity_coercion : Id.t -> local:bool -> poly:bool -> source:cl_typ -> target:cl_typ -> unit val add_coercion_hook : poly:bool -> Declare.Hook.t val add_subclass_hook : poly:bool -> Declare.Hook.t val class_of_global : GlobRef.t -> cl_typ coq-8.15.0/vernac/comDefinition.ml000066400000000000000000000154311417001151100167610ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* sigma, body | Some red -> let red, _ = reduction_of_red_expr env red in red env sigma body let warn_implicits_in_term = CWarnings.create ~name:"implicits-in-term" ~category:"implicits" (fun () -> strbrk "Implicit arguments declaration relies on type." ++ spc () ++ strbrk "Discarding incompatible declaration in term.") let check_imps ~impsty ~impsbody = let rec aux impsty impsbody = match impsty, impsbody with | a1 :: impsty, a2 :: impsbody -> let () = match a1.CAst.v, a2.CAst.v with | None , None | Some _, None -> () | Some (_,b1) , Some (_,b2) -> if not ((b1:bool) = b2) then warn_implicits_in_term ?loc:a2.CAst.loc () | None, Some _ -> warn_implicits_in_term ?loc:a2.CAst.loc () in aux impsty impsbody | _ :: _, [] | [], _ :: _ -> (* Information only on one side *) () | [], [] -> () in aux impsty impsbody let protect_pattern_in_binder bl c ctypopt = (* We turn "Definition d binders := body : typ" into *) (* "Definition d := fun binders => body:type" *) (* This is a hack while waiting for LocalPattern in regular environments *) if List.exists (function Constrexpr.CLocalPattern _ -> true | _ -> false) bl then let t = match ctypopt with | None -> CAst.make ?loc:c.CAst.loc (Constrexpr.CHole (None,Namegen.IntroAnonymous,None)) | Some t -> t in let loc = Loc.merge_opt c.CAst.loc t.CAst.loc in let c = CAst.make ?loc @@ Constrexpr.CCast (c, Constr.DEFAULTcast, t) in let loc = match List.hd bl with | Constrexpr.CLocalAssum (a::_,_,_) | Constrexpr.CLocalDef (a,_,_) -> a.CAst.loc | Constrexpr.CLocalPattern {CAst.loc} -> loc | Constrexpr.CLocalAssum ([],_,_) -> assert false in let apply_under_binders f env evd c = let rec aux env evd c = let open Constr in let open EConstr in let open Context.Rel.Declaration in match kind evd c with | Lambda (x,t,c) -> let evd,c = aux (push_rel (LocalAssum (x,t)) env) evd c in evd, mkLambda (x,t,c) | LetIn (x,b,t,c) -> let evd,c = aux (push_rel (LocalDef (x,b,t)) env) evd c in evd, mkLetIn (x,t,b,c) | Case (ci,u,pms,p,iv,a,bl) -> let (ci, p, iv, a, bl) = EConstr.expand_case env evd (ci, u, pms, p, iv, a, bl) in let evd,bl = Array.fold_left_map (aux env) evd bl in evd, mkCase (EConstr.contract_case env evd (ci, p, iv, a, bl)) | 
Cast (c,_,_) -> f env evd c (* we remove the cast we had set *) (* This last case may happen when reaching the proof of an impossible case, as when pattern-matching on a vector of length 1 *) | _ -> (evd,c) in aux env evd c in ([], Constrexpr_ops.mkLambdaCN ?loc:(Loc.merge_opt loc c.CAst.loc) bl c, None, apply_under_binders) else (bl, c, ctypopt, fun f env evd c -> f env evd c) let interp_definition ~program_mode env evd impl_env bl red_option c ctypopt = let flags = Pretyping.{ all_no_fail_flags with program_mode } in let (bl, c, ctypopt, apply_under_binders) = protect_pattern_in_binder bl c ctypopt in (* Build the parameters *) let evd, (impls, ((env_bl, ctx), imps1)) = interp_context_evars ~program_mode ~impl_env env evd bl in (* Build the type *) let evd, tyopt = Option.fold_left_map (interp_type_evars_impls ~flags ~impls env_bl) evd ctypopt in (* Build the body, and merge implicits from parameters and from type/body *) let evd, c, imps, tyopt = match tyopt with | None -> let evd, (c, impsbody) = interp_constr_evars_impls ~program_mode ~impls env_bl evd c in evd, c, imps1@impsbody, None | Some (ty, impsty) -> let evd, (c, impsbody) = interp_casted_constr_evars_impls ~program_mode ~impls env_bl evd c ty in check_imps ~impsty ~impsbody; evd, c, imps1@impsty, Some ty in (* Do the reduction *) let evd, c = apply_under_binders (red_constant_body red_option) env_bl evd c in (* Declare the definition *) let c = EConstr.it_mkLambda_or_LetIn c ctx in let tyopt = Option.map (fun ty -> EConstr.it_mkProd_or_LetIn ty ctx) tyopt in evd, (c, tyopt), imps let definition_using env evd ~body ~types ~using = let terms = Option.List.cons types [body] in Option.map (fun using -> Proof_using.definition_using env evd ~using ~terms) using let do_definition ?hook ~name ~scope ~poly ?typing_flags ~kind ?using udecl bl red_option c ctypopt = let program_mode = false in let env = Global.env() in let env = Environ.update_typing_flags ?typing_flags env in (* Explicitly bound universes and constraints *) let evd, udecl = interp_univ_decl_opt env udecl in let evd, (body, types), impargs = interp_definition ~program_mode env evd empty_internalization_env bl red_option c ctypopt in let using = definition_using env evd ~body ~types ~using in let kind = Decls.IsDefinition kind in let cinfo = Declare.CInfo.make ~name ~impargs ~typ:types ?using () in let info = Declare.Info.make ~scope ~kind ?hook ~udecl ~poly ?typing_flags () in let _ : Names.GlobRef.t = Declare.declare_definition ~info ~cinfo ~opaque:false ~body evd in () let do_definition_program ?hook ~pm ~name ~scope ~poly ?typing_flags ~kind ?using udecl bl red_option c ctypopt = let program_mode = true in let env = Global.env() in let env = Environ.update_typing_flags ?typing_flags env in (* Explicitly bound universes and constraints *) let evd, udecl = interp_univ_decl_opt env udecl in let evd, (body, types), impargs = interp_definition ~program_mode env evd empty_internalization_env bl red_option c ctypopt in let using = definition_using env evd ~body ~types ~using in let term, typ, uctx, obls = Declare.Obls.prepare_obligation ~name ~body ~types evd in let pm, _ = let cinfo = Declare.CInfo.make ~name ~typ ~impargs ?using () in let info = Declare.Info.make ~udecl ~scope ~poly ~kind ?hook ?typing_flags () in Declare.Obls.add_definition ~pm ~cinfo ~info ~term ~uctx obls in pm coq-8.15.0/vernac/comDefinition.mli000066400000000000000000000034151417001151100171310ustar00rootroot00000000000000(************************************************************************) (* * 
The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Environ.env -> Evd.evar_map -> Constrintern.internalization_env -> Constrexpr.local_binder_expr list -> red_expr option -> constr_expr -> constr_expr option -> Evd.evar_map * (EConstr.t * EConstr.t option) * Impargs.manual_implicits val do_definition : ?hook:Declare.Hook.t -> name:Id.t -> scope:Locality.locality -> poly:bool -> ?typing_flags:Declarations.typing_flags -> kind:Decls.definition_object_kind -> ?using:Vernacexpr.section_subset_expr -> universe_decl_expr option -> local_binder_expr list -> red_expr option -> constr_expr -> constr_expr option -> unit val do_definition_program : ?hook:Declare.Hook.t -> pm:Declare.OblState.t -> name:Id.t -> scope:Locality.locality -> poly:bool -> ?typing_flags:Declarations.typing_flags -> kind:Decls.logical_kind -> ?using:Vernacexpr.section_subset_expr -> universe_decl_expr option -> local_binder_expr list -> red_expr option -> constr_expr -> constr_expr option -> Declare.OblState.t coq-8.15.0/vernac/comFixpoint.ml000066400000000000000000000375551417001151100165040ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* ('a * ('a,'a list) union) list *) let rec partial_order cmp = function | [] -> [] | (x,xge)::rest -> let rec browse res xge' = function | [] -> let res = List.map (function | (z, Inr zge) when List.mem_f cmp x zge -> (z, Inr (List.union cmp zge xge')) | r -> r) res in (x,Inr xge')::res | y::xge -> let rec link y = try match List.assoc_f cmp y res with | Inl z -> link z | Inr yge -> if List.mem_f cmp x yge then let res = List.remove_assoc_f cmp y res in let res = List.map (function | (z, Inl t) -> if cmp t y then (z, Inl x) else (z, Inl t) | (z, Inr zge) -> if List.mem_f cmp y zge then (z, Inr (List.add_set cmp x (List.remove cmp y zge))) else (z, Inr zge)) res in browse ((y,Inl x)::res) xge' (List.union cmp xge yge) else browse res (List.add_set cmp y (List.union cmp xge' yge)) xge with Not_found -> browse res (List.add_set cmp y xge') xge in link y in browse (partial_order cmp rest) [] xge let non_full_mutual_message x xge y yge isfix rest = let reason = if Id.List.mem x yge then Id.print y ++ str " depends on " ++ Id.print x ++ strbrk " but not conversely" else if Id.List.mem y xge then Id.print x ++ str " depends on " ++ Id.print y ++ strbrk " but not conversely" else Id.print y ++ str " and " ++ Id.print x ++ strbrk " are not mutually dependent" in let e = if List.is_empty rest then reason else strbrk "e.g., " ++ reason in let k = if isfix then "fixpoint" else "cofixpoint" in let w = if isfix then strbrk "Well-foundedness check may fail unexpectedly." ++ fnl() else mt () in strbrk "Not a fully mutually defined " ++ str k ++ fnl () ++ str "(" ++ e ++ str ")." 
++ fnl () ++ w let warn_non_full_mutual = CWarnings.create ~name:"non-full-mutual" ~category:"fixpoints" (fun (x,xge,y,yge,isfix,rest) -> non_full_mutual_message x xge y yge isfix rest) let warn_non_recursive = CWarnings.create ~name:"non-recursive" ~category:"fixpoints" (fun (x,isfix) -> let k = if isfix then "fixpoint" else "cofixpoint" in strbrk "Not a truly recursive " ++ str k ++ str ".") let check_true_recursivity env evd ~isfix fixl = let names = List.map fst fixl in let preorder = List.map (fun (id,def) -> (id, List.filter (fun id' -> Termops.occur_var env evd id' def) names)) fixl in let po = partial_order Id.equal preorder in match List.filter (function (_,Inr _) -> true | _ -> false) po with | (x,Inr xge)::(y,Inr yge)::rest -> warn_non_full_mutual (x,xge,y,yge,isfix,rest) | _ -> match po with | [x,Inr []] -> warn_non_recursive (x,isfix) | _ -> () let interp_fix_context ~program_mode ~cofix env sigma fix = let before, after = if not cofix then Constrexpr_ops.split_at_annot fix.Vernacexpr.binders fix.Vernacexpr.rec_order else [], fix.Vernacexpr.binders in let sigma, (impl_env, ((env', ctx), imps)) = interp_context_evars ~program_mode env sigma before in let sigma, (impl_env', ((env'', ctx'), imps')) = interp_context_evars ~program_mode ~impl_env env' sigma after in let annot = Option.map (fun _ -> List.length (Termops.assums_of_rel_context ctx)) fix.Vernacexpr.rec_order in sigma, ((env'', ctx' @ ctx), (impl_env',imps @ imps'), annot) let interp_fix_ccl ~program_mode sigma impls (env,_) fix = let flags = Pretyping.{ all_no_fail_flags with program_mode } in let sigma, (c, impl) = interp_type_evars_impls ~flags ~impls env sigma fix.Vernacexpr.rtype in let r = Retyping.relevance_of_type env sigma c in sigma, (c, r, impl) let interp_fix_body ~program_mode env_rec sigma impls (_,ctx) fix ccl = let open EConstr in Option.cata (fun body -> let env = push_rel_context ctx env_rec in let sigma, body = interp_casted_constr_evars ~program_mode env sigma ~impls body ccl in sigma, Some (it_mkLambda_or_LetIn body ctx)) (sigma, None) fix.Vernacexpr.body_def let build_fix_type (_,ctx) ccl = EConstr.it_mkProd_or_LetIn ccl ctx let prepare_recursive_declaration fixnames fixrs fixtypes fixdefs = let defs = List.map (Vars.subst_vars (List.rev fixnames)) fixdefs in let names = List.map2 (fun id r -> Context.make_annot (Name id) r) fixnames fixrs in (Array.of_list names, Array.of_list fixtypes, Array.of_list defs) (* Jump over let-bindings. *) let compute_possible_guardness_evidences (ctx,_,recindex) = (* A recursive index is characterized by the number of lambdas to skip before finding the relevant inductive argument *) match recindex with | Some i -> [i] | None -> (* If recursive argument was not given by user, we try all args. An earlier approach was to look only for inductive arguments, but doing it properly involves delta-reduction, and it finally doesn't seem to worth the effort (except for huge mutual fixpoints ?) 
*) List.interval 0 (Context.Rel.nhyps ctx - 1) type ('constr, 'types) recursive_preentry = Id.t list * Sorts.relevance list * 'constr option list * 'types list (* Wellfounded definition *) let fix_proto sigma = Evd.fresh_global (Global.env ()) sigma (Coqlib.lib_ref "program.tactic.fix_proto") let interp_recursive env ~program_mode ~cofix (fixl : 'a Vernacexpr.fix_expr_gen list) = let open Context.Named.Declaration in let open EConstr in let fixnames = List.map (fun fix -> fix.Vernacexpr.fname.CAst.v) fixl in (* Interp arities allowing for unresolved types *) let all_universes = List.fold_right (fun sfe acc -> match sfe.Vernacexpr.univs , acc with | None , acc -> acc | x , None -> x | Some ls , Some us -> let open UState in let lsu = ls.univdecl_instance and usu = us.univdecl_instance in if not (CList.for_all2eq (fun x y -> Id.equal x.CAst.v y.CAst.v) lsu usu) then CErrors.user_err Pp.(str "(co)-recursive definitions should all have the same universe binders"); Some us) fixl None in let sigma, decl = interp_univ_decl_opt env all_universes in let sigma, (fixctxs, fiximppairs, fixannots) = on_snd List.split3 @@ List.fold_left_map (fun sigma -> interp_fix_context ~program_mode env sigma ~cofix) sigma fixl in let fixctximpenvs, fixctximps = List.split fiximppairs in let sigma, (fixccls,fixrs,fixcclimps) = on_snd List.split3 @@ List.fold_left3_map (interp_fix_ccl ~program_mode) sigma fixctximpenvs fixctxs fixl in let fixtypes = List.map2 build_fix_type fixctxs fixccls in let fixtypes = List.map (fun c -> Evarutil.nf_evar sigma c) fixtypes in let fiximps = List.map3 (fun ctximps cclimps (_,ctx) -> ctximps@cclimps) fixctximps fixcclimps fixctxs in let sigma, rec_sign = List.fold_left2 (fun (sigma, env') id t -> if program_mode then let sigma, sort = Typing.type_of ~refresh:true env sigma t in let sigma, fixprot = try let sigma, h_term = fix_proto sigma in let app = mkApp (h_term, [|sort; t|]) in Typing.solve_evars env sigma app with e when CErrors.noncritical e -> sigma, t in sigma, LocalAssum (Context.make_annot id Sorts.Relevant,fixprot) :: env' else sigma, LocalAssum (Context.make_annot id Sorts.Relevant,t) :: env') (sigma,[]) fixnames fixtypes in let env_rec = push_named_context rec_sign env in (* Get interpretation metadatas *) let impls = compute_internalization_env env sigma Recursive fixnames fixtypes fiximps in (* Interp bodies with rollback because temp use of notations/implicit *) let sigma, fixdefs = Metasyntax.with_syntax_protection (fun () -> let notations = List.map_append (fun { Vernacexpr.notations } -> List.map Metasyntax.prepare_where_notation notations) fixl in List.iter (Metasyntax.set_notation_for_interpretation env_rec impls) notations; List.fold_left4_map (fun sigma fixctximpenv -> interp_fix_body ~program_mode env_rec sigma (Id.Map.fold Id.Map.add fixctximpenv impls)) sigma fixctximpenvs fixctxs fixl fixccls) () in (* Instantiate evars and check all are resolved *) let sigma = Evarconv.solve_unif_constraints_with_heuristics env_rec sigma in let sigma = Evd.minimize_universes sigma in let fixctxs = List.map (fun (_,ctx) -> ctx) fixctxs in (* Build the fix declaration block *) (env,rec_sign,decl,sigma), (fixnames,fixrs,fixdefs,fixtypes), List.combine3 fixctxs fiximps fixannots let check_recursive ~isfix env evd (fixnames,_,fixdefs,_) = if List.for_all Option.has_some fixdefs then begin let fixdefs = List.map Option.get fixdefs in check_true_recursivity env evd ~isfix (List.combine fixnames fixdefs) end let ground_fixpoint env evd (fixnames,fixrs,fixdefs,fixtypes) = 
Pretyping.check_evars_are_solved ~program_mode:false env evd; let fixdefs = List.map (fun c -> Option.map EConstr.(to_constr evd) c) fixdefs in let fixtypes = List.map EConstr.(to_constr evd) fixtypes in Evd.evar_universe_context evd, (fixnames,fixrs,fixdefs,fixtypes) (* XXX: Unify with interp_recursive *) let interp_fixpoint ?(check_recursivity=true) ?typing_flags ~cofix l : ( (Constr.t, Constr.types) recursive_preentry * UState.universe_decl * UState.t * (EConstr.rel_context * Impargs.manual_implicits * int option) list) = let env = Global.env () in let env = Environ.update_typing_flags ?typing_flags env in let (env,_,pl,evd),fix,info = interp_recursive env ~program_mode:false ~cofix l in if check_recursivity then check_recursive ~isfix:(not cofix) env evd fix; let evd = Pretyping.(solve_remaining_evars all_no_fail_flags env evd) in let uctx,fix = ground_fixpoint env evd fix in (fix,pl,uctx,info) let build_recthms ~indexes ?using fixnames fixtypes fiximps = let fix_kind, cofix = match indexes with | Some indexes -> Decls.Fixpoint, false | None -> Decls.CoFixpoint, true in let thms = List.map3 (fun name typ (ctx,impargs,_) -> let env = Global.env() in let evd = Evd.from_env env in let terms = [EConstr.of_constr typ] in let using = Option.map (fun using -> Proof_using.definition_using env evd ~using ~terms) using in let args = List.map Context.Rel.Declaration.get_name ctx in Declare.CInfo.make ~name ~typ ~args ~impargs ?using () ) fixnames fixtypes fiximps in fix_kind, cofix, thms let declare_fixpoint_interactive_generic ?indexes ~scope ~poly ?typing_flags ((fixnames,_fixrs,fixdefs,fixtypes),udecl,ctx,fiximps) ntns = let fix_kind, cofix, thms = build_recthms ~indexes fixnames fixtypes fiximps in let indexes = Option.default [] indexes in let init_terms = Some fixdefs in let evd = Evd.from_ctx ctx in let info = Declare.Info.make ~poly ~scope ~kind:(Decls.IsDefinition fix_kind) ~udecl ?typing_flags () in let lemma = Declare.Proof.start_mutual_with_initialization ~info evd ~mutual_info:(cofix,indexes,init_terms) ~cinfo:thms None in (* Declare notations *) List.iter (Metasyntax.add_notation_interpretation ~local:(scope=Locality.Discharge) (Global.env())) ntns; lemma let declare_fixpoint_generic ?indexes ~scope ~poly ?typing_flags ?using ((fixnames,fixrs,fixdefs,fixtypes),udecl,uctx,fiximps) ntns = (* We shortcut the proof process *) let fix_kind, cofix, fixitems = build_recthms ~indexes ?using fixnames fixtypes fiximps in let fixdefs = List.map Option.get fixdefs in let rec_declaration = prepare_recursive_declaration fixnames fixrs fixtypes fixdefs in let fix_kind = Decls.IsDefinition fix_kind in let info = Declare.Info.make ~scope ~kind:fix_kind ~poly ~udecl ?typing_flags () in let cinfo = fixitems in let _ : GlobRef.t list = Declare.declare_mutually_recursive ~cinfo ~info ~opaque:false ~uctx ~possible_indexes:indexes ~ntns ~rec_declaration in () let extract_decreasing_argument ~structonly { CAst.v = v; _ } = let open Constrexpr in match v with | CStructRec na -> na | (CWfRec (na,_) | CMeasureRec (Some na,_,_)) when not structonly -> na | CMeasureRec (None,_,_) when not structonly -> CErrors.user_err Pp.(str "Decreasing argument must be specified in measure clause.") | _ -> CErrors.user_err Pp.(str "Well-founded induction requires Program Fixpoint or Function.") (* This is a special case: if there's only one binder, we pick it as the recursive argument if none is provided. 
*) let adjust_rec_order ~structonly binders rec_order = let rec_order = Option.map (fun rec_order -> let open Constrexpr in match binders, rec_order with | [CLocalAssum([{ CAst.v = Name x }],_,_)], { CAst.v = CMeasureRec(None, mes, rel); CAst.loc } -> CAst.make ?loc @@ CMeasureRec(Some (CAst.make x), mes, rel) | [CLocalDef({ CAst.v = Name x },_,_)], { CAst.v = CMeasureRec(None, mes, rel); CAst.loc } -> CAst.make ?loc @@ CMeasureRec(Some (CAst.make x), mes, rel) | _, x -> x) rec_order in Option.map (extract_decreasing_argument ~structonly) rec_order let do_fixpoint_common ?typing_flags (fixl : Vernacexpr.fixpoint_expr list) = let fixl = List.map (fun fix -> Vernacexpr.{ fix with rec_order = adjust_rec_order ~structonly:true fix.binders fix.rec_order }) fixl in let ntns = List.map_append (fun { Vernacexpr.notations } -> List.map Metasyntax.prepare_where_notation notations ) fixl in let (_, _, _, info as fix) = interp_fixpoint ~cofix:false ?typing_flags fixl in fixl, ntns, fix, List.map compute_possible_guardness_evidences info let do_fixpoint_interactive ~scope ~poly ?typing_flags l : Declare.Proof.t = let fixl, ntns, fix, possible_indexes = do_fixpoint_common ?typing_flags l in let lemma = declare_fixpoint_interactive_generic ~indexes:possible_indexes ~scope ~poly ?typing_flags fix ntns in lemma let do_fixpoint ~scope ~poly ?typing_flags ?using l = let fixl, ntns, fix, possible_indexes = do_fixpoint_common ?typing_flags l in declare_fixpoint_generic ~indexes:possible_indexes ~scope ~poly ?typing_flags ?using fix ntns let do_cofixpoint_common (fixl : Vernacexpr.cofixpoint_expr list) = let fixl = List.map (fun fix -> {fix with Vernacexpr.rec_order = None}) fixl in let ntns = List.map_append (fun { Vernacexpr.notations } -> List.map Metasyntax.prepare_where_notation notations ) fixl in interp_fixpoint ~cofix:true fixl, ntns let do_cofixpoint_interactive ~scope ~poly l = let cofix, ntns = do_cofixpoint_common l in let lemma = declare_fixpoint_interactive_generic ~scope ~poly cofix ntns in lemma let do_cofixpoint ~scope ~poly ?using l = let cofix, ntns = do_cofixpoint_common l in declare_fixpoint_generic ~scope ~poly ?using cofix ntns coq-8.15.0/vernac/comFixpoint.mli000066400000000000000000000056561417001151100166520ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* poly:bool -> ?typing_flags:Declarations.typing_flags -> fixpoint_expr list -> Declare.Proof.t val do_fixpoint : scope:Locality.locality -> poly:bool -> ?typing_flags:Declarations.typing_flags -> ?using:Vernacexpr.section_subset_expr -> fixpoint_expr list -> unit val do_cofixpoint_interactive : scope:Locality.locality -> poly:bool -> cofixpoint_expr list -> Declare.Proof.t val do_cofixpoint : scope:Locality.locality -> poly:bool -> ?using:Vernacexpr.section_subset_expr -> cofixpoint_expr list -> unit (************************************************************************) (** Internal API *) (************************************************************************) (** Typing global fixpoints and cofixpoint_expr *) val adjust_rec_order : structonly:bool -> Constrexpr.local_binder_expr list -> Constrexpr.recursion_order_expr option -> lident option (** names / relevance / defs / types *) type ('constr, 'types) recursive_preentry = Id.t list * Sorts.relevance list * 'constr option list * 'types list (** Exported for Program *) val interp_recursive : Environ.env -> (* Misc 
arguments *) program_mode:bool -> cofix:bool -> (* Notations of the fixpoint / should that be folded in the previous argument? *) lident option fix_expr_gen list -> (* env / signature / univs / evar_map *) (Environ.env * EConstr.named_context * UState.universe_decl * Evd.evar_map) * (* names / defs / types *) (EConstr.t, EConstr.types) recursive_preentry * (* ctx per mutual def / implicits / struct annotations *) (EConstr.rel_context * Impargs.manual_implicits * int option) list (** Exported for Funind *) val interp_fixpoint : ?check_recursivity:bool -> ?typing_flags:Declarations.typing_flags -> cofix:bool -> lident option fix_expr_gen list -> (Constr.t, Constr.types) recursive_preentry * UState.universe_decl * UState.t * (EConstr.rel_context * Impargs.manual_implicits * int option) list (** Very private function, do not use *) val compute_possible_guardness_evidences : ('a, 'b) Context.Rel.pt * 'c * int option -> int list coq-8.15.0/vernac/comHints.ml000066400000000000000000000135711417001151100157610ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* (a, b) | _ -> assert false in let p = if l2r then lib_ref "core.iff.proj1" else lib_ref "core.iff.proj2" in let sigma, p = Evd.fresh_global env sigma p in let c = Reductionops.whd_beta env sigma (mkApp (c, Context.Rel.instance mkRel 0 sign)) in let c = it_mkLambda_or_LetIn (mkApp ( p , [| mkArrow a Sorts.Relevant (Vars.lift 1 b) ; mkArrow b Sorts.Relevant (Vars.lift 1 a) ; c |] )) sign in let name = Nameops.add_suffix (Nametab.basename_of_global gr) ("_proj_" ^ if l2r then "l2r" else "r2l") in let ctx = Evd.univ_entry ~poly sigma in let c = EConstr.to_constr sigma c in let cb = Declare.(DefinitionEntry (definition_entry ~univs:ctx ~opaque:false c)) in let c = Declare.declare_constant ~local:Locality.ImportDefaultBehavior ~name ~kind:Decls.(IsDefinition Definition) cb in let info = {Typeclasses.hint_priority = pri; hint_pattern = None} in (info, true, Hints.PathAny, Hints.hint_globref (GlobRef.ConstRef c)) let warn_deprecated_hint_constr = CWarnings.create ~name:"fragile-hint-constr" ~category:"automation" (fun () -> Pp.strbrk "Declaring arbitrary terms as hints is fragile; it is recommended to \ declare a toplevel constant instead") (* Only error when we have to (axioms may be instantiated if from functors) XXX maybe error if not from a functor argument? 
*) let soft_evaluable = let open GlobRef in let open Tacred in function | ConstRef c -> EvalConstRef c | VarRef id -> EvalVarRef id | (IndRef _ | ConstructRef _) as r -> Tacred.error_not_evaluable r let interp_hints ~poly h = let env = Global.env () in let sigma = Evd.from_env env in let fref r = let gr = Smartlocate.global_with_alias r in Dumpglob.add_glob ?loc:r.CAst.loc gr; gr in let fr r = soft_evaluable (fref r) in let fi c = let open Hints in let open Vernacexpr in match c with | HintsReference c -> let gr = Smartlocate.global_with_alias c in (PathHints [gr], hint_globref gr) | HintsConstr c -> let () = warn_deprecated_hint_constr () in let env = Global.env () in let sigma = Evd.from_env env in let c, uctx = Constrintern.interp_constr env sigma c in let uctx = UState.normalize_variables uctx in let c = Evarutil.nf_evar (Evd.from_ctx uctx) c in let diff = UState.context_set uctx in let c = if poly then (c, Some diff) else let () = DeclareUctx.declare_universe_context ~poly:false diff in (c, None) in (PathAny, Hints.hint_constr c) [@ocaml.warning "-3"] in let fp = Constrintern.intern_constr_pattern env sigma in let fres (info, b, r) = let path, gr = fi r in let info = { info with Typeclasses.hint_pattern = Option.map fp info.Typeclasses.hint_pattern } in (info, b, path, gr) in let open Hints in let open Vernacexpr in let ft = function | HintsVariables -> HintsVariables | HintsConstants -> HintsConstants | HintsReferences lhints -> HintsReferences (List.map fr lhints) in let fp = Constrintern.intern_constr_pattern (Global.env ()) in match h with | HintsResolve lhints -> HintsResolveEntry (List.map fres lhints) | HintsResolveIFF (l2r, lc, n) -> HintsResolveEntry (List.map (project_hint ~poly n l2r) lc) | HintsImmediate lhints -> HintsImmediateEntry (List.map fi lhints) | HintsUnfold lhints -> HintsUnfoldEntry (List.map fr lhints) | HintsTransparency (t, b) -> HintsTransparencyEntry (ft t, b) | HintsMode (r, l) -> HintsModeEntry (fref r, l) | HintsConstructors lqid -> let constr_hints_of_ind qid = let ind = Smartlocate.global_inductive_with_alias qid in Dumpglob.dump_reference ?loc:qid.CAst.loc "<>" (Libnames.string_of_qualid qid) "ind"; List.init (Inductiveops.nconstructors env ind) (fun i -> let c = (ind, i + 1) in let gr = GlobRef.ConstructRef c in ( empty_hint_info , true , PathHints [gr] , hint_globref gr )) in HintsResolveEntry (List.flatten (List.map constr_hints_of_ind lqid)) | HintsExtern (pri, patcom, tacexp) -> let pat = Option.map (fp sigma) patcom in let l = match pat with None -> [] | Some (l, _) -> l in let ltacvars = List.fold_left (fun accu x -> Id.Set.add x accu) Id.Set.empty l in let env = Genintern.{(empty_glob_sign env) with ltacvars} in let _, tacexp = Genintern.generic_intern env tacexp in HintsExternEntry ({Typeclasses.hint_priority = Some pri; hint_pattern = pat}, tacexp) coq-8.15.0/vernac/comHints.mli000066400000000000000000000013571417001151100161310ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Vernacexpr.hints_expr -> Hints.hints_entry coq-8.15.0/vernac/comInductive.ml000066400000000000000000000701141417001151100166220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Pp.(strbrk "Automatically declaring " ++ Id.print id ++ strbrk " as 
template polymorphic. Use attributes or " ++ strbrk "disable Auto Template Polymorphism to avoid this warning.")) let should_auto_template = let open Goptions in let auto = ref true in let () = declare_bool_option { optdepr = false; optkey = ["Auto";"Template";"Polymorphism"]; optread = (fun () -> !auto); optwrite = (fun b -> auto := b); } in fun id would_auto -> let b = !auto && would_auto in if b then warn_auto_template id; b let push_types env idl rl tl = List.fold_left3 (fun env id r t -> EConstr.push_rel (LocalAssum (make_annot (Name id) r,t)) env) env idl rl tl type structured_one_inductive_expr = { ind_name : Id.t; ind_arity : constr_expr; ind_lc : (Id.t * constr_expr) list } exception Same of Id.t let check_all_names_different indl = let rec elements = function | [] -> Id.Set.empty | id :: l -> let s = elements l in if Id.Set.mem id s then raise (Same id) else Id.Set.add id s in let ind_names = List.map (fun ind -> ind.ind_name) indl in let cstr_names = List.map_append (fun ind -> List.map fst ind.ind_lc) indl in let ind_names = match elements ind_names with | s -> s | exception (Same t) -> raise (InductiveError (SameNamesTypes t)) in let cstr_names = match elements cstr_names with | s -> s | exception (Same c) -> raise (InductiveError (SameNamesConstructors c)) in let l = Id.Set.inter ind_names cstr_names in if not (Id.Set.is_empty l) then raise (InductiveError (SameNamesOverlap (Id.Set.elements l))) (** Make the arity conclusion flexible to avoid generating an upper bound universe now, only if the universe does not appear anywhere else. This is really a hack to stay compatible with the semantics of template polymorphic inductives which are recognized when a "Type" appears at the end of the conlusion in the source syntax. *) let rec check_type_conclusion ind = let open Glob_term in match DAst.get ind with | GSort (UAnonymous {rigid=true}) -> (Some true) | GSort (UNamed _) -> (Some false) | GProd ( _, _, _, e) | GLetIn (_, _, _, e) | GLambda (_, _, _, e) | GApp (e, _) | GCast (e, _, _) -> check_type_conclusion e | _ -> None let make_anonymous_conclusion_flexible sigma = function | None -> sigma | Some (false, _) -> sigma | Some (true, s) -> (match EConstr.ESorts.kind sigma s with | Type u -> (match Univ.universe_level u with | Some u -> Evd.make_flexible_variable sigma ~algebraic:true u | None -> sigma) | _ -> sigma) let intern_ind_arity env sigma ind = let c = intern_gen IsType env sigma ind.ind_arity in let impls = Implicit_quantifiers.implicits_of_glob_constr ~with_products:true c in let pseudo_poly = check_type_conclusion c in (constr_loc ind.ind_arity, c, impls, pseudo_poly) let pretype_ind_arity env sigma (loc, c, impls, pseudo_poly) = let sigma,t = understand_tcc env sigma ~expected_type:IsType c in match Reductionops.sort_of_arity env sigma t with | exception Reduction.NotArity -> user_err ?loc (str "Not an arity") | s -> let concl = match pseudo_poly with | Some b -> Some (b, s) | None -> None in sigma, (t, Retyping.relevance_of_sort s, concl, impls) (* ind_rel is the Rel for this inductive in the context without params. n is how many arguments there are in the constructor. 
*) let model_conclusion env sigma ind_rel params n arity_indices = let model_head = EConstr.mkRel (n + Context.Rel.length params + ind_rel) in let model_params = Context.Rel.instance EConstr.mkRel n params in let sigma,model_indices = List.fold_right (fun (_,t) (sigma, subst) -> let t = EConstr.Vars.substl subst (EConstr.Vars.liftn n (List.length subst + 1) t) in let sigma, c = Evarutil.new_evar env sigma t in sigma, c::subst) arity_indices (sigma, []) in sigma, EConstr.mkApp (EConstr.mkApp (model_head, model_params), Array.of_list (List.rev model_indices)) let interp_cstrs env (sigma, ind_rel) impls params ind arity = let cnames,ctyps = List.split ind.ind_lc in let arity_indices, cstr_sort = Reductionops.splay_arity env sigma arity in (* Interpret the constructor types *) let interp_cstr sigma ctyp = let flags = Pretyping.{ all_no_fail_flags with use_typeclasses = UseTCForConv; solve_unification_constraints = false } in let sigma, (ctyp, cimpl) = interp_type_evars_impls ~flags env sigma ~impls ctyp in let ctx, concl = Reductionops.splay_prod_assum env sigma ctyp in let concl_env = EConstr.push_rel_context ctx env in let sigma_with_model_evars, model = model_conclusion concl_env sigma ind_rel params (Context.Rel.length ctx) arity_indices in (* unify the expected with the provided conclusion *) let sigma = try Evarconv.unify concl_env sigma_with_model_evars Reduction.CONV concl model with Evarconv.UnableToUnify (sigma,e) -> user_err (Himsg.explain_pretype_error concl_env sigma (Pretype_errors.CannotUnify (concl, model, (Some e)))) in sigma, (ctyp, cimpl) in let sigma, (ctyps, cimpls) = on_snd List.split @@ List.fold_left_map interp_cstr sigma ctyps in (sigma, pred ind_rel), (cnames, ctyps, cimpls) let sign_level env evd sign = fst (List.fold_right (fun d (lev,env) -> match d with | LocalDef _ -> lev, push_rel d env | LocalAssum _ -> let s = Retyping.get_sort_of env evd (EConstr.of_constr (RelDecl.get_type d)) in let u = univ_of_sort s in (Univ.sup u lev, push_rel d env)) sign (Univ.Universe.sprop,env)) let sup_list min = List.fold_left Univ.sup min let extract_level env evd min tys = let sorts = List.map (fun ty -> let ctx, concl = Reduction.dest_prod_assum env ty in sign_level env evd (LocalAssum (make_annot Anonymous Sorts.Relevant, concl) :: ctx)) tys in sup_list min sorts let is_flexible_sort evd u = match Univ.Universe.level u with | Some l -> Evd.is_flexible_level evd l | None -> false (**********************************************************************) (* Tools for template polymorphic inductive types *) (* Miscellaneous functions to remove or test local univ assumed to occur only in the le constraints *) (* Solve a system of universe constraint of the form u_s11, ..., u_s1p1, w1 <= u1 ... u_sn1, ..., u_snpn, wn <= un where - the ui (1 <= i <= n) are universe variables, - the sjk select subsets of the ui for each equations, - the wi are arbitrary complex universes that do not mention the ui. 
*) let is_direct_sort_constraint s v = match s with | Some u -> Univ.univ_level_mem u v | None -> false let solve_constraints_system levels level_bounds = let open Univ in let levels = Array.mapi (fun i o -> match o with | Some u -> (match Universe.level u with | Some u -> Some u | _ -> level_bounds.(i) <- Universe.sup level_bounds.(i) u; None) | None -> None) levels in let v = Array.copy level_bounds in let nind = Array.length v in let clos = Array.map (fun _ -> Int.Set.empty) levels in (* First compute the transitive closure of the levels dependencies *) for i=0 to nind-1 do for j=0 to nind-1 do if not (Int.equal i j) && is_direct_sort_constraint levels.(j) v.(i) then clos.(i) <- Int.Set.add j clos.(i); done; done; let rec closure () = let continue = ref false in Array.iteri (fun i deps -> let deps' = Int.Set.fold (fun j acc -> Int.Set.union acc clos.(j)) deps deps in if Int.Set.equal deps deps' then () else (clos.(i) <- deps'; continue := true)) clos; if !continue then closure () else () in closure (); for i=0 to nind-1 do for j=0 to nind-1 do if not (Int.equal i j) && Int.Set.mem j clos.(i) then (v.(i) <- Universe.sup v.(i) level_bounds.(j)); done; done; v let inductive_levels env evd arities inds = let destarities = List.map (fun x -> x, Reduction.dest_arity env x) arities in let levels = List.map (fun (x,(ctx,a)) -> if Sorts.is_prop a || Sorts.is_sprop a then None else Some (univ_of_sort a)) destarities in let cstrs_levels, sizes = CList.split (List.map2 (fun (_,tys) (arity,(ctx,du)) -> let len = List.length tys in let minlev = Sorts.univ_of_sort du in let minlev = if len > 1 && not (is_impredicative_sort env du) then Univ.sup minlev Univ.type0_univ else minlev in let minlev = (* Indices contribute. *) if indices_matter env then begin let ilev = sign_level env evd ctx in Univ.sup ilev minlev end else minlev in let clev = extract_level env evd minlev tys in (clev, len)) inds destarities) in (* Take the transitive closure of the system of constructors *) (* level constraints and remove the recursive dependencies *) let levels' = solve_constraints_system (Array.of_list levels) (Array.of_list cstrs_levels) in let evd, arities = CList.fold_left3 (fun (evd, arities) cu (arity,(ctx,du)) len -> if is_impredicative_sort env du then (* Any product is allowed here. *) evd, (false, arity) :: arities else (* If in a predicative sort, or asked to infer the type, we take the max of: - indices (if in indices-matter mode) - constructors - Type(1) if there is more than 1 constructor *) (* Constructors contribute. *) let evd = if Sorts.is_set du then if not (Evd.check_leq evd cu Univ.type0_univ) then raise (InductiveError LargeNonPropInductiveNotInType) else evd else evd in let evd = if len >= 2 && Univ.is_type0m_univ cu then (* "Polymorphic" type constraint and more than one constructor, should not land in Prop. Add constraint only if it would land in Prop directly (no informative arguments as well). *) Evd.set_leq_sort env evd Sorts.set du else evd in let duu = Sorts.univ_of_sort du in let template_prop, evd = if not (Univ.is_small_univ duu) && Univ.Universe.equal cu duu then if is_flexible_sort evd duu && not (Evd.check_leq evd Univ.type0_univ duu) then if Term.isArity arity (* If not a syntactic arity, the universe may be used in a polymorphic instance and so cannot be lowered to Prop. See #13300. 
*) then true, Evd.set_eq_sort env evd Sorts.prop du else false, Evd.set_eq_sort env evd Sorts.set du else false, evd else false, Evd.set_eq_sort env evd (sort_of_univ cu) du in (evd, (template_prop, arity) :: arities)) (evd,[]) (Array.to_list levels') destarities sizes in evd, List.rev arities let check_named {CAst.loc;v=na} = match na with | Name _ -> () | Anonymous -> let msg = str "Parameters must be named." in user_err ?loc msg let template_polymorphism_candidate ~ctor_levels uctx params concl = match uctx with | UState.Monomorphic_entry uctx -> let concltemplate = Option.cata (fun s -> not (Sorts.is_small s)) false concl in if not concltemplate then false else let conclu = Option.cata Sorts.univ_of_sort Univ.type0m_univ concl in Option.has_some @@ IndTyping.template_polymorphic_univs ~ctor_levels uctx params conclu | UState.Polymorphic_entry _ -> false let check_param = function | CLocalDef (na, _, _) -> check_named na | CLocalAssum (nas, Default _, _) -> List.iter check_named nas | CLocalAssum (nas, Generalized _, _) -> () | CLocalPattern {CAst.loc} -> Loc.raise ?loc (Stream.Error "pattern with quote not allowed here") let restrict_inductive_universes sigma ctx_params arities constructors = let merge_universes_of_constr c = Univ.Level.Set.union (EConstr.universes_of_constr sigma (EConstr.of_constr c)) in let uvars = Univ.Level.Set.empty in let uvars = Context.Rel.(fold_outside (Declaration.fold_constr merge_universes_of_constr) ctx_params ~init:uvars) in let uvars = List.fold_right merge_universes_of_constr arities uvars in let uvars = List.fold_right (fun (_,ctypes) -> List.fold_right merge_universes_of_constr ctypes) constructors uvars in Evd.restrict_universe_context sigma uvars let check_trivial_variances variances = Array.iter (function | None | Some Univ.Variance.Invariant -> () | Some _ -> CErrors.user_err Pp.(strbrk "Universe variance was specified but this inductive will not be cumulative.")) variances let variance_of_entry ~cumulative ~variances uctx = match uctx with | Monomorphic_ind_entry | Template_ind_entry _ -> check_trivial_variances variances; None | Polymorphic_ind_entry uctx -> if not cumulative then begin check_trivial_variances variances; None end else let lvs = Array.length variances in let lus = Univ.UContext.size uctx in assert (lvs <= lus); Some (Array.append variances (Array.make (lus - lvs) None)) let interp_mutual_inductive_constr ~sigma ~template ~udecl ~variances ~ctx_params ~indnames ~arities ~arityconcl ~constructors ~env_ar_params ~cumulative ~poly ~private_ind ~finite = (* Compute renewed arities *) let sigma = Evd.minimize_universes sigma in let nf = Evarutil.nf_evars_universes sigma in let constructors = List.map (on_snd (List.map nf)) constructors in let arities = List.map EConstr.(to_constr sigma) arities in let sigma = List.fold_left make_anonymous_conclusion_flexible sigma arityconcl in let sigma, arities = inductive_levels env_ar_params sigma arities constructors in let sigma = Evd.minimize_universes sigma in let nf = Evarutil.nf_evars_universes sigma in let arities = List.map (on_snd nf) arities in let constructors = List.map (on_snd (List.map nf)) constructors in let ctx_params = List.map Termops.(map_rel_decl (EConstr.to_constr sigma)) ctx_params in let arityconcl = List.map (Option.map (fun (_anon, s) -> EConstr.ESorts.kind sigma s)) arityconcl in let sigma = restrict_inductive_universes sigma ctx_params (List.map snd arities) constructors in let univ_entry, binders = Evd.check_univ_decl ~poly sigma udecl in (* Build the inductive entries 
*) let entries = List.map4 (fun indname (templatearity, arity) concl (cnames,ctypes) -> { mind_entry_typename = indname; mind_entry_arity = arity; mind_entry_consnames = cnames; mind_entry_lc = ctypes }) indnames arities arityconcl constructors in let template = List.map4 (fun indname (templatearity, _) concl (_, ctypes) -> let template_candidate () = templatearity || let ctor_levels = let add_levels c levels = Univ.Level.Set.union levels (Vars.universes_of_constr c) in let param_levels = List.fold_left (fun levels d -> match d with | LocalAssum _ -> levels | LocalDef (_,b,t) -> add_levels b (add_levels t levels)) Univ.Level.Set.empty ctx_params in List.fold_left (fun levels c -> add_levels c levels) param_levels ctypes in template_polymorphism_candidate ~ctor_levels univ_entry ctx_params concl in match template with | Some template -> if poly && template then user_err Pp.(strbrk "Template-polymorphism and universe polymorphism are not compatible."); template | None -> should_auto_template indname (template_candidate ()) ) indnames arities arityconcl constructors in let is_template = List.for_all (fun t -> t) template in let univ_entry, ctx = match univ_entry with | UState.Monomorphic_entry ctx -> if is_template then Template_ind_entry ctx, Univ.ContextSet.empty else Monomorphic_ind_entry, ctx | UState.Polymorphic_entry uctx -> Polymorphic_ind_entry uctx, Univ.ContextSet.empty in let variance = variance_of_entry ~cumulative ~variances univ_entry in (* Build the mutual inductive entry *) let mind_ent = { mind_entry_params = ctx_params; mind_entry_record = None; mind_entry_finite = finite; mind_entry_inds = entries; mind_entry_private = if private_ind then Some false else None; mind_entry_universes = univ_entry; mind_entry_variance = variance; } in mind_ent, binders, ctx let interp_params env udecl uparamsl paramsl = let sigma, udecl, variances = interp_cumul_univ_decl_opt env udecl in let sigma, (uimpls, ((env_uparams, ctx_uparams), useruimpls)) = interp_context_evars ~program_mode:false env sigma uparamsl in let sigma, (impls, ((env_params, ctx_params), userimpls)) = interp_context_evars ~program_mode:false ~impl_env:uimpls env_uparams sigma paramsl in (* Names of parameters as arguments of the inductive type (defs removed) *) sigma, env_params, (ctx_params, env_uparams, ctx_uparams, userimpls, useruimpls, impls, udecl, variances) (* When a hole remains for a param, pretend the param is uniform and do the unification. [env_ar_par] is [uparams; inds; params] *) let maybe_unify_params_in env_ar_par sigma ~ninds ~nparams ~binders:k c = let is_ind sigma k c = match EConstr.kind sigma c with | Constr.Rel n -> (* env is [uparams; inds; params; k other things] *) n > k + nparams && n <= k + nparams + ninds | _ -> false in let rec aux (env,k as envk) sigma c = match EConstr.kind sigma c with | Constr.App (h,args) when is_ind sigma k h -> Array.fold_left_i (fun i sigma arg -> if i >= nparams || not (EConstr.isEvar sigma arg) then sigma else begin try Evarconv.unify_delay env sigma arg (EConstr.mkRel (k+nparams-i)) with Evarconv.UnableToUnify _ -> (* ignore errors, we will get a "Cannot infer ..." 
error instead *) sigma end) sigma args | _ -> Termops.fold_constr_with_full_binders env sigma (fun d (env,k) -> EConstr.push_rel d env, k+1) aux envk sigma c in aux (env_ar_par,k) sigma c let interp_mutual_inductive_gen env0 ~template udecl (uparamsl,paramsl,indl) notations ~cumulative ~poly ~private_ind finite = check_all_names_different indl; List.iter check_param paramsl; if not (List.is_empty uparamsl) && not (List.is_empty notations) then user_err (str "Inductives with uniform parameters may not have attached notations."); let indnames = List.map (fun ind -> ind.ind_name) indl in let ninds = List.length indl in (* In case of template polymorphism, we need to compute more constraints *) let env0 = if poly then env0 else Environ.set_universes_lbound env0 UGraph.Bound.Prop in let sigma, env_params, (ctx_params, env_uparams, ctx_uparams, userimpls, useruimpls, impls, udecl, variances) = interp_params env0 udecl uparamsl paramsl in (* Interpret the arities *) let arities = List.map (intern_ind_arity env_params sigma) indl in let sigma, arities = List.fold_left_map (pretype_ind_arity env_params) sigma arities in let arities, relevances, arityconcl, indimpls = List.split4 arities in let lift_ctx n ctx = let t = EConstr.it_mkProd_or_LetIn EConstr.mkProp ctx in let t = EConstr.Vars.lift n t in let ctx, _ = EConstr.decompose_prod_assum sigma t in ctx in let ctx_params_lifted, fullarities = lift_ctx ninds ctx_params, CList.map_i (fun i c -> EConstr.Vars.lift i (EConstr.it_mkProd_or_LetIn c ctx_params)) 0 arities in let env_ar = push_types env_uparams indnames relevances fullarities in let env_ar_params = EConstr.push_rel_context ctx_params_lifted env_ar in (* Compute interpretation metadatas *) let indimpls = List.map (fun impls -> userimpls @ impls) indimpls in let impls = compute_internalization_env env_uparams sigma ~impls Inductive indnames fullarities indimpls in let ntn_impls = compute_internalization_env env_uparams sigma Inductive indnames fullarities indimpls in let (sigma, _), constructors = Metasyntax.with_syntax_protection (fun () -> (* Temporary declaration of notations and scopes *) List.iter (Metasyntax.set_notation_for_interpretation env_params ntn_impls) notations; (* Interpret the constructor types *) List.fold_left2_map (fun (sigma, ind_rel) ind arity -> interp_cstrs env_ar_params (sigma, ind_rel) impls ctx_params_lifted ind (EConstr.Vars.liftn ninds (Rel.length ctx_params + 1) arity)) (sigma, ninds) indl arities) () in let nparams = Context.Rel.length ctx_params in let sigma = List.fold_left (fun sigma (_,ctyps,_) -> List.fold_left (fun sigma ctyp -> maybe_unify_params_in env_ar_params sigma ~ninds ~nparams ~binders:0 ctyp) sigma ctyps) sigma constructors in (* generalize over the uniform parameters *) let nuparams = Context.Rel.length ctx_uparams in let uargs = Context.Rel.instance EConstr.mkRel 0 ctx_uparams in let uparam_subst = List.init ninds EConstr.(fun i -> mkApp (mkRel (i + 1 + nuparams), uargs)) @ List.init nuparams EConstr.(fun i -> mkRel (i + 1)) in let generalize_constructor c = EConstr.Unsafe.to_constr (EConstr.Vars.substnl uparam_subst nparams c) in let cimpls = List.map pi3 constructors in let constructors = List.map (fun (cnames,ctypes,cimpls) -> (cnames,List.map generalize_constructor ctypes)) constructors in let ctx_params = ctx_params @ ctx_uparams in let userimpls = useruimpls @ userimpls in let indimpls = List.map (fun iimpl -> useruimpls @ iimpl) indimpls in let fullarities = List.map (fun c -> EConstr.it_mkProd_or_LetIn c ctx_uparams) fullarities in 
let env_ar = push_types env0 indnames relevances fullarities in let env_ar_params = EConstr.push_rel_context ctx_params env_ar in (* Try further to solve evars, and instantiate them *) let sigma = solve_remaining_evars all_and_fail_flags env_params sigma in let impls = List.map2 (fun indimpls cimpls -> indimpls, List.map (fun impls -> userimpls @ impls) cimpls) indimpls cimpls in let mie, binders, ctx = interp_mutual_inductive_constr ~template ~sigma ~ctx_params ~udecl ~variances ~arities ~arityconcl ~constructors ~env_ar_params ~poly ~finite ~cumulative ~private_ind ~indnames in (mie, binders, impls, ctx) (* Very syntactical equality *) let eq_local_binders bl1 bl2 = List.equal local_binder_eq bl1 bl2 let eq_params (up1,p1) (up2,p2) = eq_local_binders up1 up2 && Option.equal eq_local_binders p1 p2 let extract_coercions indl = let mkqid (_,({CAst.v=id},_)) = qualid_of_ident id in let extract lc = List.filter (fun (iscoe,_) -> iscoe) lc in List.map mkqid (List.flatten(List.map (fun (_,_,_,lc) -> extract lc) indl)) let extract_params indl = let paramsl = List.map (fun (_,params,_,_) -> params) indl in match paramsl with | [] -> anomaly (Pp.str "empty list of inductive types.") | params::paramsl -> if not (List.for_all (eq_params params) paramsl) then user_err Pp.(str "Parameters should be syntactically the same for each inductive type."); params let extract_inductive indl = List.map (fun ({CAst.v=indname},_,ar,lc) -> { ind_name = indname; ind_arity = Option.cata (fun x -> x) (CAst.make @@ CSort (Glob_term.UAnonymous {rigid=true})) ar; ind_lc = List.map (fun (_,({CAst.v=id},t)) -> (id,t)) lc }) indl let extract_mutual_inductive_declaration_components indl = let indl,ntnl = List.split indl in let params = extract_params indl in let coes = extract_coercions indl in let indl = extract_inductive indl in (params,indl), coes, List.flatten ntnl type uniform_inductive_flag = | UniformParameters | NonUniformParameters let do_mutual_inductive ~template udecl indl ~cumulative ~poly ?typing_flags ~private_ind ~uniform finite = let (params,indl),coes,ntns = extract_mutual_inductive_declaration_components indl in let ntns = List.map Metasyntax.prepare_where_notation ntns in (* Interpret the types *) let indl = match params with | uparams, Some params -> (uparams, params, indl) | params, None -> match uniform with | UniformParameters -> (params, [], indl) | NonUniformParameters -> ([], params, indl) in let env = Global.env () in let env = Environ.update_typing_flags ?typing_flags env in let mie,binders,impls,ctx = interp_mutual_inductive_gen env ~template udecl indl ntns ~cumulative ~poly ~private_ind finite in (* Slightly hackish global universe declaration due to template types. 
*) let binders = match mie.mind_entry_universes with | Monomorphic_ind_entry -> (UState.Monomorphic_entry ctx, binders) | Template_ind_entry ctx -> (UState.Monomorphic_entry ctx, binders) | Polymorphic_ind_entry uctx -> (UState.Polymorphic_entry uctx, UnivNames.empty_binders) in (* Declare the global universes *) DeclareUctx.declare_universe_context ~poly:false ctx; (* Declare the mutual inductive block with its associated schemes *) ignore (DeclareInd.declare_mutual_inductive_with_eliminations ?typing_flags mie binders impls); (* Declare the possible notations of inductive types *) List.iter (Metasyntax.add_notation_interpretation ~local:false (Global.env ())) ntns; (* Declare the coercions *) List.iter (fun qid -> ComCoercion.try_add_new_coercion (Nametab.locate qid) ~local:false ~poly) coes (** Prepare a "match" template for a given inductive type. For each branch of the match, we list the constructor name followed by enough pattern variables. [Not_found] is raised if the given string isn't the qualid of a known inductive type. *) (* HH notes in PR #679: The Show Match could also be made more robust, for instance in the presence of let in the branch of a constructor. A decompose_prod_assum would probably suffice for that, but then, it is a Context.Rel.Declaration.t which needs to be matched and not just a pair (name,type). Otherwise, this is OK. After all, the API on inductive types is not so canonical in general, and in this simple case, working at the low-level of mind_nf_lc seems reasonable (compared to working at the higher-level of Inductiveops). *) let make_cases ind = let open Declarations in let mib, mip = Global.lookup_inductive ind in Util.Array.fold_right_i (fun i (ctx, _) l -> let al = Util.List.skipn (List.length mib.mind_params_ctxt) (List.rev ctx) in let rec rename avoid = function | [] -> [] | RelDecl.LocalDef _ :: l -> "_" :: rename avoid l | RelDecl.LocalAssum (n, _)::l -> let n' = Namegen.next_name_away_with_default (Id.to_string Namegen.default_dependent_ident) n.Context.binder_name avoid in Id.to_string n' :: rename (Id.Set.add n' avoid) l in let al' = rename Id.Set.empty al in let consref = GlobRef.ConstructRef (ith_constructor_of_inductive ind (i + 1)) in (Libnames.string_of_qualid (Nametab.shortest_qualid_of_global Id.Set.empty consref) :: al') :: l) mip.mind_nf_lc [] coq-8.15.0/vernac/comInductive.mli000066400000000000000000000075451417001151100170030ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* cumul_univ_decl_expr option -> (one_inductive_expr * decl_notation list) list -> cumulative:bool -> poly:bool -> ?typing_flags:Declarations.typing_flags -> private_ind:bool -> uniform:uniform_inductive_flag -> Declarations.recursivity_kind -> unit (** User-interface API *) (** Prepare a "match" template for a given inductive type. For each branch of the match, we list the constructor name followed by enough pattern variables. [Not_found] is raised if the given string isn't the qualid of a known inductive type. 
*) val make_cases : Names.inductive -> string list list val interp_mutual_inductive_constr : sigma:Evd.evar_map -> template:bool option -> udecl:UState.universe_decl -> variances:Entries.variance_entry -> ctx_params:(EConstr.t, EConstr.t) Context.Rel.Declaration.pt list -> indnames:Names.Id.t list -> arities:EConstr.t list -> arityconcl:(bool * EConstr.ESorts.t) option list -> constructors:(Names.Id.t list * Constr.constr list) list -> env_ar_params:Environ.env (** Environment with the inductives and parameters in the rel_context *) -> cumulative:bool -> poly:bool -> private_ind:bool -> finite:Declarations.recursivity_kind -> Entries.mutual_inductive_entry * UnivNames.universe_binders * Univ.ContextSet.t (************************************************************************) (** Internal API, exported for Record *) (************************************************************************) val should_auto_template : Id.t -> bool -> bool (** [should_auto_template x b] is [true] when [b] is [true] and we automatically use template polymorphism. [x] is the name of the inductive under consideration. *) val template_polymorphism_candidate : ctor_levels:Univ.Level.Set.t -> UState.universes_entry -> Constr.rel_context -> Sorts.t option -> bool (** [template_polymorphism_candidate ~ctor_levels uctx params conclsort] is [true] iff an inductive with params [params], conclusion [conclsort] and universe levels appearing in the constructor arguments [ctor_levels] would be definable as template polymorphic. It should have at least one universe in its monomorphic universe context that can be made parametric in its conclusion sort, if one is given. *) val maybe_unify_params_in : Environ.env -> Evd.evar_map -> ninds:int -> nparams:int -> binders:int -> EConstr.t -> Evd.evar_map (** [nparams] is the number of parameters which aren't treated as uniform, ie the length of params (including letins) where the env is [uniform params, inductives, params, binders]. *) val variance_of_entry : cumulative:bool -> variances:Entries.variance_entry -> Entries.inductive_universes_entry -> Entries.variance_entry option (** Will return None if non-cumulative, and resize if there are more universes than originally specified. If monomorphic, [cumulative] is treated as [false]. 
*) coq-8.15.0/vernac/comPrimitive.ml000066400000000000000000000051701417001151100166400ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* if Option.has_some udecl then CErrors.user_err ?loc Pp.(strbrk "Cannot use a universe declaration without a type when declaring primitives."); let e = Declare.primitive_entry prim in declare id e | Some typ -> let env = Global.env () in let evd, udecl = Constrintern.interp_univ_decl_opt env udecl in let auctx = CPrimitives.op_or_type_univs prim in let evd, u = Evd.with_context_set UState.univ_flexible evd (UnivGen.fresh_instance auctx) in let expected_typ = EConstr.of_constr @@ Typeops.type_of_prim_or_type env u prim in let evd, (typ,impls) = Constrintern.(interp_type_evars_impls ~impls:empty_internalization_env) env evd typ in let evd = try Evarconv.unify_delay env evd typ expected_typ with Evarconv.UnableToUnify (evd,e) as exn -> let _, info = Exninfo.capture exn in Exninfo.iraise (Pretype_errors.( PretypeError (env,evd,CannotUnify (typ,expected_typ,Some e)),info)) in Pretyping.check_evars_are_solved ~program_mode:false env evd; let evd = Evd.minimize_universes evd in let uvars = EConstr.universes_of_constr evd typ in let evd = Evd.restrict_universe_context evd uvars in let typ = EConstr.to_constr evd typ in let univ_entry = Evd.check_univ_decl ~poly:(not (Univ.AbstractContext.is_empty auctx)) evd udecl in let entry = Declare.primitive_entry ~types:(typ, univ_entry) prim in declare id entry coq-8.15.0/vernac/comPrimitive.mli000066400000000000000000000014711417001151100170110ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Constrexpr.universe_decl_expr option -> CPrimitives.op_or_type -> Constrexpr.constr_expr option -> unit coq-8.15.0/vernac/comProgramFixpoint.ml000066400000000000000000000426051417001151100200240ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* SPropF | InProp -> PropF | InSet | InType -> TypeF let get_sigmatypes sigma ~sort ~predsort = let open EConstr in let which, sigsort = match predsort, sort with | SPropF, _ | _, SPropF -> user_err Pp.(str "SProp arguments not supported by Program Fixpoint yet.") | PropF, PropF -> "ex", PropF | PropF, TypeF -> "sig", TypeF | TypeF, (PropF|TypeF) -> "sigT", TypeF in let sigma, ty = Evd.fresh_global (Global.env ()) sigma (lib_ref ("core."^which^".type")) in let uinstance = snd (destRef sigma ty) in let intro = mkRef (lib_ref ("core."^which^".intro"), uinstance) in let p1 = mkRef (lib_ref ("core."^which^".proj1"), uinstance) in let p2 = mkRef (lib_ref ("core."^which^".proj2"), uinstance) in sigma, ty, intro, p1, p2, sigsort let rec telescope sigma l = let open EConstr in let open Vars in match l with | [] -> assert false | [LocalAssum (n, t), _] -> sigma, t, [LocalDef (n, mkRel 1, t)], mkRel 1 | (LocalAssum (n, t), tsort) :: tl -> let sigma, ty, _tysort, tys, (k, constr) = List.fold_left (fun (sigma, ty, tysort, tys, (k, constr)) (decl,sort) -> let t = RelDecl.get_type decl in let pred = mkLambda (RelDecl.get_annot decl, t, ty) in let sigma, ty, intro, p1, p2, sigsort = get_sigmatypes sigma ~predsort:tysort ~sort 
in let sigty = mkApp (ty, [|t; pred|]) in let intro = mkApp (intro, [|lift k t; lift k pred; mkRel k; constr|]) in (sigma, sigty, sigsort, (pred, p1, p2) :: tys, (succ k, intro))) (sigma, t, tsort, [], (2, mkRel 1)) tl in let sigma, last, subst = List.fold_right2 (fun (pred,p1,p2) (decl,_) (sigma, prev, subst) -> let t = RelDecl.get_type decl in let proj1 = applist (p1, [t; pred; prev]) in let proj2 = applist (p2, [t; pred; prev]) in (sigma, lift 1 proj2, LocalDef (get_annot decl, proj1, t) :: subst)) (List.rev tys) tl (sigma, mkRel 1, []) in sigma, ty, (LocalDef (n, last, t) :: subst), constr | (LocalDef (n, b, t), _) :: tl -> let sigma, ty, subst, term = telescope sigma tl in sigma, ty, (LocalDef (n, b, t) :: subst), lift 1 term let telescope env sigma l = let l, _ = List.fold_right_map (fun d env -> let s = Retyping.get_sort_family_of env sigma (RelDecl.get_type d) in let env = EConstr.push_rel d env in (d, family_of_sort_family s), env) l env in telescope sigma l let nf_evar_context sigma ctx = List.map (map_constr (fun c -> Evarutil.nf_evar sigma c)) ctx let build_wellfounded pm (recname,pl,bl,arityc,body) poly ?typing_flags ?using r measure notation = let open EConstr in let open Vars in Coqlib.check_required_library ["Coq";"Program";"Wf"]; let env = Global.env() in let sigma, udecl = interp_univ_decl_opt env pl in let sigma, (_, ((env', binders_rel), impls)) = interp_context_evars ~program_mode:true env sigma bl in let len = List.length binders_rel in let top_env = push_rel_context binders_rel env in let sigma, top_arity = interp_type_evars ~program_mode:true top_env sigma arityc in let full_arity = it_mkProd_or_LetIn top_arity binders_rel in let sigma, argtyp, letbinders, make = telescope env sigma binders_rel in let argname = Id.of_string "recarg" in let arg = LocalAssum (make_annot (Name argname) Sorts.Relevant, argtyp) in let binders = letbinders @ [arg] in let binders_env = push_rel_context binders_rel env in let sigma, (rel, _) = interp_constr_evars_impls ~program_mode:true env sigma r in let relty = Retyping.get_type_of env sigma rel in let relargty = let error () = user_err ?loc:(constr_loc r) (Printer.pr_econstr_env env sigma rel ++ str " is not an homogeneous binary relation.") in try let ctx, ar = Reductionops.splay_prod_n env sigma 2 relty in match ctx, EConstr.kind sigma ar with | [LocalAssum (_,t); LocalAssum (_,u)], Sort s when Sorts.is_prop (ESorts.kind sigma s) && Reductionops.is_conv env sigma t u -> t | _, _ -> error () with e when CErrors.noncritical e -> error () in let sigma, measure = interp_casted_constr_evars ~program_mode:true binders_env sigma measure relargty in let sigma, wf_rel, wf_rel_fun, measure_fn = let measure_body, measure = it_mkLambda_or_LetIn measure letbinders, it_mkLambda_or_LetIn measure binders in let sigma, comb = Evd.fresh_global (Global.env ()) sigma (delayed_force measure_on_R_ref) in let wf_rel = mkApp (comb, [| argtyp; relargty; rel; measure |]) in let wf_rel_fun x y = mkApp (rel, [| subst1 x measure_body; subst1 y measure_body |]) in sigma, wf_rel, wf_rel_fun, measure in let sigma, wf_term = well_founded sigma in let wf_proof = mkApp (wf_term, [| argtyp ; wf_rel |]) in let argid' = Id.of_string (Id.to_string argname ^ "'") in let wfarg sigma len = let sigma, ss_term = mkSubset sigma (Name argid') argtyp (wf_rel_fun (mkRel 1) (mkRel (len + 1))) in sigma, LocalAssum (make_annot (Name argid') Sorts.Relevant, ss_term) in let sigma, intern_bl = let sigma, wfa = wfarg sigma 1 in sigma, wfa :: [arg] in let _intern_env = push_rel_context 
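(* Overview of the encoding built by [build_wellfounded]: the binders are
   packed by [telescope] into the single argument [recarg]; the user-given
   relation and measure are combined through [measure_on_R_ref] into
   [wf_rel]; a proof of [wf_proof] is introduced as an obligation evar; and
   the interpreted body, which receives the recursive function together with
   a [recproof] argument witnessing that the measure decreases, is passed to
   the fixed-point combinator referenced by [fix_sub_ref] (from
   Coq.Program.Wf).  Any remaining evars become program obligations via
   [RetrieveObl.retrieve_obligations] below. *)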
intern_bl env in let sigma, proj = Evd.fresh_global (Global.env ()) sigma (delayed_force build_sigma).Coqlib.proj1 in let wfargpred = mkLambda (make_annot (Name argid') Sorts.Relevant, argtyp, wf_rel_fun (mkRel 1) (mkRel 3)) in let projection = (* in wfarg :: arg :: before *) mkApp (proj, [| argtyp ; wfargpred ; mkRel 1 |]) in let top_arity_let = it_mkLambda_or_LetIn top_arity letbinders in let intern_arity = substl [projection] top_arity_let in (* substitute the projection of wfarg for something, now intern_arity is in wfarg :: arg *) let sigma, wfa = wfarg sigma 1 in let intern_fun_arity_prod = it_mkProd_or_LetIn intern_arity [wfa] in let intern_fun_binder = LocalAssum (make_annot (Name (add_suffix recname "'")) Sorts.Relevant, intern_fun_arity_prod) in let sigma, curry_fun = let wfpred = mkLambda (make_annot (Name argid') Sorts.Relevant, argtyp, wf_rel_fun (mkRel 1) (mkRel (2 * len + 4))) in let sigma, intro = Evd.fresh_global (Global.env ()) sigma (delayed_force build_sigma).Coqlib.intro in let arg = mkApp (intro, [| argtyp; wfpred; lift 1 make; mkRel 1 |]) in let app = mkApp (mkRel (2 * len + 2 (* recproof + orig binders + current binders *)), [| arg |]) in let rcurry = mkApp (rel, [| measure; lift len measure |]) in let lam = LocalAssum (make_annot (Name (Id.of_string "recproof")) Sorts.Relevant, rcurry) in let body = it_mkLambda_or_LetIn app (lam :: binders_rel) in let ty = it_mkProd_or_LetIn (lift 1 top_arity) (lam :: binders_rel) in sigma, LocalDef (make_annot (Name recname) Sorts.Relevant, body, ty) in let fun_bl = intern_fun_binder :: [arg] in let lift_lets = lift_rel_context 1 letbinders in let sigma, intern_body = let ctx = LocalAssum (make_annot (Name recname) Sorts.Relevant, get_type curry_fun) :: binders_rel in let interning_data = Constrintern.compute_internalization_data env sigma recname Constrintern.Recursive full_arity impls in let newimpls = Id.Map.singleton recname (Constrintern.extend_internalization_data interning_data (Some ((Name (Id.of_string "recproof"),1,None), Impargs.Manual, (true, false))) None) in interp_casted_constr_evars ~program_mode:true (push_rel_context ctx env) sigma ~impls:newimpls body (lift 1 top_arity) in let intern_body_lam = it_mkLambda_or_LetIn intern_body (curry_fun :: lift_lets @ fun_bl) in let prop = mkLambda (make_annot (Name argname) Sorts.Relevant, argtyp, top_arity_let) in (* XXX: Previous code did parallel evdref update, so possible old weak ordering semantics may bite here. *) let sigma, def = let sigma, h_a_term = Evd.fresh_global (Global.env ()) sigma (delayed_force fix_sub_ref) in let sigma, h_e_term = Evarutil.new_evar env sigma ~src:(Loc.tag @@ Evar_kinds.QuestionMark { Evar_kinds.default_question_mark with Evar_kinds.qm_obligation=Evar_kinds.Define false; }) wf_proof in let sigma = Evd.set_obligation_evar sigma (fst (destEvar sigma h_e_term)) in sigma, mkApp (h_a_term, [| argtyp ; wf_rel ; h_e_term; prop |]) in let sigma, def = Typing.solve_evars env sigma def in let sigma = Evarutil.nf_evar_map sigma in let def = mkApp (def, [|intern_body_lam|]) in let binders_rel = nf_evar_context sigma binders_rel in let binders = nf_evar_context sigma binders in let top_arity = Evarutil.nf_evar sigma top_arity in let hook, recname, typ = if List.length binders_rel > 1 then let name = add_suffix recname "_func" in (* XXX: Mutating the evar_map in the hook! *) (* XXX: Likely the sigma is out of date when the hook is called .... 
*) let hook sigma { Declare.Hook.S.dref; _ } = let sigma, h_body = Evd.fresh_global (Global.env ()) sigma dref in let body = it_mkLambda_or_LetIn (mkApp (h_body, [|make|])) binders_rel in let ty = it_mkProd_or_LetIn top_arity binders_rel in let ty = EConstr.Unsafe.to_constr ty in let univs = Evd.check_univ_decl ~poly sigma udecl in (*FIXME poly? *) let ce = definition_entry ~types:ty ~univs (EConstr.to_constr sigma body) in (* FIXME: include locality *) let c = Declare.declare_constant ~name:recname ~kind:Decls.(IsDefinition Definition) (DefinitionEntry ce) in let gr = GlobRef.ConstRef c in if Impargs.is_implicit_args () || not (List.is_empty impls) then Impargs.declare_manual_implicits false gr impls in let typ = it_mkProd_or_LetIn top_arity binders in hook, name, typ else let typ = it_mkProd_or_LetIn top_arity binders_rel in let hook sigma { Declare.Hook.S.dref; _ } = if Impargs.is_implicit_args () || not (List.is_empty impls) then Impargs.declare_manual_implicits false dref impls in hook, recname, typ in (* XXX: Capturing sigma here... bad bad *) let hook = Declare.Hook.make (hook sigma) in RetrieveObl.check_evars env sigma; let evars, _, evars_def, evars_typ = RetrieveObl.retrieve_obligations env recname sigma 0 def typ in let using = let terms = List.map EConstr.of_constr [evars_def; evars_typ] in Option.map (fun using -> Proof_using.definition_using env sigma ~using ~terms) using in let uctx = Evd.evar_universe_context sigma in let cinfo = Declare.CInfo.make ~name:recname ~typ:evars_typ ?using () in let info = Declare.Info.make ~udecl ~poly ~hook ?typing_flags () in let pm, _ = Declare.Obls.add_definition ~pm ~cinfo ~info ~term:evars_def ~uctx evars in pm let out_def = function | Some def -> def | None -> user_err Pp.(str "Program Fixpoint needs defined bodies.") let collect_evars_of_term evd c ty = let evars = Evar.Set.union (Evd.evars_of_term evd c) (Evd.evars_of_term evd ty) in Evar.Set.fold (fun ev acc -> Evd.add acc ev (Evd.find_undefined evd ev)) evars (Evd.from_ctx (Evd.evar_universe_context evd)) let do_program_recursive ~pm ~scope ~poly ?typing_flags ?using fixkind fixl = let cofix = fixkind = Declare.Obls.IsCoFixpoint in let (env, rec_sign, udecl, evd), fix, info = let env = Global.env () in let env = Environ.update_typing_flags ?typing_flags env in interp_recursive env ~cofix ~program_mode:true fixl in (* Program-specific code *) (* Get the interesting evars, those that were not instantiated *) let evd = Typeclasses.resolve_typeclasses ~filter:Typeclasses.no_goals ~fail:true env evd in (* Solve remaining evars *) let evd = nf_evar_map_undefined evd in let collect_evars name def typ impargs = (* Generalize by the recursive prototypes *) let terms = [def; typ] in let using = Option.map (fun using -> Proof_using.definition_using env evd ~using ~terms) using in let def = nf_evar evd (Termops.it_mkNamedLambda_or_LetIn def rec_sign) in let typ = nf_evar evd (Termops.it_mkNamedProd_or_LetIn typ rec_sign) in let evm = collect_evars_of_term evd def typ in let evars, _, def, typ = RetrieveObl.retrieve_obligations env name evm (List.length rec_sign) def typ in let cinfo = Declare.CInfo.make ~name ~typ ~impargs ?using () in (cinfo, def, evars) in let (fixnames,fixrs,fixdefs,fixtypes) = fix in let fiximps = List.map pi2 info in let fixdefs = List.map out_def fixdefs in let defs = List.map4 collect_evars fixnames fixdefs fixtypes fiximps in let () = if not cofix then begin let possible_indexes = List.map ComFixpoint.compute_possible_guardness_evidences info in (* XXX: are we allowed to 
have evars here? *) let fixtypes = List.map (EConstr.to_constr ~abort_on_undefined_evars:false evd) fixtypes in let fixdefs = List.map (EConstr.to_constr ~abort_on_undefined_evars:false evd) fixdefs in let fixdecls = Array.of_list (List.map2 (fun x r -> make_annot (Name x) r) fixnames fixrs), Array.of_list fixtypes, Array.of_list (List.map (subst_vars (List.rev fixnames)) fixdefs) in let indexes = let env = Global.env () in let env = Environ.update_typing_flags ?typing_flags env in Pretyping.search_guard env possible_indexes fixdecls in let env = Environ.update_typing_flags ?typing_flags env in List.iteri (fun i _ -> Inductive.check_fix env ((indexes,i),fixdecls)) fixl end in let uctx = Evd.evar_universe_context evd in let kind = match fixkind with | Declare.Obls.IsFixpoint _ -> Decls.(IsDefinition Fixpoint) | Declare.Obls.IsCoFixpoint -> Decls.(IsDefinition CoFixpoint) in let ntns = List.map_append (fun { Vernacexpr.notations } -> List.map Metasyntax.prepare_where_notation notations ) fixl in let info = Declare.Info.make ~poly ~scope ~kind ~udecl ?typing_flags () in Declare.Obls.add_mutual_definitions ~pm defs ~info ~uctx ~ntns fixkind let do_fixpoint ~pm ~scope ~poly ?typing_flags ?using l = let g = List.map (fun { Vernacexpr.rec_order } -> rec_order) l in match g, l with | [Some { CAst.v = CWfRec (n,r) }], [ Vernacexpr.{fname={CAst.v=id}; univs; binders; rtype; body_def; notations} ] -> let recarg = mkIdentC n.CAst.v in build_wellfounded pm (id, univs, binders, rtype, out_def body_def) poly ?typing_flags ?using r recarg notations | [Some { CAst.v = CMeasureRec (n, m, r) }], [Vernacexpr.{fname={CAst.v=id}; univs; binders; rtype; body_def; notations }] -> (* We resolve here a clash between the syntax of Program Fixpoint and the one of funind *) let r = match n, r with | Some id, None -> let loc = id.CAst.loc in Some (CAst.make ?loc @@ CRef(qualid_of_ident ?loc id.CAst.v,None)) | Some _, Some _ -> user_err Pp.(str"Measure takes only two arguments in Program Fixpoint.") | _, _ -> r in build_wellfounded pm (id, univs, binders, rtype, out_def body_def) poly ?typing_flags ?using (Option.default (CAst.make @@ CRef (lt_ref,None)) r) m notations | _, _ when List.for_all (fun ro -> match ro with None | Some { CAst.v = CStructRec _} -> true | _ -> false) g -> let annots = List.map (fun fix -> Vernacexpr.(ComFixpoint.adjust_rec_order ~structonly:true fix.binders fix.rec_order)) l in let fixkind = Declare.Obls.IsFixpoint annots in let l = List.map2 (fun fix rec_order -> { fix with Vernacexpr.rec_order }) l annots in do_program_recursive ~pm ~scope ~poly ?typing_flags ?using fixkind l | _, _ -> CErrors.user_err (str "Well-founded fixpoints not allowed in mutually recursive blocks.") let do_cofixpoint ~pm ~scope ~poly ?using fixl = let fixl = List.map (fun fix -> { fix with Vernacexpr.rec_order = None }) fixl in do_program_recursive ~pm ~scope ~poly ?using Declare.Obls.IsCoFixpoint fixl coq-8.15.0/vernac/comProgramFixpoint.mli000066400000000000000000000022131417001151100201640ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* scope:Locality.locality -> poly:bool -> ?typing_flags:Declarations.typing_flags -> ?using:Vernacexpr.section_subset_expr -> fixpoint_expr list -> Declare.OblState.t val do_cofixpoint : pm:Declare.OblState.t -> scope:Locality.locality -> poly:bool -> ?using:Vernacexpr.section_subset_expr -> cofixpoint_expr list -> 
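(* Usage sketch, for illustration only (the wrapper name and the chosen
   [scope]/[poly] values are assumptions, not part of this interface):

     let handle_program_fixpoint ~pm fixl =
       ComProgramFixpoint.do_fixpoint ~pm
         ~scope:(Locality.Global Locality.ImportDefaultBehavior)
         ~poly:false fixl

   The returned [Declare.OblState.t] carries the obligations generated for
   the fixpoint and is meant to be threaded into subsequent commands. *)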
Declare.OblState.t coq-8.15.0/vernac/comSearch.ml000066400000000000000000000141141417001151100160730ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* user_err ?loc:qid.CAst.loc (str "Module/Section " ++ Ppconstr.pr_qualid qid ++ str " not found.") let interp_search_restriction = function | SearchOutside l -> (List.map global_module l, true) | SearchInside l -> (List.map global_module l, false) let kind_searcher = Decls.(function (* Kinds referring to the keyword introducing the object *) | IsAssumption _ | IsDefinition (Definition | Example | Fixpoint | CoFixpoint | Method | StructureComponent | Let) | IsProof _ | IsPrimitive as k -> Inl k (* Kinds referring to the status of the object *) | IsDefinition (Coercion | SubClass | IdentityCoercion as k') -> let coercions = Coercionops.coercions () in Inr (fun gr -> List.exists (fun c -> GlobRef.equal c.Coercionops.coe_value gr && (k' <> SubClass && k' <> IdentityCoercion || c.Coercionops.coe_is_identity)) coercions) | IsDefinition CanonicalStructure -> let canonproj = Structures.CSTable.entries () in Inr (fun gr -> List.exists (fun c -> GlobRef.equal c.Structures.CSTable.solution gr) canonproj) | IsDefinition Scheme -> let schemes = DeclareScheme.all_schemes () in Inr (fun gr -> Indset.exists (fun c -> GlobRef.equal (GlobRef.IndRef c) gr) schemes) | IsDefinition Instance -> let instances = Typeclasses.all_instances () in Inr (fun gr -> List.exists (fun c -> GlobRef.equal c.Typeclasses.is_impl gr) instances)) let interp_search_item env sigma = function | SearchSubPattern ((where,head),pat) -> let expected_type = Pretyping.(if head then IsType else WithoutTypeConstraint) in let pat = try Constrintern.interp_constr_pattern env sigma ~expected_type pat with e when CErrors.noncritical e -> (* We cannot ensure (yet?) that a typable pattern will actually be typed, consider e.g. (forall A, A -> A /\ A) which fails, not seeing that A can be Prop; so we use an untyped pattern as a fallback (i.e w/o no insertion of coercions, no compilation of pattern-matching) *) snd (Constrintern.intern_constr_pattern env sigma ~as_type:head pat) in GlobSearchSubPattern (where,head,pat) | SearchString ((Anywhere,false),s,None) when Id.is_valid_ident_part s && String.equal (String.drop_simple_quotes s) s -> GlobSearchString s | SearchString ((where,head),s,sc) -> (try let ref = Notation.interp_notation_as_global_reference ~head:false (fun _ -> true) s sc in GlobSearchSubPattern (where,head,Pattern.PRef ref) with UserError _ -> user_err (str "Unable to interpret " ++ quote (str s) ++ str " as a reference.")) | SearchKind k -> match kind_searcher k with | Inl k -> GlobSearchKind k | Inr f -> GlobSearchFilter f let rec interp_search_request env sigma = function | b, SearchLiteral i -> b, GlobSearchLiteral (interp_search_item env sigma i) | b, SearchDisjConj l -> b, GlobSearchDisjConj (List.map (List.map (interp_search_request env sigma)) l) (* 05f22a5d6d5b8e3e80f1a37321708ce401834430 introduced the `search_output_name_only` option to avoid excessive printing when searching. The motivation was to make search usable for IDE completion, however, it is still too slow due to the non-indexed nature of the underlying search mechanism. In the future we should deprecate the option and provide a fast, indexed name-searching interface. 
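   For illustration, assuming the usual option syntax, an IDE can issue
       Set Search Output Name Only.
       Search (_ + _ = _).
   to obtain only the names of the matching lemmas, without their
   statements, which keeps the answer small enough for completion purposes.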
*) let search_output_name_only = ref false let () = declare_bool_option { optdepr = false; optkey = ["Search";"Output";"Name";"Only"]; optread = (fun () -> !search_output_name_only); optwrite = (:=) search_output_name_only } let interp_search env sigma s r = let r = interp_search_restriction r in let get_pattern c = snd (Constrintern.intern_constr_pattern env sigma c) in let warnlist = ref [] in let pr_search ref kind env c = let pr = pr_global ref in let pp = if !search_output_name_only then pr else begin let open Impargs in let impls = implicits_of_global ref in let impargs = select_stronger_impargs impls in let impargs = List.map binding_kind_of_status impargs in if List.length impls > 1 || List.exists Glob_term.(function Explicit -> false | MaxImplicit | NonMaxImplicit -> true) (List.skipn_at_least (Termops.nb_prod_modulo_zeta Evd.(from_env env) (EConstr.of_constr c)) impargs) then warnlist := pr :: !warnlist; let pc = pr_ltype_env env Evd.(from_env env) ~impargs c in hov 2 (pr ++ str":" ++ spc () ++ pc) end in Feedback.msg_notice pp in (match s with | SearchPattern c -> (Search.search_pattern env sigma (get_pattern c) r |> Search.prioritize_search) pr_search | SearchRewrite c -> (Search.search_rewrite env sigma (get_pattern c) r |> Search.prioritize_search) pr_search | Search sl -> (Search.search env sigma (List.map (interp_search_request env Evd.(from_env env)) sl) r |> Search.prioritize_search) pr_search); if !warnlist <> [] then Feedback.msg_notice (str "(" ++ hov 0 (strbrk "use \"About\" for full details on the implicit arguments of " ++ pr_enum (fun x -> x) !warnlist ++ str ")")) coq-8.15.0/vernac/comSearch.mli000066400000000000000000000015011417001151100162400ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Evd.evar_map -> Vernacexpr.searchable -> Vernacexpr.search_restriction -> unit coq-8.15.0/vernac/comTactic.ml000066400000000000000000000062161417001151100161010ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* interpretable type 'a tactic_interpreter = Interpreter of ('a -> interpretable) let register_tactic_interpreter na f = let t = Dyn.create na in interp_map := DMap.add t f !interp_map; Interpreter (fun x -> I (t,x)) let interp_tac (I (tag,t)) = let f = DMap.find tag !interp_map in f t type parallel_solver = pstate:Declare.Proof.t -> info:int option -> interpretable -> abstract:bool -> with_end_tac:bool -> Declare.Proof.t let print_info_trace = declare_intopt_option_and_ref ~depr:false ~key:["Info" ; "Level"] let solve_core ~pstate n ~info t ~with_end_tac:b = let pstate, status = Declare.Proof.map_fold_endline ~f:(fun etac p -> let with_end_tac = if b then Some etac else None in let info = Option.append info (print_info_trace ()) in let (p,status) = Proof.solve n info t ?with_end_tac p in (* in case a strict subtree was completed, go back to the top of the prooftree *) let p = Proof.maximal_unfocus Vernacentries.command_focus p in p,status) pstate in if not status then Feedback.feedback Feedback.AddedAxiom; pstate let solve ~pstate n ~info t ~with_end_tac = let t = interp_tac t in solve_core ~pstate n ~info t ~with_end_tac let check_par_applicable pstate = Declare.Proof.fold pstate ~f:(fun p -> (Proof.data p).Proof.goals |> List.iter (fun goal -> 
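(* Illustrative sketch of how a tactic language plugin is expected to use
   this module; the names [raw_tac], [interp_raw] and the registered string
   are hypothetical:

     let interp : raw_tac tactic_interpreter =
       register_tactic_interpreter "my-tactic-lang" interp_raw

     let solve_all ~pstate (t : raw_tac) =
       let Interpreter wrap = interp in
       solve ~pstate Goal_select.SelectAll ~info:None (wrap t)
         ~with_end_tac:false

   [register_tactic_interpreter] must be applied to a given string only once
   per process, hence the partial application at module initialisation. *)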
let is_ground = let { Proof.sigma = sigma0 } = Declare.Proof.fold pstate ~f:Proof.data in let g = Evd.find sigma0 goal in let concl, hyps = Evd.evar_concl g, Evd.evar_context g in Evarutil.is_ground_term sigma0 concl && List.for_all (Context.Named.Declaration.for_all (Evarutil.is_ground_term sigma0)) hyps in if not is_ground then CErrors.user_err Pp.(strbrk("The par: goal selector does not support goals with existential variables")))) let par_implementation = ref (fun ~pstate ~info t ~abstract ~with_end_tac -> let t = interp_tac t in let t = Proofview.Goal.enter (fun _ -> if abstract then Abstract.tclABSTRACT None ~opaque:true t else t) in solve_core ~pstate Goal_select.SelectAll ~info t ~with_end_tac) let set_par_implementation f = par_implementation := f let solve_parallel ~pstate ~info t ~abstract ~with_end_tac = check_par_applicable pstate; !par_implementation ~pstate ~info t ~abstract ~with_end_tac coq-8.15.0/vernac/comTactic.mli000066400000000000000000000043041417001151100162460ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* interpretable) (** ['a] should be marshallable if ever used with [par:]. Must be called no more than once per process with a particular string: make sure to use partial application. *) val register_tactic_interpreter : string -> ('a -> unit Proofview.tactic) -> 'a tactic_interpreter (** Entry point for toplevel tactic expression execution. It calls Proof.solve after having interpreted the tactic, and after the tactic runs it unfocus as much as needed to put a goal under focus. *) val solve : pstate:Declare.Proof.t -> Goal_select.t -> info:int option -> interpretable -> with_end_tac:bool -> Declare.Proof.t (** [par: tac] runs tac on all goals, possibly in parallel using a worker pool. If tac is [abstract tac1], then [abstract] is passed explicitly to the solver and [tac1] passed to worker since it is up to master to opacify the sub proofs produced by the workers. *) type parallel_solver = pstate:Declare.Proof.t -> info:int option -> interpretable -> abstract:bool -> (* the tactic result has to be opacified as per abstract *) with_end_tac:bool -> Declare.Proof.t (** Entry point when the goal selector is par: *) val solve_parallel : parallel_solver (** By default par: is implemented with all: (sequential). The STM and LSP document manager provide "more parallel" implementations *) val set_par_implementation : parallel_solver -> unit coq-8.15.0/vernac/debugHook.ml000066400000000000000000000053341417001151100161020ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* = 2 && s.[0] == '"' && s.[String.length s - 1] == '"' then String.sub s 1 (String.length s - 2) else s let parse_complex inst : (t, string) result = if 'r' = String.get inst 0 then let arg = String.(trim (sub inst 1 (length inst - 1))) in if arg <> "" then match int_of_string_opt arg with | Some num -> if num < 0 then Error "number must be positive" else Ok (RunCnt num) | None -> Ok (RunBreakpoint (possibly_unquote arg)) else Error ("invalid input: " ^ inst) else Error ("invalid input: " ^ inst) (* XXX: Should be moved to the clients *) let parse inst : (t, string) result = match inst with | "" -> Ok StepIn | "s" -> Ok Skip | "x" -> Ok Interrupt | "h"| "?" 
-> Ok Help | _ -> parse_complex inst end module Answer = struct type t = | Prompt of Pp.t | Goal of Pp.t | Output of Pp.t | Init | Stack of (string * (string * int list) option) list | Vars of (string * Pp.t) list end module Intf = struct type t = { read_cmd : unit -> Action.t (** request a debugger command from the client *) ; submit_answer : Answer.t -> unit (** receive a debugger answer from Ltac *) ; isTerminal : bool (** whether the debugger is running as a terminal (non-visual) *) } let ltac_debug_ref : t option ref = ref None let set hooks = ltac_debug_ref := Some hooks let get () = !ltac_debug_ref end coq-8.15.0/vernac/debugHook.mli000066400000000000000000000107551417001151100162560ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Coq sends Answer.Init <- IDE sends zero or more initialization requests such as Action.UpdBpts <- IDE sends Action.Configd Stopping in the debugger generates Answer.Prompt and Answer.Goal messages, at which point the IDE will typically call GetStack and GetVars. When the IDE sends with StepIn..Continue, the debugger will execute more code. At that point, Coq won't try to read more messages from the IDE until the debugger stops again or exits. *) module Action : sig type t = | StepIn (* execute a single step in the tactic *) | StepOver (* execute steps until DB is back in the current stack frame *) | StepOut (* execute steps until DB exits current stack frame *) | Continue (* execute steps until a breakpoint or the debugger exits *) | Skip (* legacy: continue execution with no further debugging *) | Interrupt (* exit the debugger *) | Help (* legacy: print help text *) | UpdBpts of ((string * int) * bool) list (* sets or clears breakpoints. Values are: - absolute pathname of the the file - byte offset in the UTF-8 representation of the file - true to set, false to clear *) | Configd (* "config done" - indicates that the debugger has been configured, debugger does a Continue *) | GetStack (* request the call stack, returned as Answer.Stack *) | GetVars of int (* request the variables defined for stack frame N, returned as Answer.Vars. 0 is the topmost frame, followed by 1,2,3, ... *) | RunCnt of int (* legacy: run for N steps *) | RunBreakpoint of string (* legacy: run until an idtac prints the string *) | Command of string (* legacy: user-typed command to the debugger *) | Failed (* legacy: user command doesn't parse *) | Ignore (* internal: do nothing, read another command *) (* XXX: Should be moved to the clients *) val parse : string -> (t, string) result end module Answer : sig type t = | Prompt of Pp.t (* output signalling the debugger has stopped Should be printed as a prompt for user input, e.g. in color without a newline at the end *) | Goal of Pp.t (* goal for the current proof state *) | Output of Pp.t (* general output *) | Init (* signals initialization of the debugger *) | Stack of (string * (string * int list) option) list (* The call stack, starting from TOS. Values are: - description of the frame (eg tactic name, line number, module) - absolute pathname of the file - array containing Loc.bp and Loc.ep of the corresponding code *) | Vars of (string * Pp.t) list (* The variable values for the specified stack frame. 
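       An illustrative terminal client (a sketch, not part of this
       interface; error handling omitted) would register its hooks roughly
       as follows:

         let () =
           DebugHook.Intf.set
             { read_cmd = (fun () ->
                 match DebugHook.Action.parse (read_line ()) with
                 | Ok a -> a
                 | Error _ -> DebugHook.Action.Failed)
             ; submit_answer = (fun a ->
                 match a with
                 | DebugHook.Answer.Prompt p
                 | DebugHook.Answer.Goal p
                 | DebugHook.Answer.Output p -> Feedback.msg_notice p
                 | _ -> ())
             ; isTerminal = true }

       after which Ltac evaluation reports stops through [submit_answer] and
       reads debugger commands back through [read_cmd].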
Values are variable name and variable value *) end module Intf : sig type t = { read_cmd : unit -> Action.t (** request a debugger command from the client *) ; submit_answer : Answer.t -> unit (** receive a debugger answer from Ltac *) ; isTerminal : bool (** whether the debugger is running as a terminal (non-visual) *) } val set : t -> unit val get : unit -> t option end coq-8.15.0/vernac/declare.ml000066400000000000000000002776101417001151100156020ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a -> 'a) CEphemeron.key type t = unit g let make_g hook = CEphemeron.create hook let make (hook : S.t -> unit) : t = CEphemeron.create (fun x () -> hook x) let hcall hook x s = CEphemeron.default hook (fun _ x -> x) x s let call_g ?hook x s = Option.cata (fun hook -> hcall hook x s) s hook let call ?hook x = Option.iter (fun hook -> hcall hook x ()) hook end module CInfo = struct type 'constr t = { name : Id.t (** Name of theorem *) ; typ : 'constr (** Type of theorem *) ; args : Name.t list (** Names to pre-introduce *) ; impargs : Impargs.manual_implicits (** Explicitily declared implicit arguments *) ; using : Proof_using.t option (** Explicit declaration of section variables used by the constant *) } let make ~name ~typ ?(args=[]) ?(impargs=[]) ?using () = { name; typ; args; impargs; using } let to_constr sigma thm = { thm with typ = EConstr.to_constr sigma thm.typ } let get_typ { typ; _ } = typ let get_name { name; _ } = name end (** Information for a declaration, interactive or not, includes parameters shared by mutual constants *) module Info = struct type t = { poly : bool ; inline : bool ; kind : Decls.logical_kind ; udecl : UState.universe_decl ; scope : Locality.locality ; hook : Hook.t option ; typing_flags : Declarations.typing_flags option } (** Note that [opaque] doesn't appear here as it is not known at the start of the proof in the interactive case. 
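      As a rough illustration of how these records are meant to be combined
      (a sketch only; [sigma], [body] and [ty] stand for an evar map and
      econstrs built by the caller and are not defined here):

        let declare_simple_definition ~name ~sigma ~body ~ty =
          let cinfo = CInfo.make ~name ~typ:(Some ty) () in
          let info = Info.make ~poly:false ~kind:Decls.(IsDefinition Definition) () in
          declare_definition ~info ~cinfo ~opaque:false ~body sigma

      where [declare_definition], defined further down in this file, returns
      the [GlobRef.t] of the declared object.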
*) let make ?(poly=false) ?(inline=false) ?(kind=Decls.(IsDefinition Definition)) ?(udecl=UState.default_univ_decl) ?(scope=Locality.Global Locality.ImportDefaultBehavior) ?hook ?typing_flags () = { poly; inline; kind; udecl; scope; hook; typing_flags } end (** Declaration of constants and parameters *) type 'a pproof_entry = { proof_entry_body : 'a; (* List of section variables *) proof_entry_secctx : Id.Set.t option; (* State id on which the completion of type checking is reported *) proof_entry_feedback : Stateid.t option; proof_entry_type : Constr.types option; proof_entry_universes : UState.named_universes_entry; proof_entry_opaque : bool; proof_entry_inline_code : bool; } type proof_entry = Evd.side_effects Opaques.const_entry_body pproof_entry type parameter_entry = { parameter_entry_secctx : Id.Set.t option; parameter_entry_type : Constr.types; parameter_entry_universes : UState.named_universes_entry; parameter_entry_inline_code : Entries.inline; } type primitive_entry = { prim_entry_type : (Constr.types * UState.named_universes_entry) option; prim_entry_content : CPrimitives.op_or_type; } let default_univ_entry = UState.Monomorphic_entry Univ.ContextSet.empty let default_named_univ_entry = default_univ_entry, UnivNames.empty_binders (** [univsbody] are universe-constraints attached to the body-only, used in vio-delayed opaque constants and private poly universes *) let definition_entry_core ?(opaque=false) ?using ?(inline=false) ?types ?(univs=default_named_univ_entry) ?(eff=Evd.empty_side_effects) ?(univsbody=Univ.ContextSet.empty) body = { proof_entry_body = Future.from_val ((body,univsbody), eff); proof_entry_secctx = using; proof_entry_type = types; proof_entry_universes = univs; proof_entry_opaque = opaque; proof_entry_feedback = None; proof_entry_inline_code = inline} let definition_entry = definition_entry_core ?eff:None ?univsbody:None let parameter_entry ?inline ?(univs=default_named_univ_entry) typ = { parameter_entry_secctx = None; parameter_entry_type = typ; parameter_entry_universes = univs; parameter_entry_inline_code = inline; } let primitive_entry ?types c = { prim_entry_type = types; prim_entry_content = c; } type constant_entry = | DefinitionEntry of proof_entry | ParameterEntry of parameter_entry | PrimitiveEntry of primitive_entry let local_csts = Summary.ref ~name:"local-csts" Cset_env.empty let is_local_constant c = Cset_env.mem c !local_csts type constant_obj = { cst_kind : Decls.logical_kind; cst_locl : Locality.import_status; } let load_constant i ((sp,kn), obj) = if Nametab.exists_cci sp then raise (DeclareUniv.AlreadyDeclared (None, Libnames.basename sp)); let con = Global.constant_of_delta_kn kn in Nametab.push (Nametab.Until i) sp (GlobRef.ConstRef con); Dumpglob.add_constant_kind con obj.cst_kind; begin match obj.cst_locl with | Locality.ImportNeedQualified -> local_csts := Cset_env.add con !local_csts | Locality.ImportDefaultBehavior -> () end (* Opening means making the name without its module qualification available *) let open_constant i ((sp,kn), obj) = (* Never open a local definition *) match obj.cst_locl with | Locality.ImportNeedQualified -> () | Locality.ImportDefaultBehavior -> let con = Global.constant_of_delta_kn kn in Nametab.push (Nametab.Exactly i) sp (GlobRef.ConstRef con) let exists_name id = Decls.variable_exists id || Global.exists_objlabel (Label.of_id id) let check_exists id = if exists_name id then raise (DeclareUniv.AlreadyDeclared (None, id)) let cache_constant ((sp,kn), obj) = (* Invariant: the constant must exist in the 
logical environment *) let kn' = if Global.exists_objlabel (Label.of_id (Libnames.basename sp)) then Constant.make1 kn else CErrors.anomaly Pp.(str"Missing constant " ++ Id.print(Libnames.basename sp) ++ str".") in assert (Environ.QConstant.equal (Global.env ()) kn' (Constant.make1 kn)); Nametab.push (Nametab.Until 1) sp (GlobRef.ConstRef (Constant.make1 kn)); Dumpglob.add_constant_kind (Constant.make1 kn) obj.cst_kind let discharge_constant ((sp, kn), obj) = Some obj let classify_constant cst = Libobject.Substitute cst let (objConstant : constant_obj Libobject.Dyn.tag) = let open Libobject in declare_object_full { (default_object "CONSTANT") with cache_function = cache_constant; load_function = load_constant; open_function = simple_open open_constant; classify_function = classify_constant; subst_function = ident_subst_function; discharge_function = discharge_constant } let inConstant v = Libobject.Dyn.Easy.inj v objConstant let update_tables c = Impargs.declare_constant_implicits c; Notation.declare_ref_arguments_scope Evd.empty (GlobRef.ConstRef c) let register_constant kn kind local = let o = inConstant { cst_kind = kind; cst_locl = local; } in let id = Label.to_id (Constant.label kn) in let _ = Lib.add_leaf id o in update_tables kn let register_side_effect (c, body, role) = (* Register the body in the opaque table *) let () = match body with | None -> () | Some opaque -> Opaques.declare_private_opaque opaque in let () = register_constant c Decls.(IsProof Theorem) Locality.ImportDefaultBehavior in match role with | None -> () | Some (Evd.Schema (ind, kind)) -> DeclareScheme.declare_scheme kind [|ind,c|] let get_roles export eff = let map (c, body) = let role = try Some (Cmap.find c eff.Evd.seff_roles) with Not_found -> None in (c, body, role) in List.map map export let export_side_effects eff = let export = Global.export_private_constants eff.Evd.seff_private in let export = get_roles export eff in List.iter register_side_effect export let record_aux env s_ty s_bo = let open Environ in let in_ty = keep_hyps env s_ty in let v = String.concat " " (CList.map_filter (fun decl -> let id = NamedDecl.get_id decl in if List.exists (NamedDecl.get_id %> Id.equal id) in_ty then None else Some (Id.to_string id)) (keep_hyps env s_bo)) in Aux_file.record_in_aux "context_used" v let pure_definition_entry ?(opaque=false) ?(inline=false) ?types ?(univs=default_named_univ_entry) body = { proof_entry_body = ((body,Univ.ContextSet.empty), ()); proof_entry_secctx = None; proof_entry_type = types; proof_entry_universes = univs; proof_entry_opaque = opaque; proof_entry_feedback = None; proof_entry_inline_code = inline} let delayed_definition_entry ~opaque ?feedback_id ~using ~univs ?types body = { proof_entry_body = body ; proof_entry_secctx = using ; proof_entry_type = types ; proof_entry_universes = univs ; proof_entry_opaque = opaque ; proof_entry_feedback = feedback_id ; proof_entry_inline_code = false } let extract_monomorphic = function | UState.Monomorphic_entry ctx -> Entries.Monomorphic_entry, ctx | UState.Polymorphic_entry uctx -> Entries.Polymorphic_entry uctx, Univ.ContextSet.empty let cast_proof_entry e = let (body, ctx), () = e.proof_entry_body in let univ_entry = if Univ.ContextSet.is_empty ctx then fst (e.proof_entry_universes) else match fst (e.proof_entry_universes) with | UState.Monomorphic_entry ctx' -> (* This can actually happen, try compiling EqdepFacts for instance *) UState.Monomorphic_entry (Univ.ContextSet.union ctx' ctx) | UState.Polymorphic_entry _ -> CErrors.anomaly Pp.(str 
"Local universes in non-opaque polymorphic definition."); in let univ_entry, ctx = extract_monomorphic univ_entry in { Entries.const_entry_body = body; const_entry_secctx = e.proof_entry_secctx; const_entry_type = e.proof_entry_type; const_entry_universes = univ_entry; const_entry_inline_code = e.proof_entry_inline_code; }, ctx type ('a, 'b) effect_entry = | EffectEntry : (private_constants Opaques.const_entry_body, unit) effect_entry | PureEntry : (unit Entries.proof_output, Constr.constr) effect_entry let cast_opaque_proof_entry (type a b) (entry : (a, b) effect_entry) (e : a pproof_entry) : b Entries.opaque_entry * _ = let typ = match e.proof_entry_type with | None -> assert false | Some typ -> typ in let secctx = match e.proof_entry_secctx with | None -> let open Environ in let env = Global.env () in let hyp_typ, hyp_def = if List.is_empty (Environ.named_context env) then Id.Set.empty, Id.Set.empty else let ids_typ = global_vars_set env typ in let pf, env = match entry with | PureEntry -> let (pf, _), () = e.proof_entry_body in pf, env | EffectEntry -> let (pf, _), eff = Future.force e.proof_entry_body in let env = Safe_typing.push_private_constants env eff in pf, env in let vars = global_vars_set env pf in ids_typ, vars in let () = if Aux_file.recording () then record_aux env hyp_typ hyp_def in Environ.really_needed env (Id.Set.union hyp_typ hyp_def) | Some hyps -> hyps in let (body, (univ_entry, ctx) : b * _) = match entry with | PureEntry -> let (body, uctx), () = e.proof_entry_body in let univ_entry = match fst (e.proof_entry_universes) with | UState.Monomorphic_entry uctx' -> Entries.Monomorphic_entry, (Univ.ContextSet.union uctx uctx') | UState.Polymorphic_entry uctx' -> assert (Univ.ContextSet.is_empty uctx); Entries.Polymorphic_entry uctx', Univ.ContextSet.empty in body, univ_entry | EffectEntry -> (), extract_monomorphic (fst (e.proof_entry_universes)) in { Entries.opaque_entry_body = body; opaque_entry_secctx = secctx; opaque_entry_type = typ; opaque_entry_universes = univ_entry; }, ctx let feedback_axiom () = Feedback.(feedback AddedAxiom) let is_unsafe_typing_flags flags = let flags = Option.default (Global.typing_flags ()) flags in let open Declarations in not (flags.check_universes && flags.check_guarded && flags.check_positive) let make_ubinders uctx (univs, ubinders as u) = match univs with | UState.Polymorphic_entry _ -> u | UState.Monomorphic_entry _ -> (UState.Monomorphic_entry uctx, ubinders) let declare_constant_core ~name ~typing_flags cd = (* Logically define the constant and its subproofs, no libobject tampering *) let decl, unsafe, ubinders, delayed = match cd with | DefinitionEntry de -> (* We deal with side effects *) if not de.proof_entry_opaque then let body, eff = Future.force de.proof_entry_body in (* This globally defines the side-effects in the environment and registers their libobjects. *) let () = export_side_effects eff in let de = { de with proof_entry_body = body, () } in let e, ctx = cast_proof_entry de in let ubinders = make_ubinders ctx de.proof_entry_universes in (* We register the global universes after exporting side-effects, since the latter depend on the former. 
*) let () = DeclareUctx.declare_universe_context ~poly:false ctx in let cd = Entries.DefinitionEntry e in ConstantEntry cd, false, ubinders, None else let map (body, eff) = body, eff.Evd.seff_private in let body = Future.chain de.proof_entry_body map in let feedback_id = de.proof_entry_feedback in let de = { de with proof_entry_body = body } in let cd, ctx = cast_opaque_proof_entry EffectEntry de in let ubinders = make_ubinders ctx de.proof_entry_universes in let () = DeclareUctx.declare_universe_context ~poly:false ctx in OpaqueEntry cd, false, ubinders, Some (body, feedback_id) | ParameterEntry e -> let univ_entry, ctx = extract_monomorphic (fst e.parameter_entry_universes) in let ubinders = make_ubinders ctx e.parameter_entry_universes in let () = DeclareUctx.declare_universe_context ~poly:false ctx in let e = { Entries.parameter_entry_secctx = e.parameter_entry_secctx; Entries.parameter_entry_type = e.parameter_entry_type; Entries.parameter_entry_universes = univ_entry; Entries.parameter_entry_inline_code = e.parameter_entry_inline_code; } in ConstantEntry (Entries.ParameterEntry e), not (Lib.is_modtype_strict()), ubinders, None | PrimitiveEntry e -> let typ, ubinders, ctx = match e.prim_entry_type with | None -> None, UnivNames.empty_binders, Univ.ContextSet.empty | Some (typ, (univs, ubinders)) -> let univ_entry, ctx = extract_monomorphic univs in Some (typ, univ_entry), ubinders, ctx in let () = DeclareUctx.declare_universe_context ~poly:false ctx in let e = { Entries.prim_entry_type = typ; Entries.prim_entry_content = e.prim_entry_content; } in let ubinders = (UState.Monomorphic_entry ctx, ubinders) in ConstantEntry (Entries.PrimitiveEntry e), false, ubinders, None in let kn = Global.add_constant ?typing_flags name decl in let () = DeclareUniv.declare_univ_binders (GlobRef.ConstRef kn) ubinders in if unsafe || is_unsafe_typing_flags typing_flags then feedback_axiom(); kn, delayed let declare_constant ?(local = Locality.ImportDefaultBehavior) ~name ~kind ~typing_flags cd = let () = check_exists name in let kn, delayed = declare_constant_core ~typing_flags ~name cd in (* Register the libobjects attached to the constants *) let () = match delayed with | None -> () | Some (body, feedback_id) -> let open Declarations in match (Global.lookup_constant kn).const_body with | OpaqueDef o -> let (_, _, _, i) = Opaqueproof.repr o in Opaques.declare_defined_opaque ?feedback_id i body | Def _ | Undef _ | Primitive _ -> assert false in let () = register_constant kn kind local in kn let declare_private_constant ?role ?(local = Locality.ImportDefaultBehavior) ~name ~kind de = let de, ctx = if not de.proof_entry_opaque then let de, ctx = cast_proof_entry de in DefinitionEff de, ctx else let de, ctx = cast_opaque_proof_entry PureEntry de in OpaqueEff de, ctx in let kn, eff = Global.add_private_constant name ctx de in let () = register_constant kn kind local in let seff_roles = match role with | None -> Cmap.empty | Some r -> Cmap.singleton kn r in let eff = { Evd.seff_private = eff; Evd.seff_roles; } in kn, eff let inline_private_constants ~uctx env ce = let body, eff = ce.proof_entry_body in let cb, ctx = Safe_typing.inline_private_constants env (body, eff.Evd.seff_private) in let uctx = UState.merge ~sideff:true Evd.univ_rigid uctx ctx in cb, uctx (** Declaration of section variables and local definitions *) type variable_declaration = | SectionLocalDef of proof_entry | SectionLocalAssum of { typ:Constr.types; impl:Glob_term.binding_kind ; univs:UState.named_universes_entry } (* This object is only 
for things which iterate over objects to find variables (only Prettyp.print_context AFAICT) *) let objVariable : unit Libobject.Dyn.tag = let open Libobject in declare_object_full { (default_object "VARIABLE") with classify_function = (fun () -> Dispose)} let inVariable v = Libobject.Dyn.Easy.inj v objVariable let declare_variable_core ~name ~kind d = (* Variables are distinguished by only short names *) if Decls.variable_exists name then raise (DeclareUniv.AlreadyDeclared (None, name)); let impl,opaque,univs = match d with (* Fails if not well-typed *) | SectionLocalAssum {typ;impl;univs} -> let poly, uctx = match fst univs with | UState.Monomorphic_entry uctx -> false, uctx | UState.Polymorphic_entry uctx -> true, Univ.ContextSet.of_context uctx in let () = DeclareUctx.declare_universe_context ~poly uctx in let () = Global.push_named_assum (name,typ) in impl, true, univs | SectionLocalDef (de) -> (* The body should already have been forced upstream because it is a section-local definition, but it's not enforced by typing *) let ((body, body_uctx), eff) = Future.force de.proof_entry_body in let () = export_side_effects eff in let poly, type_uctx = match fst de.proof_entry_universes with | UState.Monomorphic_entry uctx -> false, uctx | UState.Polymorphic_entry uctx -> true, Univ.ContextSet.of_context uctx in let univs = Univ.ContextSet.union body_uctx type_uctx in (* We must declare the universe constraints before type-checking the term. *) let () = DeclareUctx.declare_universe_context ~poly univs in let se = { Entries.secdef_body = body; secdef_secctx = de.proof_entry_secctx; secdef_type = de.proof_entry_type; } in let () = Global.push_named_def (name, se) in Glob_term.Explicit, de.proof_entry_opaque, de.proof_entry_universes in Nametab.push (Nametab.Until 1) (Libnames.make_path DirPath.empty name) (GlobRef.VarRef name); Decls.(add_variable_data name {opaque;kind}); ignore(Lib.add_leaf name (inVariable ()) : Libobject.object_name); Impargs.declare_var_implicits ~impl name; Notation.declare_ref_arguments_scope Evd.empty (GlobRef.VarRef name) let declare_variable ~name ~kind ~typ ~impl ~univs = declare_variable_core ~name ~kind (SectionLocalAssum { typ; impl; univs }) (* Declaration messages *) let pr_rank i = pr_nth (i+1) let fixpoint_message indexes l = Flags.if_verbose Feedback.msg_info (match l with | [] -> CErrors.anomaly (Pp.str "no recursive definition.") | [id] -> Id.print id ++ str " is recursively defined" ++ (match indexes with | Some [|i|] -> str " (guarded on "++pr_rank i++str " argument)" | _ -> mt ()) | l -> hov 0 (prlist_with_sep pr_comma Id.print l ++ spc () ++ str "are recursively defined" ++ match indexes with | Some a -> spc () ++ str "(guarded respectively on " ++ prvect_with_sep pr_comma pr_rank a ++ str " arguments)" | None -> mt ())) let cofixpoint_message l = Flags.if_verbose Feedback.msg_info (match l with | [] -> CErrors.anomaly (Pp.str "No corecursive definition.") | [id] -> Id.print id ++ str " is corecursively defined" | l -> hov 0 (prlist_with_sep pr_comma Id.print l ++ spc () ++ str "are corecursively defined")) let recursive_message isfix i l = (if isfix then fixpoint_message i else cofixpoint_message) l let definition_message id = Flags.if_verbose Feedback.msg_info (Id.print id ++ str " is defined") let assumption_message id = (* Changing "assumed" to "declared", "assuming" referring more to the type of the object than to the name of the object (see discussion on coqdev: "Chapter 4 of the Reference Manual", 8/10/2015) *) Flags.if_verbose Feedback.msg_info 
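(* Illustrative use of the constant declaration path above (a sketch only;
   [name] and [body] are assumed to be an [Id.t] and a [Constr.t] built by
   the caller):

     let declare_transparent_constant ~name body =
       let entry = definition_entry ~opaque:false body in
       declare_constant ~name ~kind:Decls.(IsDefinition Definition)
         ~typing_flags:None (DefinitionEntry entry)

   The returned [Constant.t] has already been pushed in the nametab and had
   its implicit arguments and argument scopes registered via
   [register_constant]. *)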
(Id.print id ++ str " is declared") module Internal = struct let pmap_entry_body ~f entry = { entry with proof_entry_body = f entry.proof_entry_body } let map_entry_body ~f entry = { entry with proof_entry_body = Future.chain entry.proof_entry_body f } let map_entry_type ~f entry = { entry with proof_entry_type = f entry.proof_entry_type } let set_opacity ~opaque entry = { entry with proof_entry_opaque = opaque } let rec decompose len c t accu = let open Constr in let open Context.Rel.Declaration in if len = 0 then (c, t, accu) else match kind c, kind t with | Lambda (na, u, c), Prod (_, _, t) -> decompose (pred len) c t (LocalAssum (na, u) :: accu) | LetIn (na, b, u, c), LetIn (_, _, _, t) -> decompose (pred len) c t (LocalDef (na, b, u) :: accu) | _ -> assert false let rec shrink ctx sign c t accu = let open Constr in let open Vars in match ctx, sign with | [], [] -> (c, t, accu) | p :: ctx, decl :: sign -> if noccurn 1 c && noccurn 1 t then let c = subst1 mkProp c in let t = subst1 mkProp t in shrink ctx sign c t accu else let c = Term.mkLambda_or_LetIn p c in let t = Term.mkProd_or_LetIn p t in let accu = if Context.Rel.Declaration.is_local_assum p then mkVar (NamedDecl.get_id decl) :: accu else accu in shrink ctx sign c t accu | _ -> assert false let shrink_entry sign const = let typ = match const.proof_entry_type with | None -> assert false | Some t -> t in let ((body, uctx), eff) = const.proof_entry_body in let (body, typ, ctx) = decompose (List.length sign) body typ [] in let (body, typ, args) = shrink ctx sign body typ [] in { const with proof_entry_body = ((body, uctx), eff) ; proof_entry_type = Some typ }, args module Constant = struct type t = constant_obj let tag = objConstant let kind obj = obj.cst_kind end let objVariable = objVariable end let declare_definition_scheme ~internal ~univs ~role ~name c = let kind = Decls.(IsDefinition Scheme) in let entry = pure_definition_entry ~univs c in let kn, eff = declare_private_constant ~role ~kind ~name entry in let () = if internal then () else definition_message name in kn, eff (* Locality stuff *) let declare_entry_core ~name ~scope ~kind ~typing_flags ?hook ~obls ~impargs ~uctx entry = let should_suggest = entry.proof_entry_opaque && not (List.is_empty (Global.named_context())) && Option.is_empty entry.proof_entry_secctx in let dref = match scope with | Locality.Discharge -> let () = declare_variable_core ~name ~kind (SectionLocalDef entry) in if should_suggest then Proof_using.suggest_variable (Global.env ()) name; Names.GlobRef.VarRef name | Locality.Global local -> let kn = declare_constant ~name ~local ~kind ~typing_flags (DefinitionEntry entry) in let gr = Names.GlobRef.ConstRef kn in if should_suggest then Proof_using.suggest_constant (Global.env ()) kn; gr in let () = Impargs.maybe_declare_manual_implicits false dref impargs in let () = definition_message name in Hook.call ?hook { Hook.S.uctx; obls; scope; dref }; dref let declare_entry = declare_entry_core ~obls:[] let mutual_make_bodies ~typing_flags ~fixitems ~rec_declaration ~possible_indexes = match possible_indexes with | Some possible_indexes -> let env = Global.env() in let env = Environ.update_typing_flags ?typing_flags env in let indexes = Pretyping.search_guard env possible_indexes rec_declaration in let vars = Vars.universes_of_constr (Constr.mkFix ((indexes,0),rec_declaration)) in let fixdecls = CList.map_i (fun i _ -> Constr.mkFix ((indexes,i),rec_declaration)) 0 fixitems in vars, fixdecls, Some indexes | None -> let fixdecls = CList.map_i (fun i _ -> 
Constr.mkCoFix (i,rec_declaration)) 0 fixitems in let vars = Vars.universes_of_constr (List.hd fixdecls) in vars, fixdecls, None let declare_mutually_recursive_core ~info ~cinfo ~opaque ~ntns ~uctx ~rec_declaration ~possible_indexes ?(restrict_ucontext=true) () = let { Info.poly; udecl; scope; kind; typing_flags; _ } = info in let vars, fixdecls, indexes = mutual_make_bodies ~typing_flags ~fixitems:cinfo ~rec_declaration ~possible_indexes in let uctx, univs = (* XXX: Obligations don't do this, this seems like a bug? *) if restrict_ucontext then let uctx = UState.restrict uctx vars in let univs = UState.check_univ_decl ~poly uctx udecl in uctx, univs else let univs = UState.univ_entry ~poly uctx in uctx, univs in let csts = CList.map2 (fun CInfo.{ name; typ; impargs; using } body -> let entry = definition_entry ~opaque ~types:typ ~univs ?using body in declare_entry ~name ~scope ~kind ~impargs ~uctx ~typing_flags entry) cinfo fixdecls in let isfix = Option.has_some possible_indexes in let fixnames = List.map (fun { CInfo.name } -> name) cinfo in recursive_message isfix indexes fixnames; List.iter (Metasyntax.add_notation_interpretation ~local:(scope=Locality.Discharge) (Global.env())) ntns; csts let declare_mutually_recursive = declare_mutually_recursive_core ~restrict_ucontext:true () let warn_let_as_axiom = CWarnings.create ~name:"let-as-axiom" ~category:"vernacular" Pp.(fun id -> strbrk "Let definition" ++ spc () ++ Names.Id.print id ++ spc () ++ strbrk "declared as an axiom.") let declare_parameter ~name ~scope ~hook ~impargs ~uctx pe = let local = match scope with | Locality.Discharge -> warn_let_as_axiom name; Locality.ImportNeedQualified | Locality.Global local -> local in let kind = Decls.(IsAssumption Conjectural) in let decl = ParameterEntry pe in let kn = declare_constant ~name ~local ~kind ~typing_flags:None decl in let dref = Names.GlobRef.ConstRef kn in let () = Impargs.maybe_declare_manual_implicits false dref impargs in let () = assumption_message name in let () = Hook.(call ?hook { S.uctx; obls = []; scope; dref}) in dref (* Preparing proof entries *) let error_unresolved_evars env sigma t evars = let pr_unresolved_evar e = hov 2 (str"- " ++ Printer.pr_existential_key env sigma e ++ str ": " ++ Himsg.explain_pretype_error env sigma (Pretype_errors.UnsolvableImplicit (e,None))) in CErrors.user_err (hov 0 begin str "The following term contains unresolved implicit arguments:"++ fnl () ++ str " " ++ Printer.pr_econstr_env env sigma t ++ fnl () ++ str "More precisely: " ++ fnl () ++ v 0 (prlist_with_sep cut pr_unresolved_evar (Evar.Set.elements evars)) end) let check_evars_are_solved env sigma t = let t = EConstr.of_constr t in let evars = Evarutil.undefined_evars_of_term sigma t in if not (Evar.Set.is_empty evars) then error_unresolved_evars env sigma t evars let prepare_definition ~info ~opaque ?using ~body ~typ sigma = let { Info.poly; udecl; inline; _ } = info in let env = Global.env () in let sigma, (body, types) = Evarutil.finalize ~abort_on_undefined_evars:false sigma (fun nf -> nf body, Option.map nf typ) in Option.iter (check_evars_are_solved env sigma) types; check_evars_are_solved env sigma body; let univs = Evd.check_univ_decl ~poly sigma udecl in let entry = definition_entry ~opaque ?using ~inline ?types ~univs body in let uctx = Evd.evar_universe_context sigma in entry, uctx let declare_definition_core ~info ~cinfo ~opaque ~obls ~body sigma = let { CInfo.name; impargs; typ; using; _ } = cinfo in let entry, uctx = prepare_definition ~info ~opaque ?using ~body ~typ 
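(* [prepare_definition] finalizes the evar map, fails if the body or the
   type still contain unresolved evars, checks the universe declaration
   against [udecl], and packages the result as a definition entry together
   with the universe state later passed to the declaration hook. *)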
sigma in let { Info.scope; kind; hook; typing_flags; _ } = info in declare_entry_core ~name ~scope ~kind ~impargs ~typing_flags ~obls ?hook ~uctx entry, uctx let declare_definition ~info ~cinfo ~opaque ~body sigma = declare_definition_core ~obls:[] ~info ~cinfo ~opaque ~body sigma |> fst let prepare_obligation ~name ~types ~body sigma = let env = Global.env () in let types = match types with | Some t -> t | None -> Retyping.get_type_of env sigma body in let sigma, (body, types) = Evarutil.finalize ~abort_on_undefined_evars:false sigma (fun nf -> nf body, nf types) in RetrieveObl.check_evars env sigma; let body, types = EConstr.(of_constr body, of_constr types) in let obls, _, body, cty = RetrieveObl.retrieve_obligations env name sigma 0 body types in let uctx = Evd.evar_universe_context sigma in body, cty, uctx, obls let prepare_parameter ~poly ~udecl ~types sigma = let env = Global.env () in Pretyping.check_evars_are_solved ~program_mode:false env sigma; let sigma, typ = Evarutil.finalize ~abort_on_undefined_evars:true sigma (fun nf -> nf types) in let univs = Evd.check_univ_decl ~poly sigma udecl in let pe = { parameter_entry_secctx = None; parameter_entry_type = typ; parameter_entry_universes = univs; parameter_entry_inline_code = None; } in sigma, pe type progress = Remain of int | Dependent | Defined of GlobRef.t module Obls_ = struct open Constr type 'a obligation_body = DefinedObl of 'a | TermObl of constr module Obligation = struct type t = { obl_name : Id.t ; obl_type : types ; obl_location : Evar_kinds.t Loc.located ; obl_body : pconstant obligation_body option ; obl_status : bool * Evar_kinds.obligation_definition_status ; obl_deps : Int.Set.t ; obl_tac : unit Proofview.tactic option } let set_type ~typ obl = {obl with obl_type = typ} end type obligations = {obls : Obligation.t array; remaining : int} type fixpoint_kind = IsFixpoint of lident option list | IsCoFixpoint module ProgramDecl = struct type 'a t = { prg_cinfo : constr CInfo.t ; prg_info : Info.t ; prg_opaque : bool ; prg_hook : 'a Hook.g option ; prg_body : constr ; prg_uctx : UState.t ; prg_obligations : obligations ; prg_deps : Id.t list ; prg_fixkind : fixpoint_kind option ; prg_notations : Metasyntax.where_decl_notation list ; prg_reduce : constr -> constr } open Obligation let make ~info ~cinfo ~opaque ~ntns ~reduce ~deps ~uctx ~body ~fixpoint_kind ?obl_hook obls = let obls', body = match body with | None -> assert (Int.equal (Array.length obls) 0); let n = Nameops.add_suffix cinfo.CInfo.name "_obligation" in ( [| { obl_name = n ; obl_body = None ; obl_location = Loc.tag Evar_kinds.InternalHole ; obl_type = cinfo.CInfo.typ ; obl_status = (false, Evar_kinds.Expand) ; obl_deps = Int.Set.empty ; obl_tac = None } |] , mkVar n ) | Some b -> ( Array.mapi (fun i (n, t, l, o, d, tac) -> { obl_name = n ; obl_body = None ; obl_location = l ; obl_type = t ; obl_status = o ; obl_deps = d ; obl_tac = tac }) obls , b ) in let prg_uctx = UState.make_flexible_nonalgebraic uctx in { prg_cinfo = { cinfo with CInfo.typ = reduce cinfo.CInfo.typ } ; prg_info = info ; prg_hook = obl_hook ; prg_opaque = opaque ; prg_body = body ; prg_uctx ; prg_obligations = {obls = obls'; remaining = Array.length obls'} ; prg_deps = deps ; prg_fixkind = fixpoint_kind ; prg_notations = ntns ; prg_reduce = reduce } let show prg = let { CInfo.name; typ; _ } = prg.prg_cinfo in let env = Global.env () in let sigma = Evd.from_env env in Id.print name ++ spc () ++ str ":" ++ spc () ++ Printer.pr_constr_env env sigma typ ++ spc () ++ str ":=" ++ fnl () ++ 
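(* Note on the structure built by [ProgramDecl.make] above: each tuple
   [(n, t, l, o, d, tac)], as produced by [RetrieveObl.retrieve_obligations],
   becomes an [Obligation.t] with no body yet, and [remaining] starts at the
   number of obligations.  When no body is given at all, a single placeholder
   obligation, named after the constant with the suffix "_obligation",
   stands for the whole term and the program body is just a variable
   referring to it. *)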
Printer.pr_constr_env env sigma prg.prg_body module Internal = struct let get_name prg = prg.prg_cinfo.CInfo.name let get_uctx prg = prg.prg_uctx let set_uctx ~uctx prg = {prg with prg_uctx = uctx} let get_poly prg = prg.prg_info.Info.poly let get_obligations prg = prg.prg_obligations let get_using prg = prg.prg_cinfo.CInfo.using end end open Obligation open ProgramDecl (* Saving an obligation *) (* XXX: Is this the right place for this? *) let it_mkLambda_or_LetIn_or_clean t ctx = let open Context.Rel.Declaration in let fold t decl = if is_local_assum decl then Term.mkLambda_or_LetIn decl t else if Vars.noccurn 1 t then Vars.subst1 mkProp t else Term.mkLambda_or_LetIn decl t in Context.Rel.fold_inside fold ctx ~init:t (* XXX: Is this the right place for this? *) let decompose_lam_prod c ty = let open Context.Rel.Declaration in let rec aux ctx c ty = match (Constr.kind c, Constr.kind ty) with | LetIn (x, b, t, c), LetIn (x', b', t', ty) when Constr.equal b b' && Constr.equal t t' -> let ctx' = Context.Rel.add (LocalDef (x, b', t')) ctx in aux ctx' c ty | _, LetIn (x', b', t', ty) -> let ctx' = Context.Rel.add (LocalDef (x', b', t')) ctx in aux ctx' (lift 1 c) ty | LetIn (x, b, t, c), _ -> let ctx' = Context.Rel.add (LocalDef (x, b, t)) ctx in aux ctx' c (lift 1 ty) | Lambda (x, b, t), Prod (x', b', t') (* By invariant, must be convertible *) -> let ctx' = Context.Rel.add (LocalAssum (x, b')) ctx in aux ctx' t t' | Cast (c, _, _), _ -> aux ctx c ty | _, _ -> (ctx, c, ty) in aux Context.Rel.empty c ty (* XXX: What's the relation of this with Abstract.shrink ? *) let shrink_body c ty = let ctx, b, ty = match ty with | None -> let ctx, b = Term.decompose_lam_assum c in (ctx, b, None) | Some ty -> let ctx, b, ty = decompose_lam_prod c ty in (ctx, b, Some ty) in let b', ty', n, args = List.fold_left (fun (b, ty, i, args) decl -> if Vars.noccurn 1 b && Option.cata (Vars.noccurn 1) true ty then (Vars.subst1 mkProp b, Option.map (Vars.subst1 mkProp) ty, succ i, args) else let open Context.Rel.Declaration in let args = if is_local_assum decl then mkRel i :: args else args in ( Term.mkLambda_or_LetIn decl b , Option.map (Term.mkProd_or_LetIn decl) ty , succ i , args )) (b, ty, 1, []) ctx in (ctx, b', ty', Array.of_list args) (***********************************************************************) (* Saving an obligation *) (***********************************************************************) let unfold_entry cst = Hints.HintsUnfoldEntry [Tacred.EvalConstRef cst] let add_hint local prg cst = (* XXX checking sections here is suspicious but matches historical (unintended?) 
behaviour *) let locality = if local || Global.sections_are_opened () then Hints.Local else Hints.SuperGlobal in Hints.add_hints ~locality [Id.to_string prg.prg_cinfo.CInfo.name] (unfold_entry cst) let declare_obligation prg obl ~uctx ~types ~body = let poly = prg.prg_info.Info.poly in let univs = UState.univ_entry ~poly uctx in let body = prg.prg_reduce body in let types = Option.map prg.prg_reduce types in match obl.obl_status with | _, Evar_kinds.Expand -> (false, {obl with obl_body = Some (TermObl body)}, []) | force, Evar_kinds.Define opaque -> let opaque = (not force) && opaque in let poly = prg.prg_info.Info.poly in let ctx, body, ty, args = if not poly then shrink_body body types else ([], body, types, [||]) in let ce = definition_entry ?types:ty ~opaque ~univs body in (* ppedrot: seems legit to have obligations as local *) let constant = declare_constant ~name:obl.obl_name ~typing_flags:prg.prg_info.Info.typing_flags ~local:Locality.ImportNeedQualified ~kind:Decls.(IsProof Property) (DefinitionEntry ce) in if not opaque then add_hint (Locality.make_section_locality None) prg constant; definition_message obl.obl_name; let body = match fst univs with | UState.Polymorphic_entry uctx -> Some (DefinedObl (constant, Univ.UContext.instance uctx)) | UState.Monomorphic_entry _ -> Some (TermObl (it_mkLambda_or_LetIn_or_clean (mkApp (mkConst constant, args)) ctx)) in (true, {obl with obl_body = body}, [GlobRef.ConstRef constant]) (* Updating the obligation meta-info on close *) let not_transp_msg = Pp.( str "Obligation should be transparent but was declared opaque." ++ spc () ++ str "Use 'Defined' instead.") let err_not_transp () = CErrors.user_err not_transp_msg module ProgMap = Id.Map module State = struct type t = t ProgramDecl.t CEphemeron.key ProgMap.t let empty = ProgMap.empty let pending pm = ProgMap.filter (fun _ v -> (CEphemeron.get v).prg_obligations.remaining > 0) pm let num_pending pm = pending pm |> ProgMap.cardinal let first_pending pm = pending pm |> ProgMap.choose_opt |> Option.map (fun (_, v) -> CEphemeron.get v) let get_unique_open_prog pm name : (_, Id.t list) result = match name with | Some n -> Option.cata (fun p -> Ok (CEphemeron.get p)) (Error []) (ProgMap.find_opt n pm) | None -> ( let n = num_pending pm in match n with | 0 -> Error [] | 1 -> Option.cata (fun p -> Ok p) (Error []) (first_pending pm) | _ -> let progs = Id.Set.elements (ProgMap.domain pm) in Error progs ) let add t key prg = ProgMap.add key (CEphemeron.create prg) t let fold t ~f ~init = let f k v acc = f k (CEphemeron.get v) acc in ProgMap.fold f t init let all pm = ProgMap.bindings pm |> List.map (fun (_,v) -> CEphemeron.get v) let find m t = ProgMap.find_opt t m |> Option.map CEphemeron.get end (* In all cases, the use of the map is read-only so we don't expose the ref *) let map_keys m = ProgMap.fold (fun k _ l -> k :: l) m [] let check_solved_obligations ~pm ~what_for : unit = if not (ProgMap.is_empty pm) then let keys = map_keys pm in let have_string = if Int.equal (List.length keys) 1 then " has " else " have " in CErrors.user_err Pp.( str "Unsolved obligations when closing " ++ what_for ++ str ":" ++ spc () ++ prlist_with_sep spc (fun x -> Id.print x) keys ++ str have_string ++ str "unsolved obligations." 
) let map_replace k v m = ProgMap.add k (CEphemeron.create v) (ProgMap.remove k m) let progmap_remove pm prg = ProgMap.remove prg.prg_cinfo.CInfo.name pm let progmap_replace prg' pm = map_replace prg'.prg_cinfo.CInfo.name prg' pm let obligations_solved prg = Int.equal prg.prg_obligations.remaining 0 let obligations_message rem = Format.asprintf "%s %s remaining" (if rem > 0 then string_of_int rem else "No more") (CString.plural rem "obligation") |> Pp.str |> Flags.if_verbose Feedback.msg_info let get_obligation_body expand obl = match obl.obl_body with | None -> None | Some c -> ( if expand && snd obl.obl_status == Evar_kinds.Expand then match c with | DefinedObl pc -> Some (Environ.constant_value_in (Global.env ()) pc) | TermObl c -> Some c else match c with DefinedObl pc -> Some (mkConstU pc) | TermObl c -> Some c ) let obl_substitution expand obls deps = Int.Set.fold (fun x acc -> let xobl = obls.(x) in match get_obligation_body expand xobl with | None -> acc | Some oblb -> (xobl.obl_name, (xobl.obl_type, oblb)) :: acc) deps [] let rec intset_to = function | -1 -> Int.Set.empty | n -> Int.Set.add n (intset_to (pred n)) let obligation_substitution expand prg = let obls = prg.prg_obligations.obls in let ints = intset_to (pred (Array.length obls)) in obl_substitution expand obls ints let subst_prog subst prg = let subst' = List.map (fun (n, (_, b)) -> (n, b)) subst in ( Vars.replace_vars subst' prg.prg_body , Vars.replace_vars subst' (* Termops.refresh_universes *) prg.prg_cinfo.CInfo.typ ) let declare_definition ~pm prg = let varsubst = obligation_substitution true prg in let sigma = Evd.from_ctx prg.prg_uctx in let body, types = subst_prog varsubst prg in let body, types = EConstr.(of_constr body, Some (of_constr types)) in let cinfo = { prg.prg_cinfo with CInfo.typ = types } in let name, info, opaque = prg.prg_cinfo.CInfo.name, prg.prg_info, prg.prg_opaque in let obls = List.map (fun (id, (_, c)) -> (id, c)) varsubst in (* XXX: This is doing normalization twice *) let kn, uctx = declare_definition_core ~cinfo ~info ~obls ~body ~opaque sigma in (* XXX: We call the obligation hook here, by consistency with the previous imperative behaviour, however I'm not sure this is right *) let pm = Hook.call_g ?hook:prg.prg_hook { Hook.S.uctx; obls; scope = prg.prg_info.Info.scope; dref = kn} pm in let pm = progmap_remove pm prg in pm, kn let rec lam_index n t acc = match Constr.kind t with | Lambda ({Context.binder_name = Name n'}, _, _) when Id.equal n n' -> acc | Lambda (_, _, b) -> lam_index n b (succ acc) | _ -> raise Not_found let compute_possible_guardness_evidences n fixbody fixtype = match n with | Some {CAst.loc; v = n} -> [lam_index n fixbody 0] | None -> (* If recursive argument was not given by user, we try all args. An earlier approach was to look only for inductive arguments, but doing it properly involves delta-reduction, and it finally doesn't seem to worth the effort (except for huge mutual fixpoints ?) 
*) let m = Termops.nb_prod Evd.empty (EConstr.of_constr fixtype) (* FIXME *) in let ctx = fst (Term.decompose_prod_n_assum m fixtype) in List.map_i (fun i _ -> i) 0 ctx let declare_mutual_definition ~pm l = let len = List.length l in let first = List.hd l in let defobl x = let oblsubst = obligation_substitution true x in let subs, typ = subst_prog oblsubst x in let env = Global.env () in let sigma = Evd.from_ctx x.prg_uctx in let r = Retyping.relevance_of_type env sigma (EConstr.of_constr typ) in let term = snd (Reductionops.splay_lam_n env sigma len (EConstr.of_constr subs)) in let typ = snd (Reductionops.splay_prod_n env sigma len (EConstr.of_constr typ)) in let term = EConstr.to_constr sigma term in let typ = EConstr.to_constr sigma typ in let def = (x.prg_reduce term, r, x.prg_reduce typ, x.prg_cinfo.CInfo.impargs, x.prg_cinfo.CInfo.using) in let oblsubst = List.map (fun (id, (_, c)) -> (id, c)) oblsubst in (def, oblsubst) in let defs, obls = List.fold_right (fun x (defs, obls) -> let xdef, xobls = defobl x in (xdef :: defs, xobls @ obls)) l ([], []) in (* let fixdefs = List.map reduce_fix fixdefs in *) let fixdefs, fixrs, fixtypes, fixitems = List.fold_right2 (fun (d, r, typ, impargs, using) name (a1, a2, a3, a4) -> ( d :: a1 , r :: a2 , typ :: a3 , CInfo.{name; typ; impargs; args = []; using } :: a4 )) defs first.prg_deps ([], [], [], []) in let fixkind = Option.get first.prg_fixkind in let arrrec, recvec = (Array.of_list fixtypes, Array.of_list fixdefs) in let rvec = Array.of_list fixrs in let namevec = Array.of_list (List.map (fun x -> Name x.prg_cinfo.CInfo.name) l) in let rec_declaration = (Array.map2 Context.make_annot namevec rvec, arrrec, recvec) in let possible_indexes = match fixkind with | IsFixpoint wfl -> Some (List.map3 compute_possible_guardness_evidences wfl fixdefs fixtypes) | IsCoFixpoint -> None in (* Declare the recursive definitions *) let kns = declare_mutually_recursive_core ~info:first.prg_info ~ntns:first.prg_notations ~uctx:first.prg_uctx ~rec_declaration ~possible_indexes ~opaque:first.prg_opaque ~restrict_ucontext:false ~cinfo:fixitems () in (* Only for the first constant *) let dref = List.hd kns in let scope = first.prg_info.Info.scope in let s_hook = {Hook.S.uctx = first.prg_uctx; obls; scope; dref} in Hook.call ?hook:first.prg_info.Info.hook s_hook; (* XXX: We call the obligation hook here, by consistency with the previous imperative behaviour, however I'm not sure this is right *) let pm = Hook.call_g ?hook:first.prg_hook s_hook pm in let pm = List.fold_left progmap_remove pm l in pm, dref let update_obls ~pm prg obls rem = let prg_obligations = {obls; remaining = rem} in let prg' = {prg with prg_obligations} in let pm = progmap_replace prg' pm in obligations_message rem; if rem > 0 then pm, Remain rem else match prg'.prg_deps with | [] -> let pm, kn = declare_definition ~pm prg' in pm, Defined kn | l -> let progs = List.map (fun x -> CEphemeron.get (ProgMap.find x pm)) prg'.prg_deps in if List.for_all (fun x -> obligations_solved x) progs then let pm, kn = declare_mutual_definition ~pm progs in pm, Defined kn else pm, Dependent let dependencies obls n = let res = ref Int.Set.empty in Array.iteri (fun i obl -> if (not (Int.equal i n)) && Int.Set.mem n obl.obl_deps then res := Int.Set.add i !res) obls; !res let update_program_decl_on_defined ~pm prg obls num obl ~uctx rem ~auto = let obls = Array.copy obls in let () = obls.(num) <- obl in let prg = {prg with prg_uctx = uctx} in let pm, _progress = update_obls ~pm prg obls (pred rem) in let pm = if pred 
rem > 0 then let deps = dependencies obls num in if not (Int.Set.is_empty deps) then let pm, _progress = auto ~pm (Some prg.prg_cinfo.CInfo.name) deps None in pm else pm else pm in pm type obligation_resolver = pm:State.t -> Id.t option -> Int.Set.t -> unit Proofview.tactic option -> State.t * progress type obligation_qed_info = {name : Id.t; num : int; auto : obligation_resolver} let obligation_terminator ~pm ~entry ~uctx ~oinfo:{name; num; auto} = let env = Global.env () in let ty = entry.proof_entry_type in let body, uctx = inline_private_constants ~uctx env entry in let sigma = Evd.from_ctx uctx in Inductiveops.control_only_guard (Global.env ()) sigma (EConstr.of_constr body); (* Declare the obligation ourselves and drop the hook *) let prg = Option.get (State.find pm name) in let {obls; remaining = rem} = prg.prg_obligations in let obl = obls.(num) in let status = match (obl.obl_status, entry.proof_entry_opaque) with | (_, Evar_kinds.Expand), true -> err_not_transp () | (true, _), true -> err_not_transp () | (false, _), true -> Evar_kinds.Define true | (_, Evar_kinds.Define true), false -> Evar_kinds.Define false | (_, status), false -> status in let obl = {obl with obl_status = (false, status)} in let poly = prg.prg_info.Info.poly in let uctx = if poly then uctx else UState.union prg.prg_uctx uctx in let defined, obl, cst = declare_obligation prg obl ~body ~types:ty ~uctx in let prg_ctx = if poly then (* Polymorphic *) (* We merge the new universes and constraints of the polymorphic obligation with the existing ones *) UState.union prg.prg_uctx uctx else if (* The first obligation, if defined, declares the univs of the constant, each subsequent obligation declares its own additional universes and constraints if any *) defined then UState.from_env (Global.env ()) else uctx in let pm = update_program_decl_on_defined ~pm prg obls num obl ~uctx:prg_ctx rem ~auto in pm, cst (* Similar to the terminator but for the admitted path; this assumes the admitted constant was already declared. FIXME: There is duplication of this code with obligation_terminator and Obligations.admit_obligations *) let obligation_admitted_terminator ~pm {name; num; auto} uctx' dref = let prg = Option.get (State.find pm name) in let {obls; remaining = rem} = prg.prg_obligations in let obl = obls.(num) in let cst = match dref with GlobRef.ConstRef cst -> cst | _ -> assert false in let transparent = Environ.evaluable_constant cst (Global.env ()) in let () = match obl.obl_status with | true, Evar_kinds.Expand | true, Evar_kinds.Define true -> if not transparent then err_not_transp () | _ -> () in let inst, uctx' = if not prg.prg_info.Info.poly (* Not polymorphic *) then (* The universe context was declared globally, we continue from the new global environment. *) let uctx = UState.from_env (Global.env ()) in let uctx' = UState.merge_subst uctx (UState.subst uctx') in (Univ.Instance.empty, uctx') else (* We get the right order somehow, but surely it could be enforced in a clearer way. 
*) let uctx = UState.context uctx' in (Univ.UContext.instance uctx, uctx') in let obl = {obl with obl_body = Some (DefinedObl (cst, inst))} in let () = if transparent then add_hint true prg cst in update_program_decl_on_defined ~pm prg obls num obl ~uctx:uctx' rem ~auto end (************************************************************************) (* Handling of interactive proofs *) (************************************************************************) type lemma_possible_guards = int list list module Proof_ending = struct type t = | Regular | End_obligation of Obls_.obligation_qed_info | End_derive of { f : Id.t; name : Id.t } | End_equations of { hook : pm:Obls_.State.t -> Constant.t list -> Evd.evar_map -> Obls_.State.t ; i : Id.t ; types : (Environ.env * Evar.t * Evd.evar_info * EConstr.named_context * Evd.econstr) list ; sigma : Evd.evar_map } end (* Alias *) module Proof_ = Proof module Proof = struct module Proof_info = struct type t = { cinfo : Constr.t CInfo.t list (** cinfo contains each individual constant info in a mutual decl *) ; info : Info.t ; proof_ending : Proof_ending.t CEphemeron.key (* This could be improved and the CEphemeron removed *) ; compute_guard : lemma_possible_guards (** thms and compute guard are specific only to start_lemma_with_initialization + regular terminator, so we could make this per-proof kind *) } let make ~cinfo ~info ?(compute_guard=[]) ?(proof_ending=Proof_ending.Regular) () = { cinfo ; info ; compute_guard ; proof_ending = CEphemeron.create proof_ending } end type t = { endline_tactic : Genarg.glob_generic_argument option ; using : Id.Set.t option ; proof : Proof.t ; initial_euctx : UState.t (** The initial universe context (for the statement) *) ; pinfo : Proof_info.t } (*** Proof Global manipulation ***) let get ps = ps.proof let get_name ps = (Proof.data ps.proof).Proof.name let get_initial_euctx ps = ps.initial_euctx let fold ~f p = f p.proof let map ~f p = { p with proof = f p.proof } let map_fold ~f p = let proof, res = f p.proof in { p with proof }, res let map_fold_endline ~f ps = let et = match ps.endline_tactic with | None -> Proofview.tclUNIT () | Some tac -> let open Geninterp in let {Proof.poly} = Proof.data ps.proof in let ist = { lfun = Id.Map.empty; poly; extra = TacStore.empty } in let Genarg.GenArg (Genarg.Glbwit tag, tac) = tac in let tac = Geninterp.interp tag ist tac in Ftactic.run tac (fun _ -> Proofview.tclUNIT ()) in let (newpr,ret) = f et ps.proof in let ps = { ps with proof = newpr } in ps, ret let compact pf = map ~f:Proof.compact pf (* Sets the tactic to be used when a tactic line is closed with [...] 
*) let set_endline_tactic tac ps = { ps with endline_tactic = Some tac } let initialize_named_context_for_proof () = let sign = Global.named_context () in List.fold_right (fun d signv -> let id = NamedDecl.get_id d in let d = if Decls.variable_opacity id then NamedDecl.drop_body d else d in Environ.push_named_context_val d signv) sign Environ.empty_named_context_val let start_proof_core ~name ~typ ~pinfo ?(sign=initialize_named_context_for_proof ()) sigma = (* In ?sign, we remove the bodies of variables in the named context marked "opaque", this is a hack tho, see #10446, and build_constant_by_tactic uses a different method that would break program_inference_hook *) let { Proof_info.info = { Info.poly; typing_flags; _ }; _ } = pinfo in let goals = [Global.env_of_context sign, typ] in let proof = Proof.start ~name ~poly ?typing_flags sigma goals in let initial_euctx = Evd.evar_universe_context Proof.((data proof).sigma) in { proof ; endline_tactic = None ; using = None ; initial_euctx ; pinfo } (** [start_proof ~info ~cinfo sigma] starts a proof of [cinfo]. The proof is started in the evar map [sigma] (which can typically contain universe constraints) *) let start_core ~info ~cinfo ?proof_ending sigma = let { CInfo.name; typ; _ } = cinfo in let cinfo = [{ cinfo with CInfo.typ = EConstr.Unsafe.to_constr cinfo.CInfo.typ }] in let pinfo = Proof_info.make ~cinfo ~info ?proof_ending () in start_proof_core ~name ~typ ~pinfo ?sign:None sigma let start = start_core ?proof_ending:None let start_dependent ~info ~name ~proof_ending goals = let { Info.poly; typing_flags; _ } = info in let proof = Proof.dependent_start ~name ~poly ?typing_flags goals in let initial_euctx = Evd.evar_universe_context Proof.((data proof).sigma) in let cinfo = [] in let pinfo = Proof_info.make ~info ~cinfo ~proof_ending () in { proof ; endline_tactic = None ; using = None ; initial_euctx ; pinfo } let start_derive ~f ~name ~info goals = let proof_ending = Proof_ending.End_derive {f; name} in start_dependent ~info ~name ~proof_ending goals let start_equations ~name ~info ~hook ~types sigma goals = let proof_ending = Proof_ending.End_equations {hook; i=name; types; sigma} in start_dependent ~name ~info ~proof_ending goals let rec_tac_initializer finite guard thms snl = if finite then match List.map (fun { CInfo.name; typ } -> name, (EConstr.of_constr typ)) thms with | (id,_)::l -> Tactics.mutual_cofix id l 0 | _ -> assert false else (* nl is dummy: it will be recomputed at Qed-time *) let nl = match snl with | None -> List.map succ (List.map List.last guard) | Some nl -> nl in match List.map2 (fun { CInfo.name; typ } n -> (name, n, (EConstr.of_constr typ))) thms nl with | (id,n,_)::l -> Tactics.mutual_fix id n l 0 | _ -> assert false let start_with_initialization ~info ~cinfo sigma = let { CInfo.name; typ; args } = cinfo in let init_tac = Tactics.auto_intros_tac args in let pinfo = Proof_info.make ~cinfo:[cinfo] ~info () in let lemma = start_proof_core ~name ~typ:(EConstr.of_constr typ) ~pinfo ?sign:None sigma in map lemma ~f:(fun p -> pi1 @@ Proof.run_tactic Global.(env ()) init_tac p) type mutual_info = (bool * lemma_possible_guards * Constr.t option list option) let start_mutual_with_initialization ~info ~cinfo ~mutual_info sigma snl = let intro_tac { CInfo.args; _ } = Tactics.auto_intros_tac args in let init_tac, compute_guard = let (finite,guard,init_terms) = mutual_info in let rec_tac = rec_tac_initializer finite guard cinfo snl in let term_tac = match init_terms with | None -> List.map intro_tac cinfo | Some init_terms 
-> (* This is the case for hybrid proof mode / definition fixpoint, where terms for some constants are given with := *) let tacl = List.map (Option.cata (EConstr.of_constr %> Tactics.exact_no_check) Tacticals.tclIDTAC) init_terms in List.map2 (fun tac thm -> Tacticals.tclTHEN tac (intro_tac thm)) tacl cinfo in Tacticals.tclTHENS rec_tac term_tac, guard in match cinfo with | [] -> CErrors.anomaly (Pp.str "No proof to start.") | { CInfo.name; typ; _} :: thms -> let pinfo = Proof_info.make ~cinfo ~info ~compute_guard () in (* start_lemma has the responsibility to add (name, impargs, typ) to thms, once Info.t is more refined this won't be necessary *) let typ = EConstr.of_constr typ in let lemma = start_proof_core ~name ~typ ~pinfo sigma in map lemma ~f:(fun p -> pi1 @@ Proof.run_tactic Global.(env ()) init_tac p) let get_used_variables pf = pf.using let get_universe_decl pf = pf.pinfo.Proof_info.info.Info.udecl let set_used_variables ps ~using = let open Context.Named.Declaration in let env = Global.env () in let ctx = Environ.keep_hyps env using in let ctx_set = List.fold_right Id.Set.add (List.map NamedDecl.get_id ctx) Id.Set.empty in let vars_of = Environ.global_vars_set in let aux env entry (ctx, all_safe as orig) = match entry with | LocalAssum ({Context.binder_name=x},_) -> if Id.Set.mem x all_safe then orig else (ctx, all_safe) | LocalDef ({Context.binder_name=x},bo, ty) as decl -> if Id.Set.mem x all_safe then orig else let vars = Id.Set.union (vars_of env bo) (vars_of env ty) in if Id.Set.subset vars all_safe then (decl :: ctx, Id.Set.add x all_safe) else (ctx, all_safe) in let ctx, _ = Environ.fold_named_context aux env ~init:(ctx,ctx_set) in if not (Option.is_empty ps.using) then CErrors.user_err Pp.(str "Used section variables can be declared only once"); ctx, { ps with using = Some (Context.Named.to_vars ctx) } let get_open_goals ps = let Proof.{ goals; stack; sigma } = Proof.data ps.proof in List.length goals + List.fold_left (+) 0 (List.map (fun (l1,l2) -> List.length l1 + List.length l2) stack) + List.length (Evd.shelf sigma) type proof_object = { name : Names.Id.t (* [name] only used in the STM *) ; entries : proof_entry list ; uctx: UState.t ; pinfo : Proof_info.t } let get_po_name { name } = name let private_poly_univs = Goptions.declare_bool_option_and_ref ~depr:false ~key:["Private";"Polymorphic";"Universes"] ~value:true (* XXX: This is still separate from close_proof below due to drop_pt in the STM *) (* XXX: Unsafe_typ:true is needed by vio files, see bf0499bc507d5a39c3d5e3bf1f69191339270729 *) let prepare_proof ~unsafe_typ { proof } = let Proof.{name=pid;entry;poly} = Proof.data proof in let initial_goals = Proofview.initial_goals entry in let evd = Proof.return ~pid proof in let eff = Evd.eval_side_effects evd in let evd = Evd.minimize_universes evd in let to_constr_body c = match EConstr.to_constr_opt evd c with | Some p -> Vars.universes_of_constr p, p | None -> CErrors.user_err Pp.(str "Some unresolved existential variables remain") in let to_constr_typ t = if unsafe_typ then let t = EConstr.Unsafe.to_constr t in Vars.universes_of_constr t, t else to_constr_body t in (* ppedrot: FIXME, this is surely wrong. There is no reason to duplicate side-effects... This may explain why one need to uniquize side-effects thereafter... *) (* EJGA: actually side-effects de-duplication and this codepath is unrelated. 
Duplicated side-effects arise from incorrect scheme generation code, the main bulk of it was mostly fixed by #9836 but duplication can still happen because of rewriting schemes I think; however the code below is mostly untested, the only code-paths that generate several proof entries are derive and equations and so far there is no code in the CI that will actually call those and do a side-effect, TTBOMK *) (* EJGA: likely the right solution is to attach side effects to the first constant only? *) let proofs = List.map (fun (body, typ) -> (to_constr_body body, eff), to_constr_typ typ) initial_goals in proofs, Evd.evar_universe_context evd let make_univs_deferred ~poly ~initial_euctx ~uctx ~udecl (used_univs_typ, typ) (used_univs_body, body) = let used_univs = Univ.Level.Set.union used_univs_body used_univs_typ in let utyp = UState.univ_entry ~poly initial_euctx in let uctx = UState.constrain_variables (fst (UState.context_set initial_euctx)) uctx in (* For vi2vo compilation proofs are computed now but we need to complement the univ constraints of the typ with the ones of the body. So we keep the two sets distinct. *) let uctx_body = UState.restrict uctx used_univs in let ubody = UState.check_mono_univ_decl uctx_body udecl in utyp, ubody let make_univs_private_poly ~poly ~uctx ~udecl (used_univs_typ, typ) (used_univs_body, body) = let used_univs = Univ.Level.Set.union used_univs_body used_univs_typ in let uctx = UState.restrict uctx used_univs in let uctx' = UState.restrict uctx used_univs_typ in let utyp = UState.check_univ_decl ~poly uctx' udecl in let ubody = Univ.ContextSet.diff (UState.context_set uctx) (UState.context_set uctx') in utyp, ubody let make_univs ~poly ~uctx ~udecl (used_univs_typ, typ) (used_univs_body, body) = let used_univs = Univ.Level.Set.union used_univs_body used_univs_typ in (* Since the proof is computed now, we can simply have 1 set of constraints in which we merge the ones for the body and the ones for the typ. We recheck the declaration after restricting with the actually used universes. TODO: check if restrict is really necessary now. 
*) let uctx = UState.restrict uctx used_univs in let utyp = UState.check_univ_decl ~poly uctx udecl in utyp, Univ.ContextSet.empty let close_proof ~opaque ~keep_body_ucst_separate ps = let { using; proof; initial_euctx; pinfo } = ps in let { Proof_info.info = { Info.udecl } } = pinfo in let { Proof.name; poly } = Proof.data proof in let unsafe_typ = keep_body_ucst_separate && not poly in let elist, uctx = prepare_proof ~unsafe_typ ps in let opaque = match opaque with | Vernacexpr.Opaque -> true | Vernacexpr.Transparent -> false in let make_entry ((((_ub, body) as b), eff), ((_ut, typ) as t)) = let utyp, ubody = (* allow_deferred case *) if not poly && (keep_body_ucst_separate || not (Safe_typing.is_empty_private_constants eff.Evd.seff_private)) then make_univs_deferred ~initial_euctx ~poly ~uctx ~udecl t b (* private_poly_univs case *) else if poly && opaque && private_poly_univs () then make_univs_private_poly ~poly ~uctx ~udecl t b else make_univs ~poly ~uctx ~udecl t b in definition_entry_core ~opaque ?using ~univs:utyp ~univsbody:ubody ~types:typ ~eff body in let entries = CList.map make_entry elist in { name; entries; uctx; pinfo } type closed_proof_output = (Constr.t * Evd.side_effects) list * UState.t let close_proof_delayed ~feedback_id ps (fpl : closed_proof_output Future.computation) = let { using; proof; initial_euctx; pinfo } = ps in let { Proof_info.info = { Info.udecl } } = pinfo in let { Proof.name; poly; entry; sigma } = Proof.data proof in (* We don't allow poly = true in this path *) if poly then CErrors.anomaly (Pp.str "Cannot delay universe-polymorphic constants."); (* Because of dependent subgoals at the beginning of proofs, we could have existential variables in the initial types of goals, we need to normalise them for the kernel. *) let nf = Evarutil.nf_evars_universes (Evd.set_universe_context sigma initial_euctx) in (* We only support opaque proofs, this will be enforced by using different entries soon *) let opaque = true in let make_entry i (_, types) = (* Already checked the univ_decl for the type universes when starting the proof. *) let univs = UState.univ_entry ~poly:false initial_euctx in let types = nf (EConstr.Unsafe.to_constr types) in Future.chain fpl (fun (pf, uctx) -> let (pt, eff) = List.nth pf i in (* Deferred proof, we already checked the universe declaration with the initial universes, ensure that the final universes respect the declaration as well. If the declaration is non-extensible, this will prevent the body from adding universes and constraints. *) let uctx = UState.constrain_variables (fst (UState.context_set initial_euctx)) uctx in let used_univs = Univ.Level.Set.union (Vars.universes_of_constr types) (Vars.universes_of_constr pt) in let uctx = UState.restrict uctx used_univs in let uctx = UState.check_mono_univ_decl uctx udecl in (pt,uctx),eff) |> delayed_definition_entry ~opaque ~feedback_id ~using ~univs ~types in let entries = CList.map_i make_entry 0 (Proofview.initial_goals entry) in { name; entries; uctx = initial_euctx; pinfo } let close_future_proof = close_proof_delayed let return_partial_proof { proof } = let proofs = Proof.partial_proof proof in let Proof.{sigma=evd} = Proof.data proof in let eff = Evd.eval_side_effects evd in (* ppedrot: FIXME, this is surely wrong. There is no reason to duplicate side-effects... This may explain why one need to uniquize side-effects thereafter... 
*) let proofs = List.map (fun c -> EConstr.Unsafe.to_constr c, eff) proofs in proofs, Evd.evar_universe_context evd let return_proof ps = let p, uctx = prepare_proof ~unsafe_typ:false ps in List.map (fun (((_ub, body),eff),_) -> (body,eff)) p, uctx let update_sigma_univs ugraph p = map ~f:(Proof.update_sigma_univs ugraph) p let next = let n = ref 0 in fun () -> incr n; !n let by tac = map_fold ~f:(Proof.solve (Goal_select.SelectNth 1) None tac) let build_constant_by_tactic ~name ?(opaque=Vernacexpr.Transparent) ~uctx ~sign ~poly (typ : EConstr.t) tac = let evd = Evd.from_ctx uctx in let typ_ = EConstr.Unsafe.to_constr typ in let cinfo = [CInfo.make ~name ~typ:typ_ ()] in let info = Info.make ~poly () in let pinfo = Proof_info.make ~cinfo ~info () in let pf = start_proof_core ~name ~typ ~pinfo ~sign evd in let pf, status = by tac pf in let { entries; uctx } = close_proof ~opaque ~keep_body_ucst_separate:false pf in match entries with | [entry] -> let entry = Internal.pmap_entry_body ~f:Future.force entry in entry, status, uctx | _ -> CErrors.anomaly Pp.(str "[build_constant_by_tactic] close_proof returned more than one proof term") let build_by_tactic ?(side_eff=true) env ~uctx ~poly ~typ tac = let name = Id.of_string ("temporary_proof"^string_of_int (next())) in let sign = Environ.(val_of_named_context (named_context env)) in let ce, status, uctx = build_constant_by_tactic ~name ~uctx ~sign ~poly typ tac in let cb, uctx = if side_eff then inline_private_constants ~uctx env ce else (* GG: side effects won't get reset: no need to treat their universes specially *) let (cb, ctx), _eff = ce.proof_entry_body in cb, UState.merge ~sideff:false Evd.univ_rigid uctx ctx in cb, ce.proof_entry_type, ce.proof_entry_universes, status, uctx let declare_abstract ~name ~poly ~kind ~sign ~secsign ~opaque ~solve_tac sigma concl = (* EJGA: flush_and_check_evars is only used in abstract, could we use a different API? *) let concl = try Evarutil.flush_and_check_evars sigma concl with Evarutil.Uninstantiated_evar _ -> CErrors.user_err Pp.(str "\"abstract\" cannot handle existentials.") in let sigma, concl = (* FIXME: should be done only if the tactic succeeds *) let sigma = Evd.minimize_universes sigma in sigma, Evarutil.nf_evars_universes sigma concl in let concl = EConstr.of_constr concl in let uctx = Evd.evar_universe_context sigma in let (const, safe, uctx) = try build_constant_by_tactic ~name ~opaque:Vernacexpr.Transparent ~poly ~uctx ~sign:secsign concl solve_tac with Logic_monad.TacticFailure e as src -> (* if the tactic [tac] fails, it reports a [TacticFailure e], which is an error irrelevant to the proof system (in fact it means that [e] comes from [tac] failing to yield enough success). Hence it reraises [e]. *) let (_, info) = Exninfo.capture src in Exninfo.iraise (e, info) in let sigma = Evd.set_universe_context sigma uctx in let body, effs = const.proof_entry_body in (* We drop the side-effects from the entry, they already exist in the ambient environment *) let const = Internal.pmap_entry_body const ~f:(fun _ -> body, ()) in (* EJGA: Hack related to the above call to `build_constant_by_tactic` with `~opaque:Transparent`. Even if the abstracted term is destined to be opaque, if we trigger the `if poly && opaque && private_poly_univs ()` in `close_proof` kernel will boom. This deserves more investigation. 
*) let const = Internal.set_opacity ~opaque const in let const, args = Internal.shrink_entry sign const in let cst () = (* do not compute the implicit arguments, it may be costly *) let () = Impargs.make_implicit_args false in (* ppedrot: seems legit to have abstracted subproofs as local*) declare_private_constant ~local:Locality.ImportNeedQualified ~name ~kind const in let cst, eff = Impargs.with_implicit_protection cst () in let inst = match fst const.proof_entry_universes with | UState.Monomorphic_entry _ -> EConstr.EInstance.empty | UState.Polymorphic_entry ctx -> (* We mimic what the kernel does, that is ensuring that no additional constraints appear in the body of polymorphic constants. Ideally this should be enforced statically. *) let (_, body_uctx), _ = const.proof_entry_body in let () = assert (Univ.ContextSet.is_empty body_uctx) in EConstr.EInstance.make (Univ.UContext.instance ctx) in let args = List.map EConstr.of_constr args in let lem = EConstr.mkConstU (cst, inst) in let effs = Evd.concat_side_effects eff effs in effs, sigma, lem, args, safe let get_goal_context pf i = let p = get pf in Proof.get_goal_context_gen p i let get_current_goal_context pf = let p = get pf in try Proof.get_goal_context_gen p 1 with | Proof.NoSuchGoal _ -> (* spiwack: returning empty evar_map, since if there is no goal, under focus, there is no accessible evar either. EJGA: this seems strange, as we have pf *) let env = Global.env () in Evd.from_env env, env let get_current_context pf = let p = get pf in Proof.get_proof_context p (* Support for mutually proved theorems *) (* XXX: this should be unified with the code for non-interactive mutuals previously on this file. *) module MutualEntry : sig val declare_possibly_mutual_parameters : pinfo:Proof_info.t -> uctx:UState.t -> sec_vars:Id.Set.t option -> univs:UState.named_universes_entry -> Names.GlobRef.t list val declare_mutdef (* Common to all recthms *) : pinfo:Proof_info.t -> uctx:UState.t -> entry:proof_entry -> Names.GlobRef.t list end = struct (* XXX: Refactor this with the code in [Declare.declare_mutdef] *) let guess_decreasing env possible_indexes ((body, ctx), eff) = let open Constr in match Constr.kind body with | Fix ((nv,0),(_,_,fixdefs as fixdecls)) -> let env = Safe_typing.push_private_constants env eff.Evd.seff_private in let indexes = Pretyping.search_guard env possible_indexes fixdecls in (mkFix ((indexes,0),fixdecls), ctx), eff | _ -> (body, ctx), eff let select_body i t = let open Constr in match Constr.kind t with | Fix ((nv,0),decls) -> mkFix ((nv,i),decls) | CoFix (0,decls) -> mkCoFix (i,decls) | _ -> CErrors.anomaly Pp.(str "Not a proof by induction: " ++ Termops.Internal.debug_print_constr (EConstr.of_constr t) ++ str ".") let declare_mutdef ~uctx ~pinfo pe i CInfo.{ name; impargs; typ; _} = let { Proof_info.info; compute_guard; _ } = pinfo in let { Info.hook; scope; kind; typing_flags; _ } = info in (* if i = 0 , we don't touch the type; this is for compat but not clear it is the right thing to do. 
*) let pe, ubind = if i > 0 && not (CList.is_empty compute_guard) then let typ = UState.nf_universes uctx typ in Internal.map_entry_type pe ~f:(fun _ -> Some typ), UnivNames.empty_binders else pe, UState.universe_binders uctx in (* We when compute_guard was [] in the previous step we should not substitute the body *) let pe = match compute_guard with | [] -> pe | _ -> Internal.map_entry_body pe ~f:(fun ((body, ctx), eff) -> (select_body i body, ctx), eff) in declare_entry ~name ~scope ~kind ?hook ~impargs ~typing_flags ~uctx pe let declare_mutdef ~pinfo ~uctx ~entry = let pe = match pinfo.Proof_info.compute_guard with | [] -> (* Not a recursive statement *) entry | possible_indexes -> (* Try all combinations... not optimal *) let env = Global.env() in let typing_flags = pinfo.Proof_info.info.Info.typing_flags in let env = Environ.update_typing_flags ?typing_flags env in Internal.map_entry_body entry ~f:(guess_decreasing env possible_indexes) in List.map_i (declare_mutdef ~pinfo ~uctx pe) 0 pinfo.Proof_info.cinfo let declare_possibly_mutual_parameters ~pinfo ~uctx ~sec_vars ~univs = let { Info.scope; hook } = pinfo.Proof_info.info in List.map_i ( fun i { CInfo.name; typ; impargs } -> let pe = { parameter_entry_secctx = sec_vars; parameter_entry_type = Evarutil.nf_evars_universes (Evd.from_ctx uctx) typ; parameter_entry_universes = univs; parameter_entry_inline_code = None; } in declare_parameter ~name ~scope ~hook ~impargs ~uctx pe ) 0 pinfo.Proof_info.cinfo end (************************************************************************) (* Admitting a lemma-like constant *) (************************************************************************) (* Admitted *) let get_keep_admitted_vars = Goptions.declare_bool_option_and_ref ~depr:false ~key:["Keep"; "Admitted"; "Variables"] ~value:true let compute_proof_using_for_admitted proof typ pproofs = if not (get_keep_admitted_vars ()) then None else match get_used_variables proof, pproofs with | Some _ as x, _ -> x | None, pproof :: _ -> let env = Global.env () in let ids_typ = Environ.global_vars_set env typ in (* [pproof] is evar-normalized by [partial_proof]. We don't count variables appearing only in the type of evars. 
*) let ids_def = Environ.global_vars_set env (EConstr.Unsafe.to_constr pproof) in Some (Environ.really_needed env (Id.Set.union ids_typ ids_def)) | _ -> None let finish_admitted ~pm ~pinfo ~uctx ~sec_vars ~univs = let cst = MutualEntry.declare_possibly_mutual_parameters ~pinfo ~uctx ~sec_vars ~univs in (* If the constant was an obligation we need to update the program map *) match CEphemeron.default pinfo.Proof_info.proof_ending Proof_ending.Regular with | Proof_ending.End_obligation oinfo -> Obls_.obligation_admitted_terminator ~pm oinfo uctx (List.hd cst) | _ -> pm let save_admitted ~pm ~proof = let udecl = get_universe_decl proof in let Proof.{ poly; entry } = Proof.data (get proof) in let typ = match Proofview.initial_goals entry with | [typ] -> snd typ | _ -> CErrors.anomaly ~label:"Lemmas.save_lemma_admitted" (Pp.str "more than one statement.") in let typ = EConstr.Unsafe.to_constr typ in let iproof = get proof in let pproofs = Proof.partial_proof iproof in let sec_vars = compute_proof_using_for_admitted proof typ pproofs in let uctx = get_initial_euctx proof in let univs = UState.check_univ_decl ~poly uctx udecl in finish_admitted ~pm ~pinfo:proof.pinfo ~uctx ~sec_vars ~univs (************************************************************************) (* Saving a lemma-like constant *) (************************************************************************) let finish_derived ~f ~name ~entries = (* [f] and [name] correspond to the proof of [f] and of [suchthat], respectively. *) let f_def, lemma_def = match entries with | [_;f_def;lemma_def] -> f_def, lemma_def | _ -> assert false in (* The opacity of [f_def] is adjusted to be [false], as it must. Then [f] is declared in the global environment. *) let f_def = Internal.set_opacity ~opaque:false f_def in let f_kind = Decls.(IsDefinition Definition) in let f_def = DefinitionEntry f_def in let f_kn = declare_constant ~name:f ~kind:f_kind f_def ~typing_flags:None in let f_kn_term = Constr.mkConst f_kn in (* In the type and body of the proof of [suchthat] there can be references to the variable [f]. It needs to be replaced by references to the constant [f] declared above. This substitution performs this precise action. *) let substf c = Vars.replace_vars [f,f_kn_term] c in (* Extracts the type of the proof of [suchthat]. *) let lemma_pretype typ = match typ with | Some t -> Some (substf t) | None -> assert false (* Declare always sets type here. *) in (* The references of [f] are subsituted appropriately. *) let lemma_def = Internal.map_entry_type lemma_def ~f:lemma_pretype in (* The same is done in the body of the proof. 
*) let lemma_def = Internal.map_entry_body lemma_def ~f:(fun ((b,ctx),fx) -> (substf b, ctx), fx) in let lemma_def = DefinitionEntry lemma_def in let ct = declare_constant ~name ~typing_flags:None ~kind:Decls.(IsProof Proposition) lemma_def in [GlobRef.ConstRef f_kn; GlobRef.ConstRef ct] let finish_proved_equations ~pm ~kind ~hook i proof_obj types sigma0 = let obls = ref 1 in let sigma, recobls = CList.fold_left2_map (fun sigma (_evar_env, ev, _evi, local_context, _type) entry -> let id = match Evd.evar_ident ev sigma0 with | Some id -> id | None -> let n = !obls in incr obls; Nameops.add_suffix i ("_obligation_" ^ string_of_int n) in let entry = Internal.pmap_entry_body ~f:Future.force entry in let entry, args = Internal.shrink_entry local_context entry in let entry = Internal.pmap_entry_body ~f:Future.from_val entry in let cst = declare_constant ~name:id ~kind ~typing_flags:None (DefinitionEntry entry) in let sigma, app = Evd.fresh_global (Global.env ()) sigma (GlobRef.ConstRef cst) in let sigma = Evd.define ev (EConstr.applist (app, List.map EConstr.of_constr args)) sigma in sigma, cst) sigma0 types proof_obj.entries in let pm = hook ~pm recobls sigma in pm, List.map (fun cst -> GlobRef.ConstRef cst) recobls let check_single_entry { entries; uctx } label = match entries with | [entry] -> entry, uctx | _ -> CErrors.anomaly ~label Pp.(str "close_proof returned more than one proof term") let finalize_proof ~pm proof_obj proof_info = let open Proof_ending in match CEphemeron.default proof_info.Proof_info.proof_ending Regular with | Regular -> let entry, uctx = check_single_entry proof_obj "Proof.save" in pm, MutualEntry.declare_mutdef ~entry ~uctx ~pinfo:proof_info | End_obligation oinfo -> let entry, uctx = check_single_entry proof_obj "Obligation.save" in let entry = Internal.pmap_entry_body ~f:Future.force entry in Obls_.obligation_terminator ~pm ~entry ~uctx ~oinfo | End_derive { f ; name } -> pm, finish_derived ~f ~name ~entries:proof_obj.entries | End_equations { hook; i; types; sigma } -> let kind = proof_info.Proof_info.info.Info.kind in finish_proved_equations ~pm ~kind ~hook i proof_obj types sigma let err_save_forbidden_in_place_of_qed () = CErrors.user_err (Pp.str "Cannot use Save with more than one constant or in this proof mode") let process_idopt_for_save ~idopt info = match idopt with | None -> info | Some { CAst.v = save_name } -> (* Save foo was used; we override the info in the first theorem *) let cinfo = match info.Proof_info.cinfo, CEphemeron.default info.Proof_info.proof_ending Proof_ending.Regular with | [ { CInfo.name; _} as decl ], Proof_ending.Regular -> [ { decl with CInfo.name = save_name } ] | _ -> err_save_forbidden_in_place_of_qed () in { info with Proof_info.cinfo } let save ~pm ~proof ~opaque ~idopt = (* Env and sigma are just used for error printing in save_remaining_recthms *) let proof_obj = close_proof ~opaque ~keep_body_ucst_separate:false proof in let proof_info = process_idopt_for_save ~idopt proof.pinfo in finalize_proof ~pm proof_obj proof_info let save_regular ~(proof : t) ~opaque ~idopt = let open Proof_ending in match CEphemeron.default proof.pinfo.Proof_info.proof_ending Regular with | Regular -> let (_, grs) : Obls_.State.t * _ = save ~pm:Obls_.State.empty ~proof ~opaque ~idopt in grs | _ -> CErrors.anomaly Pp.(str "save_regular: unexpected proof ending") (***********************************************************************) (* Special case to close a lemma without forcing a proof *) 
(***********************************************************************) let save_lemma_admitted_delayed ~pm ~proof = let { entries; uctx; pinfo } = proof in if List.length entries <> 1 then CErrors.user_err Pp.(str "Admitted does not support multiple statements"); let { proof_entry_secctx; proof_entry_type; proof_entry_universes } = List.hd entries in let poly = match fst (proof_entry_universes) with | UState.Monomorphic_entry _ -> false | UState.Polymorphic_entry _ -> true in let univs = UState.univ_entry ~poly uctx in let sec_vars = if get_keep_admitted_vars () then proof_entry_secctx else None in finish_admitted ~pm ~uctx ~pinfo ~sec_vars ~univs let save_lemma_proved_delayed ~pm ~proof ~idopt = (* vio2vo used to call this with invalid [pinfo], now it should work fine. *) let pinfo = process_idopt_for_save ~idopt proof.pinfo in finalize_proof ~pm proof pinfo end (* Proof module *) let _ = Ind_tables.declare_definition_scheme := declare_definition_scheme let _ = Abstract.declare_abstract := Proof.declare_abstract let build_by_tactic = Proof.build_by_tactic (* This module could be merged with Obl, and placed before [Proof], however there is a single dependency on [Proof.start] for the interactive case *) module Obls = struct (* For the records fields, opens should go away one these types are private *) open Obls_ open Obls_.Obligation open Obls_.ProgramDecl let reduce c = let env = Global.env () in let sigma = Evd.from_env env in EConstr.Unsafe.to_constr (Reductionops.clos_norm_flags CClosure.betaiota env sigma (EConstr.of_constr c)) let explain_no_obligations = function Some ident -> str "No obligations for program " ++ Id.print ident | None -> str "No obligations remaining" module Error = struct let no_obligations n = CErrors.user_err (explain_no_obligations n) let ambiguous_program id ids = CErrors.user_err Pp.(str "More than one program with unsolved obligations: " ++ prlist Id.print ids ++ str "; use the \"of\" clause to specify, as in \"Obligation 1 of " ++ Id.print id ++ str "\"") let unknown_obligation num = CErrors.user_err (Pp.str (Printf.sprintf "Unknown obligation number %i" (succ num))) let already_solved num = CErrors.user_err Pp.(str "Obligation " ++ int num ++ str " already solved." 
) let depends num rem = CErrors.user_err ( str "Obligation " ++ int num ++ str " depends on obligation(s) " ++ pr_sequence (fun x -> int (succ x)) rem) end let default_tactic = ref (Proofview.tclUNIT ()) let evar_of_obligation o = Evd.make_evar (Global.named_context_val ()) (EConstr.of_constr o.obl_type) let subst_deps expand obls deps t = let osubst = Obls_.obl_substitution expand obls deps in (Vars.replace_vars (List.map (fun (n, (_, b)) -> n, b) osubst) t) let subst_deps_obl obls obl = let t' = subst_deps true obls obl.obl_deps obl.obl_type in Obligation.set_type ~typ:t' obl open Evd let is_defined obls x = not (Option.is_empty obls.(x).obl_body) let deps_remaining obls deps = Int.Set.fold (fun x acc -> if is_defined obls x then acc else x :: acc) deps [] let goal_kind = Decls.(IsDefinition Definition) let goal_proof_kind = Decls.(IsProof Lemma) let kind_of_obligation o = match o with | Evar_kinds.Define false | Evar_kinds.Expand -> goal_kind | _ -> goal_proof_kind (* Solve an obligation using tactics, return the corresponding proof term *) let warn_solve_errored = CWarnings.create ~name:"solve_obligation_error" ~category:"tactics" (fun err -> Pp.seq [ str "Solve Obligations tactic returned error: " ; err ; fnl () ; str "This will become an error in the future" ]) let solve_by_tac prg obls i tac = let obl = obls.(i) in let obl = subst_deps_obl obls obl in let tac = Option.(default !default_tactic (append tac obl.obl_tac)) in let uctx = Internal.get_uctx prg in let uctx = UState.update_sigma_univs uctx (Global.universes ()) in let poly = Internal.get_poly prg in let evi = evar_of_obligation obl in (* the status of [build_by_tactic] is dropped. *) try let env = Global.env () in let body, types, _univs, _, uctx = build_by_tactic env ~uctx ~poly ~typ:evi.evar_concl tac in Inductiveops.control_only_guard env (Evd.from_ctx uctx) (EConstr.of_constr body); Some (body, types, uctx) with | Tacticals.FailError (_, s) as exn -> let _ = Exninfo.capture exn in let loc = fst obl.obl_location in CErrors.user_err ?loc (Lazy.force s) (* If the proof is open we absorb the error and leave the obligation open *) | Proof_.OpenProof _ -> None | e when CErrors.noncritical e -> let err = CErrors.print e in let loc = fst obl.obl_location in warn_solve_errored ?loc err; None let solve_and_declare_by_tac prg obls i tac = match solve_by_tac prg obls i tac with | None -> None | Some (t, ty, uctx) -> let obl = obls.(i) in let poly = Internal.get_poly prg in let prg = ProgramDecl.Internal.set_uctx ~uctx prg in let def, obl', _cst = declare_obligation prg obl ~body:t ~types:ty ~uctx in obls.(i) <- obl'; if def && not poly then ( (* Declare the term constraints with the first obligation only *) let uctx_global = UState.from_env (Global.env ()) in let uctx = UState.merge_subst uctx_global (UState.subst uctx) in Some (ProgramDecl.Internal.set_uctx ~uctx prg)) else Some prg let solve_obligation_by_tac prg obls i tac = let obl = obls.(i) in match obl.obl_body with | Some _ -> None | None -> if List.is_empty (deps_remaining obls obl.obl_deps) then solve_and_declare_by_tac prg obls i tac else None let get_unique_prog ~pm prg = match State.get_unique_open_prog pm prg with | Ok prg -> prg | Error [] -> Error.no_obligations None | Error ((id :: _) as ids) -> Error.ambiguous_program id ids let rec solve_obligation prg num tac = let user_num = succ num in let { obls; remaining=rem } = Internal.get_obligations prg in let obl = obls.(num) in let remaining = deps_remaining obls obl.obl_deps in let () = if not (Option.is_empty 
obl.obl_body) then Error.already_solved user_num; if not (List.is_empty remaining) then Error.depends user_num remaining in let obl = subst_deps_obl obls obl in let scope = Locality.Global Locality.ImportNeedQualified in let kind = kind_of_obligation (snd obl.obl_status) in let evd = Evd.from_ctx (Internal.get_uctx prg) in let evd = Evd.update_sigma_univs (Global.universes ()) evd in let auto ~pm n oblset tac = auto_solve_obligations ~pm n ~oblset tac in let proof_ending = let name = Internal.get_name prg in Proof_ending.End_obligation {name; num; auto} in let using = Internal.get_using prg in let cinfo = CInfo.make ~name:obl.obl_name ~typ:(EConstr.of_constr obl.obl_type) ?using () in let poly = Internal.get_poly prg in let info = Info.make ~scope ~kind ~poly () in let lemma = Proof.start_core ~cinfo ~info ~proof_ending evd in let lemma = fst @@ Proof.by !default_tactic lemma in let lemma = Option.cata (fun tac -> Proof.set_endline_tactic tac lemma) lemma tac in lemma and solve_prg_obligations ~pm prg ?oblset tac = let { obls; remaining } = Internal.get_obligations prg in let rem = ref remaining in let obls' = Array.copy obls in let set = ref Int.Set.empty in let p = match oblset with | None -> (fun _ -> true) | Some s -> set := s; (fun i -> Int.Set.mem i !set) in let prg = Array.fold_left_i (fun i prg x -> if p i then ( match solve_obligation_by_tac prg obls' i tac with | None -> prg | Some prg -> let deps = dependencies obls i in set := Int.Set.union !set deps; decr rem; prg) else prg) prg obls' in update_obls ~pm prg obls' !rem and auto_solve_obligations ~pm n ?oblset tac : State.t * progress = Flags.if_verbose Feedback.msg_info (str "Solving obligations automatically..."); let prg = get_unique_prog ~pm n in solve_prg_obligations ~pm prg ?oblset tac let solve_obligations ~pm n tac = let prg = get_unique_prog ~pm n in solve_prg_obligations ~pm prg tac let solve_all_obligations ~pm tac = State.fold pm ~init:pm ~f:(fun k v pm -> solve_prg_obligations ~pm v tac |> fst) let try_solve_obligation ~pm n prg tac = let prg = get_unique_prog ~pm prg in let {obls; remaining} = Internal.get_obligations prg in let obls' = Array.copy obls in match solve_obligation_by_tac prg obls' n tac with | Some prg' -> let pm, _ = update_obls ~pm prg' obls' (pred remaining) in pm | None -> pm let try_solve_obligations ~pm n tac = solve_obligations ~pm n tac |> fst let obligation (user_num, name, typ) ~pm tac = let num = pred user_num in let prg = get_unique_prog ~pm name in let { obls; remaining } = Internal.get_obligations prg in if num >= 0 && num < Array.length obls then let obl = obls.(num) in match obl.obl_body with | None -> solve_obligation prg num tac | Some r -> Error.already_solved user_num else Error.unknown_obligation num let show_single_obligation i n obls x = let x = subst_deps_obl obls x in let env = Global.env () in let sigma = Evd.from_env env in let msg = str "Obligation" ++ spc () ++ int (succ i) ++ spc () ++ str "of" ++ spc () ++ Id.print n ++ str ":" ++ spc () ++ hov 1 (Printer.pr_constr_env env sigma x.obl_type ++ str "." 
++ fnl ()) in Feedback.msg_info msg let show_obligations_of_prg ?(msg = true) prg = let n = Internal.get_name prg in let {obls; remaining} = Internal.get_obligations prg in let showed = ref 5 in if msg then Feedback.msg_info (int remaining ++ str " obligation(s) remaining: "); Array.iteri (fun i x -> match x.obl_body with | None -> if !showed > 0 then begin decr showed; show_single_obligation i n obls x end | Some _ -> ()) obls let show_obligations ~pm ?(msg = true) n = let progs = match n with | None -> State.all pm | Some n -> (match State.find pm n with | Some prg -> [prg] | None -> Error.no_obligations (Some n)) in List.iter (fun x -> show_obligations_of_prg ~msg x) progs let show_term ~pm n = let prg = get_unique_prog ~pm n in ProgramDecl.show prg let msg_generating_obl name obls = let len = Array.length obls in let info = Id.print name ++ str " has type-checked" in Feedback.msg_info (if len = 0 then info ++ str "." else info ++ str ", generating " ++ int len ++ str (String.plural len " obligation")) let add_definition ~pm ~cinfo ~info ?obl_hook ?term ~uctx ?tactic ?(reduce = reduce) ?(opaque = false) obls = let prg = ProgramDecl.make ~info ~cinfo ~body:term ~opaque ~uctx ~reduce ~ntns:[] ~deps:[] ~fixpoint_kind:None ?obl_hook obls in let name = CInfo.get_name cinfo in let {obls;_} = Internal.get_obligations prg in if Int.equal (Array.length obls) 0 then ( Flags.if_verbose (msg_generating_obl name) obls; let pm, cst = Obls_.declare_definition ~pm prg in pm, Defined cst) else let () = Flags.if_verbose (msg_generating_obl name) obls in let pm = State.add pm name prg in let pm, res = auto_solve_obligations ~pm (Some name) tactic in match res with | Remain rem -> Flags.if_verbose (show_obligations ~pm ~msg:false) (Some name); pm, res | _ -> pm, res let add_mutual_definitions l ~pm ~info ?obl_hook ~uctx ?tactic ?(reduce = reduce) ?(opaque = false) ~ntns fixkind = let deps = List.map (fun (ci,_,_) -> CInfo.get_name ci) l in let pm = List.fold_left (fun pm (cinfo, b, obls) -> let prg = ProgramDecl.make ~info ~cinfo ~opaque ~body:(Some b) ~uctx ~deps ~fixpoint_kind:(Some fixkind) ~ntns ~reduce ?obl_hook obls in State.add pm (CInfo.get_name cinfo) prg) pm l in let pm, _defined = List.fold_left (fun (pm, finished) x -> if finished then (pm, finished) else let pm, res = auto_solve_obligations ~pm (Some x) tactic in match res with | Defined _ -> (* If one definition is turned into a constant, the whole block is defined. 
*) (pm, true) | _ -> (pm, false)) (pm, false) deps in pm let rec admit_prog ~pm prg = let {obls} = Internal.get_obligations prg in let is_open _ x = Option.is_empty x.obl_body && List.is_empty (deps_remaining obls x.obl_deps) in let i = match Array.findi is_open obls with | Some i -> i | None -> CErrors.anomaly (Pp.str "Could not find a solvable obligation.") in let proof = solve_obligation prg i None in let pm = Proof.save_admitted ~pm ~proof in match ProgMap.find_opt (Internal.get_name prg) pm with | Some prg -> admit_prog ~pm (CEphemeron.get prg) | None -> pm let rec admit_all_obligations ~pm = let prg = State.first_pending pm in match prg with | None -> pm | Some prg -> let pm = admit_prog ~pm prg in admit_all_obligations ~pm let admit_obligations ~pm n = match n with | None -> admit_all_obligations ~pm | Some _ -> let prg = get_unique_prog ~pm n in let pm = admit_prog ~pm prg in pm let next_obligation ~pm n tac = let prg = match n with | None -> begin match State.first_pending pm with | Some prg -> prg | None -> Error.no_obligations None end | Some _ -> get_unique_prog ~pm n in let {obls; remaining} = Internal.get_obligations prg in let is_open _ x = Option.is_empty x.obl_body && List.is_empty (deps_remaining obls x.obl_deps) in let i = match Array.findi is_open obls with | Some i -> i | None -> CErrors.anomaly (Pp.str "Could not find a solvable obligation.") in solve_obligation prg i tac let check_program_libraries () = Coqlib.check_required_library Coqlib.datatypes_module_name; Coqlib.check_required_library ["Coq";"Init";"Specif"]; Coqlib.check_required_library ["Coq";"Program";"Tactics"] (* aliases *) let prepare_obligation = prepare_obligation let check_solved_obligations = Obls_.check_solved_obligations type fixpoint_kind = Obls_.fixpoint_kind = | IsFixpoint of lident option list | IsCoFixpoint type nonrec progress = progress = | Remain of int | Dependent | Defined of GlobRef.t end module OblState = Obls_.State let declare_constant ?local ~name ~kind ?typing_flags = declare_constant ?local ~name ~kind ~typing_flags let declare_entry ~name ~scope ~kind = declare_entry ~name ~scope ~kind ~typing_flags:None coq-8.15.0/vernac/declare.mli000066400000000000000000000460411417001151100157430ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a -> 'a) -> 'a g val make : (S.t -> unit) -> t val call : ?hook:t -> S.t -> unit end (** {2 One-go, non-interactive declaration API } *) (** Information for a single top-level named constant *) module CInfo : sig type 'constr t val make : name : Id.t -> typ:'constr -> ?args:Name.t list -> ?impargs:Impargs.manual_implicits -> ?using:Proof_using.t -> unit -> 'constr t (* Used only in Vernacentries, may disappear from public API *) val to_constr : Evd.evar_map -> EConstr.t t -> Constr.t t (* Used only in RecLemmas, may disappear from public API *) val get_typ : 'constr t -> 'constr end (** Information for a declaration, interactive or not, includes parameters shared by mutual constants *) module Info : sig type t (** Note that [opaque] doesn't appear here as it is not known at the start of the proof in the interactive case. *) val make : ?poly:bool -> ?inline : bool -> ?kind : Decls.logical_kind (** Theorem, etc... 
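For illustration (a non-exhaustive, assumed-representative sample), kinds used by the accompanying implementation include [Decls.(IsProof Lemma)] and [Decls.(IsDefinition Definition)].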
*) -> ?udecl : UState.universe_decl -> ?scope : Locality.locality (** locality *) -> ?hook : Hook.t (** Callback to be executed after saving the constant *) -> ?typing_flags:Declarations.typing_flags -> unit -> t end (** Declares a non-interactive constant; [body] and [types] will be normalized w.r.t. the passed [evar_map] [sigma]. Universes should be handled properly, including minimization and restriction. Note that [sigma] is checked for unresolved evars, thus you should be careful not to submit open terms *) val declare_definition : info:Info.t -> cinfo:EConstr.t option CInfo.t -> opaque:bool -> body:EConstr.t -> Evd.evar_map -> GlobRef.t type lemma_possible_guards = int list list val declare_mutually_recursive : info:Info.t -> cinfo: Constr.t CInfo.t list -> opaque:bool -> ntns:Metasyntax.where_decl_notation list -> uctx:UState.t -> rec_declaration:Constr.rec_declaration -> possible_indexes:lemma_possible_guards option -> Names.GlobRef.t list (** {2 Declaration of interactive constants } *) (** [save] / [save_admitted] can update obligations state, so we need to expose the state here *) module OblState : sig type t val empty : t end (** [Declare.Proof.t] Construction of constants using interactive proofs. *) module Proof : sig type t (** [start_proof ~info ~cinfo sigma] starts a proof of [cinfo]. The proof is started in the evar map [sigma] (which can typically contain universe constraints) *) val start : info:Info.t -> cinfo:EConstr.t CInfo.t -> Evd.evar_map -> t (** [start_{derive,equations}] are functions meant to handle interactive proofs with multiple goals, they should be considered experimental until we provide a more general API encompassing both of them. Please, get in touch with the developers if you would like to experiment with multi-goal dependent proofs so we can use your input on the design of the new API. *) val start_derive : f:Id.t -> name:Id.t -> info:Info.t -> Proofview.telescope -> t val start_equations : name:Id.t -> info:Info.t -> hook:(pm:OblState.t -> Constant.t list -> Evd.evar_map -> OblState.t) -> types:(Environ.env * Evar.t * Evd.evar_info * EConstr.named_context * Evd.econstr) list -> Evd.evar_map -> Proofview.telescope -> t (** Pretty much internal, used by the Lemma vernaculars *) val start_with_initialization : info:Info.t -> cinfo:Constr.t CInfo.t -> Evd.evar_map -> t type mutual_info = (bool * lemma_possible_guards * Constr.t option list option) (** Pretty much internal, used by mutual Lemma / Fixpoint vernaculars *) val start_mutual_with_initialization : info:Info.t -> cinfo:Constr.t CInfo.t list -> mutual_info:mutual_info -> Evd.evar_map -> int list option -> t (** Qed a proof *) val save : pm:OblState.t -> proof:t -> opaque:Vernacexpr.opacity_flag -> idopt:Names.lident option -> OblState.t * GlobRef.t list (** For proofs known to have [Regular] ending, no need to touch program state. *) val save_regular : proof:t -> opaque:Vernacexpr.opacity_flag -> idopt:Names.lident option -> GlobRef.t list (** Admit a proof *) val save_admitted : pm:OblState.t -> proof:t -> OblState.t (** [by tac] applies tactic [tac] to the 1st subgoal of the current focused proof. Returns [false] if an unsafe tactic has been used. 
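(* Illustrative sketch, not part of the original interface: minimal use of [declare_definition] as seen from client code. [sigma] and [body] are assumed to be produced by the caller's own elaboration; every name below is an example. *)
let declare_simple ~name ~sigma ~body =
  let cinfo = Declare.CInfo.make ~name ~typ:None () in  (* type inferred from [body] *)
  let info = Declare.Info.make ~poly:false () in
  Declare.declare_definition ~info ~cinfo ~opaque:false ~body sigma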
*) val by : unit Proofview.tactic -> t -> t * bool (** Operations on ongoing proofs *) val get : t -> Proof.t val get_name : t -> Names.Id.t val fold : f:(Proof.t -> 'a) -> t -> 'a val map : f:(Proof.t -> Proof.t) -> t -> t val map_fold : f:(Proof.t -> Proof.t * 'a) -> t -> t * 'a val map_fold_endline : f:(unit Proofview.tactic -> Proof.t -> Proof.t * 'a) -> t -> t * 'a (** Sets the tactic to be used when a tactic line is closed with [...] *) val set_endline_tactic : Genarg.glob_generic_argument -> t -> t (** Sets the section variables assumed by the proof, returns its closure * (w.r.t. type dependencies and let-ins covered by it) *) val set_used_variables : t -> using:Proof_using.t -> Constr.named_context * t (** Gets the set of variables declared to be used by the proof. None means no "Proof using" or #[using] was given *) val get_used_variables : t -> Id.Set.t option (** Compacts the representation of the proof by pruning all intermediate terms *) val compact : t -> t (** Update the proof's universe information typically after a side-effecting command (e.g. a sublemma definition) has been run inside it. *) val update_sigma_univs : UGraph.t -> t -> t val get_open_goals : t -> int (** Helpers to obtain proof state when in an interactive proof *) (** [get_goal_context n] returns the context of the [n]th subgoal of the current focused proof or raises a [UserError] if there is no focused proof or if there is no more subgoals *) val get_goal_context : t -> int -> Evd.evar_map * Environ.env (** [get_current_goal_context ()] works as [get_goal_context 1] *) val get_current_goal_context : t -> Evd.evar_map * Environ.env (** [get_current_context ()] returns the context of the current focused goal. If there is no focused goal but there is a proof in progress, it returns the corresponding evar_map. If there is no pending proof then it returns the current global environment and empty evar_map. *) val get_current_context : t -> Evd.evar_map * Environ.env (** {2 Proof delay API, warning, internal, not stable *) (* Intermediate step necessary to delegate the future. * Both access the current proof state. The former is supposed to be * chained with a computation that completed the proof *) type closed_proof_output (** Requires a complete proof. *) val return_proof : t -> closed_proof_output (** An incomplete proof is allowed (no error), and a warn is given if the proof is complete. *) val return_partial_proof : t -> closed_proof_output (** XXX: This is an internal, low-level API and could become scheduled for removal from the public API, use higher-level declare APIs instead *) type proof_object val close_proof : opaque:Vernacexpr.opacity_flag -> keep_body_ucst_separate:bool -> t -> proof_object val close_future_proof : feedback_id:Stateid.t -> t -> closed_proof_output Future.computation -> proof_object (** Special cases for delayed proofs, in this case we must provide the proof information so the proof won't be forced. *) val save_lemma_admitted_delayed : pm:OblState.t -> proof:proof_object -> OblState.t val save_lemma_proved_delayed : pm:OblState.t -> proof:proof_object -> idopt:Names.lident option -> OblState.t * GlobRef.t list (** Used by the STM only to store info, should go away *) val get_po_name : proof_object -> Id.t end (** {2 low-level, internal API, avoid using unless you have special needs } *) (** Proof entries represent a proof that has been finished, but still not registered with the kernel. 
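(* Illustrative sketch, not part of the original interface: driving an interactive proof programmatically with [Proof.start], [Proof.by] and [Proof.save_regular]. It assumes [tac] closes all goals; [Opaque] is the opacity flag constructor assumed to come from [Vernacexpr]; all other names are examples. *)
let prove_and_save ~name ~sigma ~typ ~tac =
  let cinfo = Declare.CInfo.make ~name ~typ () in
  let info = Declare.Info.make () in
  let proof = Declare.Proof.start ~info ~cinfo sigma in
  let proof, _unsafe = Declare.Proof.by tac proof in
  Declare.Proof.save_regular ~proof ~opaque:Vernacexpr.Opaque ~idopt:None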
XXX: This is an internal, low-level API and could become scheduled for removal from the public API, use higher-level declare APIs instead *) type proof_entry type parameter_entry type primitive_entry val definition_entry : ?opaque:bool -> ?using:Names.Id.Set.t -> ?inline:bool -> ?types:Constr.types -> ?univs:UState.named_universes_entry -> Constr.constr -> proof_entry val parameter_entry : ?inline:int -> ?univs:UState.named_universes_entry -> Constr.constr -> parameter_entry val primitive_entry : ?types:(Constr.types * UState.named_universes_entry) -> CPrimitives.op_or_type -> primitive_entry (** XXX: This is an internal, low-level API and could become scheduled for removal from the public API, use higher-level declare APIs instead *) val declare_entry : name:Id.t -> scope:Locality.locality -> kind:Decls.logical_kind -> ?hook:Hook.t -> impargs:Impargs.manual_implicits -> uctx:UState.t -> proof_entry -> GlobRef.t (** Declaration of local constructions (Variable/Hypothesis/Local) *) val declare_variable : name:variable -> kind:Decls.logical_kind -> typ:Constr.types -> impl:Glob_term.binding_kind -> univs:UState.named_universes_entry -> unit (** Declaration of global constructions i.e. Definition/Theorem/Axiom/Parameter/... XXX: This is an internal, low-level API and could become scheduled for removal from the public API, use higher-level declare APIs instead *) type constant_entry = | DefinitionEntry of proof_entry | ParameterEntry of parameter_entry | PrimitiveEntry of primitive_entry val prepare_parameter : poly:bool -> udecl:UState.universe_decl -> types:EConstr.types -> Evd.evar_map -> Evd.evar_map * parameter_entry (** [declare_constant id cd] declares a global declaration (constant/parameter) with name [id] in the current section; it returns the full path of the declaration XXX: This is an internal, low-level API and could become scheduled for removal from the public API, use higher-level declare APIs instead *) val declare_constant : ?local:Locality.import_status -> name:Id.t -> kind:Decls.logical_kind -> ?typing_flags:Declarations.typing_flags -> constant_entry -> Constant.t (** Declaration messages, for internal use *) (** XXX: Scheduled for removal from public API, do not use *) val definition_message : Id.t -> unit val assumption_message : Id.t -> unit val fixpoint_message : int array option -> Id.t list -> unit val check_exists : Id.t -> unit (** Semantics of this function is a bit dubious, use with care *) val build_by_tactic : ?side_eff:bool -> Environ.env -> uctx:UState.t -> poly:bool -> typ:EConstr.types -> unit Proofview.tactic -> Constr.constr * Constr.types option * (UState.named_universes_entry) * bool * UState.t (** {2 Program mode API} *) (** Coq's Program mode support. This mode extends declarations of constants and fixpoints with [Program Definition] and [Program Fixpoint] to support incremental construction of terms using delayed proofs, called "obligations" The mode also provides facilities for managing and auto-solving sets of obligations. The basic code flow of programs/obligations is as follows: - [add_definition] / [add_mutual_definitions] are called from the respective [Program] vernacular command interpretation; at this point the only extra work we do is to prepare the new definition [d] using [RetrieveObl], which consists in turning unsolved evars into obligations. [d] is not sent to the kernel yet, as it is not complete and cannot be typchecked, but saved in a special data-structure. 
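(* Illustrative sketch, not part of the original interface: the low-level path going through [definition_entry] and [declare_constant] declared above. [body] is assumed to be a closed [Constr.t] supplied by the caller; the [kind] value is only an example assumed to come from the [Decls] module. *)
let declare_raw ~name ~body =
  let entry = Declare.definition_entry ~opaque:false body in
  Declare.declare_constant ~name ~kind:Decls.(IsDefinition Definition)
    (Declare.DefinitionEntry entry)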
Auto-solving of obligations is tried at this stage (see below). - [next_obligation] will retrieve the next obligation ([RetrieveObl] sorts them by topological order) and will try to solve it. When all obligations are solved, the original constant [d] is grounded and sent to the kernel for addition to the global environment. Auto-solving of obligations is also triggered on obligation completion. {2} Solving of obligations: Solved obligations are stored as regular global declarations in the global environment, usually with name [constant_obligation_number] where [constant] is the original [constant] and [number] is the corresponding (internal) number. Solving an obligation can trigger a bit of a complex cascaded callback path; closing an obligation can indeed allow all other obligations to be closed, which in turn may trigger the declaration of the original constant. Care must be taken, as this can modify [Global.env] in arbitrary ways. Current code takes some care to refresh the [env] in the proper boundaries, but the invariants remain delicate. {2} Saving of obligations: as open obligations use the regular proof mode, a `Qed` will call `Lemmas.save_lemma` first. For this reason obligations code is split in two: this file, [Obligations], taking care of the top-level vernac commands, and [Declare], which is called by `Lemmas` to close an obligation proof and eventually to declare the top-level [Program]ed constant. *) module Obls : sig type fixpoint_kind = IsFixpoint of lident option list | IsCoFixpoint (** Check obligations are properly solved before closing the [what_for] section / module *) val check_solved_obligations : pm:OblState.t -> what_for:Pp.t -> unit val default_tactic : unit Proofview.tactic ref (** Resolution status of a program *) type progress = | Remain of int (** n obligations remaining *) | Dependent (** Dependent on other definitions *) | Defined of GlobRef.t (** Defined as id *) (** Prepare API, to be removed once we provide the corresponding 1-step API *) val prepare_obligation : name:Id.t -> types:EConstr.t option -> body:EConstr.t -> Evd.evar_map -> Constr.constr * Constr.types * UState.t * RetrieveObl.obligation_info (** Start a [Program Definition c] proof. [uctx] [udecl] [impargs] [kind] [scope] [poly] etc... come from the interpretation of the vernacular; `obligation_info` was generated by [RetrieveObl]. It will return whether all the obligations were solved; if so, it will also register [c] with the kernel. *) val add_definition : pm:OblState.t -> cinfo:Constr.types CInfo.t -> info:Info.t -> ?obl_hook: OblState.t Hook.g -> ?term:Constr.t -> uctx:UState.t -> ?tactic:unit Proofview.tactic -> ?reduce:(Constr.t -> Constr.t) -> ?opaque:bool -> RetrieveObl.obligation_info -> OblState.t * progress (* XXX: unify with MutualEntry *) (** Start a [Program Fixpoint] declaration, similar to the above, except it takes a list now.
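(* Illustrative sketch, not part of the original interface: how client code might chain [prepare_obligation] and [add_definition] above for a Program-style constant. [sigma] and [body] come from the caller's own elaboration; all names are examples. *)
let program_definition ~pm ~name ~sigma ~body =
  let term, typ, uctx, obls =
    Declare.Obls.prepare_obligation ~name ~types:None ~body sigma in
  let cinfo = Declare.CInfo.make ~name ~typ () in
  let info = Declare.Info.make () in
  (* Registers [name]; remaining obligations are reported via [progress]. *)
  Declare.Obls.add_definition ~pm ~cinfo ~info ~term ~uctx obls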
*) val add_mutual_definitions : (Constr.t CInfo.t * Constr.t * RetrieveObl.obligation_info) list -> pm:OblState.t -> info:Info.t -> ?obl_hook: OblState.t Hook.g -> uctx:UState.t -> ?tactic:unit Proofview.tactic -> ?reduce:(Constr.t -> Constr.t) -> ?opaque:bool -> ntns:Metasyntax.where_decl_notation list -> fixpoint_kind -> OblState.t (** Implementation of the [Obligation] command *) val obligation : int * Names.Id.t option * Constrexpr.constr_expr option -> pm:OblState.t -> Genarg.glob_generic_argument option -> Proof.t (** Implementation of the [Next Obligation] command *) val next_obligation : pm:OblState.t -> Names.Id.t option -> Genarg.glob_generic_argument option -> Proof.t (** Implementation of the [Solve Obligation] command *) val solve_obligations : pm:OblState.t -> Names.Id.t option -> unit Proofview.tactic option -> OblState.t * progress val solve_all_obligations : pm:OblState.t -> unit Proofview.tactic option -> OblState.t (** Number of remaining obligations to be solved for this program *) val try_solve_obligation : pm:OblState.t -> int -> Names.Id.t option -> unit Proofview.tactic option -> OblState.t val try_solve_obligations : pm:OblState.t -> Names.Id.t option -> unit Proofview.tactic option -> OblState.t val show_obligations : pm:OblState.t -> ?msg:bool -> Names.Id.t option -> unit val show_term : pm:OblState.t -> Names.Id.t option -> Pp.t val admit_obligations : pm:OblState.t -> Names.Id.t option -> OblState.t val check_program_libraries : unit -> unit end val is_local_constant : Constant.t -> bool (** {6 For internal support, do not use} *) module Internal : sig (* Liboject exports *) module Constant : sig type t val tag : t Libobject.Dyn.tag val kind : t -> Decls.logical_kind end val objVariable : unit Libobject.Dyn.tag end coq-8.15.0/vernac/declareInd.ml000066400000000000000000000173161417001151100162300ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Notation.declare_ref_arguments_scope Evd.empty (GlobRef.IndRef (kn,i)); for j=1 to List.length lc do Notation.declare_ref_arguments_scope Evd.empty (GlobRef.ConstructRef ((kn,i),j)); done) mie.mind_entry_inds type inductive_obj = { ind_names : (Id.t * Id.t list) list (* For each block, name of the type + name of constructors *) } let inductive_names sp kn obj = let (dp,_) = Libnames.repr_path sp in let kn = Global.mind_of_delta_kn kn in let names, _ = List.fold_left (fun (names, n) (typename, consnames) -> let ind_p = (kn,n) in let names, _ = List.fold_left (fun (names, p) l -> let sp = Libnames.make_path dp l in ((sp, GlobRef.ConstructRef (ind_p,p)) :: names, p+1)) (names, 1) consnames in let sp = Libnames.make_path dp typename in ((sp, GlobRef.IndRef ind_p) :: names, n+1)) ([], 0) obj.ind_names in names let load_inductive i ((sp, kn), names) = let names = inductive_names sp kn names in List.iter (fun (sp, ref) -> Nametab.push (Nametab.Until i) sp ref ) names let open_inductive i ((sp, kn), names) = let names = inductive_names sp kn names in List.iter (fun (sp, ref) -> Nametab.push (Nametab.Exactly i) sp ref) names let cache_inductive ((sp, kn), names) = let names = inductive_names sp kn names in List.iter (fun (sp, ref) -> Nametab.push (Nametab.Until 1) sp ref) names let discharge_inductive ((sp, kn), names) = Some names let objInductive : inductive_obj Libobject.Dyn.tag = let open Libobject in declare_object_full {(default_object "INDUCTIVE") with 
cache_function = cache_inductive; load_function = load_inductive; open_function = simple_open open_inductive; classify_function = (fun a -> Substitute a); subst_function = ident_subst_function; discharge_function = discharge_inductive; } let inInductive v = Libobject.Dyn.Easy.inj v objInductive let cache_prim (_,(p,c)) = Structures.PrimitiveProjections.register p c let load_prim _ p = cache_prim p let subst_prim (subst,(p,c)) = Mod_subst.subst_proj_repr subst p, Mod_subst.subst_constant subst c let discharge_prim (_,(p,c)) = Some (Lib.discharge_proj_repr p, c) let inPrim : (Projection.Repr.t * Constant.t) -> Libobject.obj = let open Libobject in declare_object { (default_object "PRIMPROJS") with cache_function = cache_prim ; load_function = load_prim; subst_function = subst_prim; classify_function = (fun x -> Substitute x); discharge_function = discharge_prim } let declare_primitive_projection p c = Lib.add_anonymous_leaf (inPrim (p,c)) let feedback_axiom () = Feedback.(feedback AddedAxiom) let is_unsafe_typing_flags () = let open Declarations in let flags = Environ.typing_flags (Global.env()) in not (flags.check_universes && flags.check_guarded && flags.check_positive) (* for initial declaration *) let declare_mind ?typing_flags mie = let id = match mie.mind_entry_inds with | ind::_ -> ind.mind_entry_typename | [] -> CErrors.anomaly (Pp.str "cannot declare an empty list of inductives.") in let map_names mip = (mip.mind_entry_typename, mip.mind_entry_consnames) in let names = List.map map_names mie.mind_entry_inds in List.iter (fun (typ, cons) -> Declare.check_exists typ; List.iter Declare.check_exists cons) names; let _kn' = Global.add_mind ?typing_flags id mie in let (sp,kn as oname) = Lib.add_leaf id (inInductive { ind_names = names }) in if is_unsafe_typing_flags() then feedback_axiom (); let mind = Global.mind_of_delta_kn kn in let isprim = Inductive.is_primitive_record (Inductive.lookup_mind_specif (Global.env()) (mind,0)) in Impargs.declare_mib_implicits mind; declare_inductive_argument_scopes mind mie; oname, isprim let is_recursive mie = let open Constr in let rec is_recursive_constructor lift typ = match Constr.kind typ with | Prod (_,arg,rest) -> not (EConstr.Vars.noccurn Evd.empty (* FIXME *) lift (EConstr.of_constr arg)) || is_recursive_constructor (lift+1) rest | LetIn (na,b,t,rest) -> is_recursive_constructor (lift+1) rest | _ -> false in match mie.mind_entry_inds with | [ind] -> let nparams = List.length mie.mind_entry_params in List.exists (fun t -> is_recursive_constructor (nparams+1) t) ind.mind_entry_lc | _ -> false let warn_non_primitive_record = CWarnings.create ~name:"non-primitive-record" ~category:"record" (fun indsp -> Pp.(hov 0 (str "The record " ++ Nametab.pr_global_env Id.Set.empty (GlobRef.IndRef indsp) ++ strbrk" could not be defined as a primitive record"))) let minductive_message = function | [] -> CErrors.user_err Pp.(str "No inductive definition.") | [x] -> Pp.(Id.print x ++ str " is defined") | l -> Pp.(hov 0 (prlist_with_sep pr_comma Id.print l ++ spc () ++ str "are defined")) type one_inductive_impls = Impargs.manual_implicits (* for inds *) * Impargs.manual_implicits list (* for constrs *) let declare_mutual_inductive_with_eliminations ?(primitive_expected=false) ?typing_flags mie ubinders impls = (* spiwack: raises an error if the structure is supposed to be non-recursive, but isn't *) begin match mie.mind_entry_finite with | Declarations.BiFinite when is_recursive mie -> if Option.has_some mie.mind_entry_record then CErrors.user_err Pp.(str "Records 
declared with the keywords Record or Structure cannot be recursive. You can, however, define recursive records using the Inductive or CoInductive command.") else CErrors.user_err Pp.(str ("Types declared with the keyword Variant cannot be recursive. Recursive types are defined with the Inductive and CoInductive command.")) | _ -> () end; let names = List.map (fun e -> e.mind_entry_typename) mie.mind_entry_inds in let (_, kn), prim = declare_mind ?typing_flags mie in let mind = Global.mind_of_delta_kn kn in let is_template = match mie.mind_entry_universes with Template_ind_entry _ -> true | _ -> false in if primitive_expected && not prim then warn_non_primitive_record (mind,0); DeclareUniv.declare_univ_binders (GlobRef.IndRef (mind,0)) ubinders; List.iteri (fun i (indimpls, constrimpls) -> let ind = (mind,i) in let gr = GlobRef.IndRef ind in Impargs.maybe_declare_manual_implicits false gr indimpls; List.iteri (fun j impls -> Impargs.maybe_declare_manual_implicits false (GlobRef.ConstructRef (ind, succ j)) impls) constrimpls) impls; Flags.if_verbose Feedback.msg_info (minductive_message names); if is_template then List.iteri (fun i _ -> Equality.set_keep_equality (mind, i) true) mie.mind_entry_inds; if mie.mind_entry_private == None then Indschemes.declare_default_schemes mind; mind module Internal = struct type nonrec inductive_obj = inductive_obj let objInductive = objInductive end coq-8.15.0/vernac/declareInd.mli000066400000000000000000000025211417001151100163710ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* ?typing_flags:Declarations.typing_flags -> Entries.mutual_inductive_entry -> UState.named_universes_entry -> one_inductive_impls list -> Names.MutInd.t (** {6 For legacy support, do not use} *) module Internal : sig type inductive_obj val objInductive : inductive_obj Libobject.Dyn.tag end val declare_primitive_projection : Names.Projection.Repr.t -> Names.Constant.t -> unit coq-8.15.0/vernac/declareUctx.ml000066400000000000000000000030011417001151100164230ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* (* Having Prop/Set/Var as section universes makes no sense *) assert false | Some na -> try let qid = Nametab.shortest_qualid_of_universe Names.Id.Map.empty na in Names.Name (Libnames.qualid_basename qid) with Not_found -> (* Best-effort naming from the string representation of the level. See univNames.ml for a similar hack. 
*) Names.Name (Names.Id.of_string_soft (Univ.Level.to_string lvl)) in Array.map map (Univ.Instance.to_array inst) let declare_universe_context ~poly ctx = if poly then let uctx = Univ.ContextSet.to_context name_instance ctx in Global.push_section_context uctx else Global.push_context_set ~strict:true ctx coq-8.15.0/vernac/declareUctx.mli000066400000000000000000000013521417001151100166030ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Univ.ContextSet.t -> unit coq-8.15.0/vernac/declareUniv.ml000066400000000000000000000127341417001151100164360ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Some Pp.(seq [ Pp.pr_opt_no_spc (fun s -> str s ++ spc ()) kind ; Id.print id; str " already exists."]) | _ -> None) type universe_source = | BoundUniv (* polymorphic universe, bound in a function (this will go away someday) *) | QualifiedUniv of Id.t (* global universe introduced by some global value *) | UnqualifiedUniv (* other global universe *) type universe_name_decl = universe_source * (Id.t * Univ.Level.UGlobal.t) list let check_exists_universe sp = if Nametab.exists_universe sp then raise (AlreadyDeclared (Some "Universe", Libnames.basename sp)) else () let qualify_univ i dp src id = match src with | BoundUniv | UnqualifiedUniv -> i, Libnames.make_path dp id | QualifiedUniv l -> let dp = DirPath.repr dp in Nametab.map_visibility succ i, Libnames.make_path (DirPath.make (l::dp)) id let do_univ_name ~check i dp src (id,univ) = let i, sp = qualify_univ i dp src id in if check then check_exists_universe sp; Nametab.push_universe i sp univ let cache_univ_names ((sp, _), (src, univs)) = let depth = Lib.sections_depth () in let dp = Libnames.pop_dirpath_n depth (Libnames.dirpath sp) in List.iter (do_univ_name ~check:true (Nametab.Until 1) dp src) univs let load_univ_names i ((sp, _), (src, univs)) = List.iter (do_univ_name ~check:false (Nametab.Until i) (Libnames.dirpath sp) src) univs let open_univ_names i ((sp, _), (src, univs)) = List.iter (do_univ_name ~check:false (Nametab.Exactly i) (Libnames.dirpath sp) src) univs let discharge_univ_names = function | _, (BoundUniv, _) -> None | _, ((QualifiedUniv _ | UnqualifiedUniv), _ as x) -> Some x let input_univ_names : universe_name_decl -> Libobject.obj = let open Libobject in declare_object { (default_object "Global universe name state") with cache_function = cache_univ_names; load_function = load_univ_names; open_function = simple_open open_univ_names; discharge_function = discharge_univ_names; subst_function = (fun (subst, a) -> (* Actually the name is generated once and for all. 
*) a); classify_function = (fun a -> Substitute a) } let input_univ_names (src, l) = if CList.is_empty l then () else Lib.add_anonymous_leaf (input_univ_names (src, l)) let invent_name (named,cnt) u = let rec aux i = let na = Id.of_string ("u"^(string_of_int i)) in if Id.Map.mem na named then aux (i+1) else na, (Id.Map.add na u named, i+1) in aux cnt let label_of = let open GlobRef in function | ConstRef c -> Label.to_id @@ Constant.label c | IndRef (c,_) -> Label.to_id @@ MutInd.label c | VarRef id -> id | ConstructRef _ -> CErrors.anomaly ~label:"declare_univ_binders" Pp.(str "declare_univ_binders on a constructor reference") let declare_univ_binders gr (univs, pl) = let l = label_of gr in match univs with | UState.Polymorphic_entry _ -> () | UState.Monomorphic_entry (levels, _) -> (* First the explicitly named universes *) let named, univs = Id.Map.fold (fun id univ (named,univs) -> let univs = match Univ.Level.name univ with | None -> assert false (* having Prop/Set/Var as binders is nonsense *) | Some univ -> (id,univ)::univs in let named = Level.Set.add univ named in named, univs) pl (Level.Set.empty,[]) in (* then invent names for the rest *) let _, univs = Level.Set.fold (fun univ (aux,univs) -> let id, aux = invent_name aux univ in let univ = Option.get (Level.name univ) in aux, (id,univ) :: univs) (Level.Set.diff levels named) ((pl,0),univs) in input_univ_names (QualifiedUniv l, univs) let do_universe ~poly l = let in_section = Global.sections_are_opened () in let () = if poly && not in_section then CErrors.user_err (Pp.str"Cannot declare polymorphic universes outside sections.") in let l = List.map (fun {CAst.v=id} -> (id, UnivGen.new_univ_global ())) l in let ctx = List.fold_left (fun ctx (_,qid) -> Univ.Level.Set.add (Univ.Level.make qid) ctx) Univ.Level.Set.empty l, Univ.Constraints.empty in let src = if poly then BoundUniv else UnqualifiedUniv in let () = input_univ_names (src, l) in DeclareUctx.declare_universe_context ~poly ctx let do_constraint ~poly l = let open Univ in let evd = Evd.from_env (Global.env ()) in let u_of_id x = Constrintern.interp_known_level evd x in let constraints = List.fold_left (fun acc (l, d, r) -> let lu = u_of_id l and ru = u_of_id r in Constraints.add (lu, d, ru) acc) Constraints.empty l in let uctx = ContextSet.add_constraints constraints ContextSet.empty in DeclareUctx.declare_universe_context ~poly uctx coq-8.15.0/vernac/declareUniv.mli000066400000000000000000000023361417001151100166040ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* UState.named_universes_entry -> unit (** Command [Universes]. *) val do_universe : poly:bool -> lident list -> unit (** Command [Constraint]. *) val do_constraint : poly:bool -> Constrexpr.univ_constraint_expr list -> unit coq-8.15.0/vernac/declaremods.ml000066400000000000000000001126601417001151100164560ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* None | InlineAt i -> Some i | DefaultInline -> default_inline () (** ModSubstObjs : a cache of module substitutive objects This table is common to modules and module types. - For a Module M:=N, the objects of N will be reloaded with M after substitution. 
- For a Module M:SIG:=..., the module M gets its objects from SIG Invariants: - A alias (i.e. a module path inside a Ref constructor) should never lead to another alias, but rather to a concrete Objs constructor. We will plug later a handler dealing with missing entries in the cache. Such missing entries may come from inner parts of module types, which aren't registered by the standard libobject machinery. *) module ModSubstObjs : sig val set : ModPath.t -> substitutive_objects -> unit val get : ModPath.t -> substitutive_objects val set_missing_handler : (ModPath.t -> substitutive_objects) -> unit end = struct let table = Summary.ref (MPmap.empty : substitutive_objects MPmap.t) ~name:"MODULE-SUBSTOBJS" let missing_handler = ref (fun mp -> assert false) let set_missing_handler f = (missing_handler := f) let set mp objs = (table := MPmap.add mp objs !table) let get mp = try MPmap.find mp !table with Not_found -> !missing_handler mp end (** Some utilities about substitutive objects : substitution, expansion *) let sobjs_no_functor (mbids,_) = List.is_empty mbids let subst_filtered sub (f,mp as x) = let mp' = subst_mp sub mp in if mp == mp' then x else f, mp' let rec subst_aobjs sub = function | Objs o as objs -> let o' = subst_objects sub o in if o == o' then objs else Objs o' | Ref (mp, sub0) as r -> let sub0' = join sub0 sub in if sub0' == sub0 then r else Ref (mp, sub0') and subst_sobjs sub (mbids,aobjs as sobjs) = let aobjs' = subst_aobjs sub aobjs in if aobjs' == aobjs then sobjs else (mbids, aobjs') and subst_objects subst seg = let subst_one (id,obj as node) = match obj with | AtomicObject obj -> let obj' = Libobject.subst_object (subst,obj) in if obj' == obj then node else (id, AtomicObject obj') | ModuleObject sobjs -> let sobjs' = subst_sobjs subst sobjs in if sobjs' == sobjs then node else (id, ModuleObject sobjs') | ModuleTypeObject sobjs -> let sobjs' = subst_sobjs subst sobjs in if sobjs' == sobjs then node else (id, ModuleTypeObject sobjs') | IncludeObject aobjs -> let aobjs' = subst_aobjs subst aobjs in if aobjs' == aobjs then node else (id, IncludeObject aobjs') | ExportObject { mpl } -> let mpl' = List.Smart.map (subst_filtered subst) mpl in if mpl'==mpl then node else (id, ExportObject { mpl = mpl' }) | KeepObject _ -> assert false in List.Smart.map subst_one seg let expand_aobjs = function | Objs o -> o | Ref (mp, sub) -> match ModSubstObjs.get mp with | (_,Objs o) -> subst_objects sub o | _ -> assert false (* Invariant : any alias points to concrete objs *) let expand_sobjs (_,aobjs) = expand_aobjs aobjs (** {6 ModObjs : a cache of module objects} For each module, we also store a cache of "prefix", "substituted objects", "keep objects". This is used for instance to implement the "Import" command. substituted objects : roughly the objects above after the substitution - we need to keep them to call open_object when the module is opened (imported) keep objects : The list of non-substitutive objects - as above, for each of them we will call open_object when the module is opened (Some) Invariants: * If the module is a functor, it won't appear in this cache. * Module objects in substitutive_objects part have empty substituted objects. * Modules which where created with Module M:=mexpr or with Module M:SIG. ... End M. have the keep list empty. 
*) type module_objects = { module_prefix : Nametab.object_prefix; module_substituted_objects : Lib.lib_objects; module_keep_objects : Lib.lib_objects; } module ModObjs : sig val set : ModPath.t -> module_objects -> unit val get : ModPath.t -> module_objects (* may raise Not_found *) val all : unit -> module_objects MPmap.t end = struct let table = Summary.ref (MPmap.empty : module_objects MPmap.t) ~name:"MODULE-OBJS" let set mp objs = (table := MPmap.add mp objs !table) let get mp = MPmap.find mp !table let all () = !table end (** {6 Name management} Auxiliary functions to transform full_path and kernel_name given by Lib into ModPath.t and DirPath.t needed for modules *) let mp_of_kn kn = let mp,l = KerName.repr kn in MPdot (mp,l) let dir_of_sp sp = let dir,id = repr_path sp in add_dirpath_suffix dir id (** {6 Declaration of module substitutive objects} *) (** These functions register the visibility of the module and iterates through its components. They are called by plenty of module functions *) let consistency_checks exists dir = if exists then let _ = try Nametab.locate_module (qualid_of_dirpath dir) with Not_found -> user_err (DirPath.print dir ++ str " should already exist!") in () else if Nametab.exists_module dir then user_err (DirPath.print dir ++ str " already exists.") let compute_visibility exists i = if exists then Nametab.Exactly i else Nametab.Until i (** Iterate some function [iter_objects] on all components of a module *) let do_module exists iter_objects i obj_dir obj_mp sobjs kobjs = let prefix = Nametab.{ obj_dir ; obj_mp; } in consistency_checks exists obj_dir; Nametab.push_module (compute_visibility exists i) obj_dir obj_mp; ModSubstObjs.set obj_mp sobjs; (* If we're not a functor, let's iter on the internal components *) if sobjs_no_functor sobjs then begin let objs = expand_sobjs sobjs in let module_objects = { module_prefix = prefix; module_substituted_objects = objs; module_keep_objects = kobjs; } in ModObjs.set obj_mp module_objects; iter_objects (i+1) prefix objs; iter_objects (i+1) prefix kobjs end let do_module' exists iter_objects i ((sp,kn),sobjs) = do_module exists iter_objects i (dir_of_sp sp) (mp_of_kn kn) sobjs [] (** Nota: Interactive modules and module types cannot be recached! This used to be checked here via a flag along the substobjs. *) (** {6 Declaration of module type substitutive objects} *) (** Nota: Interactive modules and module types cannot be recached! This used to be checked more properly here. 
*) let load_modtype i sp mp sobjs = if Nametab.exists_modtype sp then anomaly (pr_path sp ++ str " already exists."); Nametab.push_modtype (Nametab.Until i) sp mp; ModSubstObjs.set mp sobjs (** {6 Declaration of substitutive objects for Include} *) let rec load_object i (name, obj) = match obj with | AtomicObject o -> Libobject.load_object i (name, o) | ModuleObject sobjs -> do_module' false load_objects i (name, sobjs) | ModuleTypeObject sobjs -> let (sp,kn) = name in load_modtype i sp (mp_of_kn kn) sobjs | IncludeObject aobjs -> load_include i (name, aobjs) | ExportObject _ -> () | KeepObject objs -> load_keep i (name, objs) and load_objects i prefix objs = List.iter (fun (id, obj) -> load_object i (Lib.make_oname prefix id, obj)) objs and load_include i ((sp,kn), aobjs) = let obj_dir = Libnames.dirpath sp in let obj_mp = KerName.modpath kn in let prefix = Nametab.{ obj_dir; obj_mp; } in let o = expand_aobjs aobjs in load_objects i prefix o and load_keep i ((sp,kn),kobjs) = (* Invariant : seg isn't empty *) let obj_dir = dir_of_sp sp and obj_mp = mp_of_kn kn in let prefix = Nametab.{ obj_dir ; obj_mp; } in let modobjs = try ModObjs.get obj_mp with Not_found -> assert false (* a substobjs should already be loaded *) in assert Nametab.(eq_op modobjs.module_prefix prefix); assert (List.is_empty modobjs.module_keep_objects); ModObjs.set obj_mp { modobjs with module_keep_objects = kobjs }; load_objects i prefix kobjs (** {6 Implementation of Import and Export commands} *) let mark_object f obj (exports,acc) = (exports, (f,obj)::acc) let rec collect_modules mpl acc = List.fold_left (fun acc fmp -> collect_module fmp acc) acc (List.rev mpl) and collect_module (f,mp) acc = (* May raise Not_found for unknown module and for functors *) let modobjs = ModObjs.get mp in let prefix = modobjs.module_prefix in let acc = collect_objects f 1 prefix modobjs.module_keep_objects acc in collect_objects f 1 prefix modobjs.module_substituted_objects acc and collect_object f i (name, obj as o) acc = match obj with | ExportObject { mpl } -> collect_exports f i mpl acc | AtomicObject _ | IncludeObject _ | KeepObject _ | ModuleObject _ | ModuleTypeObject _ -> mark_object f o acc and collect_objects f i prefix objs acc = List.fold_left (fun acc (id, obj) -> collect_object f i (Lib.make_oname prefix id, obj) acc ) acc (List.rev objs) and collect_export f (f',mp) (exports,objs as acc) = match filter_and f f' with | None -> acc | Some f -> let exports' = MPmap.update mp (function | None -> Some f | Some f0 -> Some (filter_or f f0)) exports in (* If the map doesn't change there is nothing new to export. It's possible that [filter_and] or [filter_or] mangled precise filters such that we repeat uselessly, but the important [Unfiltered] case is handled correctly. 
*) if exports == exports' then acc else collect_module (f,mp) (exports', objs) and collect_exports f i mpl acc = if Int.equal i 1 then List.fold_left (fun acc fmp -> collect_export f fmp acc) acc (List.rev mpl) else acc let open_modtype i ((sp,kn),_) = let mp = mp_of_kn kn in let mp' = try Nametab.locate_modtype (qualid_of_path sp) with Not_found -> anomaly (pr_path sp ++ str " should already exist!"); in assert (ModPath.equal mp mp'); Nametab.push_modtype (Nametab.Exactly i) sp mp let rec open_object f i (name, obj) = match obj with | AtomicObject o -> Libobject.open_object f i (name, o) | ModuleObject sobjs -> let dir = dir_of_sp (fst name) in let mp = mp_of_kn (snd name) in open_module f i dir mp sobjs | ModuleTypeObject sobjs -> open_modtype i (name, sobjs) | IncludeObject aobjs -> open_include f i (name, aobjs) | ExportObject { mpl } -> open_export f i mpl | KeepObject objs -> open_keep f i (name, objs) and open_module f i obj_dir obj_mp sobjs = consistency_checks true obj_dir; if in_filter ~cat:None f then Nametab.push_module (Nametab.Exactly i) obj_dir obj_mp; (* If we're not a functor, let's iter on the internal components *) if sobjs_no_functor sobjs then begin let modobjs = ModObjs.get obj_mp in open_objects f (i+1) modobjs.module_prefix modobjs.module_substituted_objects end and open_objects f i prefix objs = List.iter (fun (id, obj) -> open_object f i (Lib.make_oname prefix id, obj)) objs and open_include f i ((sp,kn), aobjs) = let obj_dir = Libnames.dirpath sp in let obj_mp = KerName.modpath kn in let prefix = Nametab.{ obj_dir; obj_mp; } in let o = expand_aobjs aobjs in open_objects f i prefix o and open_export f i mpl = let _,objs = collect_exports f i mpl (MPmap.empty, []) in List.iter (fun (f,o) -> open_object f 1 o) objs and open_keep f i ((sp,kn),kobjs) = let obj_dir = dir_of_sp sp and obj_mp = mp_of_kn kn in let prefix = Nametab.{ obj_dir; obj_mp; } in open_objects f i prefix kobjs let rec cache_object (name, obj) = match obj with | AtomicObject o -> Libobject.cache_object (name, o) | ModuleObject sobjs -> do_module' false load_objects 1 (name, sobjs) | ModuleTypeObject sobjs -> let (sp,kn) = name in load_modtype 0 sp (mp_of_kn kn) sobjs | IncludeObject aobjs -> cache_include (name, aobjs) | ExportObject { mpl } -> anomaly Pp.(str "Export should not be cached") | KeepObject objs -> cache_keep (name, objs) and cache_include ((sp,kn), aobjs) = let obj_dir = Libnames.dirpath sp in let obj_mp = KerName.modpath kn in let prefix = Nametab.{ obj_dir; obj_mp; } in let o = expand_aobjs aobjs in load_objects 1 prefix o; open_objects unfiltered 1 prefix o and cache_keep ((sp,kn),kobjs) = anomaly (Pp.str "This module should not be cached!") (* Adding operations with containers *) let add_leaf id obj = if ModPath.equal (Lib.current_mp ()) ModPath.initial then user_err Pp.(str "No session module started (use -top dir)"); let oname = Lib.make_foname id in cache_object (oname,obj); Lib.add_entry oname (Lib.Leaf obj); oname let add_leaves id objs = let oname = Lib.make_foname id in let add_obj obj = Lib.add_entry oname (Lib.Leaf obj); load_object 1 (oname,obj) in List.iter add_obj objs; oname (** {6 Handler for missing entries in ModSubstObjs} *) (** Since the inner of Module Types are not added by default to the ModSubstObjs table, we compensate this by explicit traversal of Module Types inner objects when needed. Quite a hack... 
*) let mp_id mp id = MPdot (mp, Label.of_id id) let rec register_mod_objs mp (id,obj) = match obj with | ModuleObject sobjs -> ModSubstObjs.set (mp_id mp id) sobjs | ModuleTypeObject sobjs -> ModSubstObjs.set (mp_id mp id) sobjs | IncludeObject aobjs -> List.iter (register_mod_objs mp) (expand_aobjs aobjs) | _ -> () let handle_missing_substobjs mp = match mp with | MPdot (mp',l) -> let objs = expand_sobjs (ModSubstObjs.get mp') in List.iter (register_mod_objs mp') objs; ModSubstObjs.get mp | _ -> assert false (* Only inner parts of module types should be missing *) let () = ModSubstObjs.set_missing_handler handle_missing_substobjs (** {6 From module expression to substitutive objects} *) (** Turn a chain of [MSEapply] into the head ModPath.t and the list of ModPath.t parameters (deepest param coming first). The left part of a [MSEapply] must be either [MSEident] or another [MSEapply]. *) let get_applications mexpr = let rec get params = function | MEident mp -> mp, params | MEapply (fexpr, mp) -> get (mp::params) fexpr | MEwith _ -> user_err Pp.(str "Non-atomic functor application.") in get [] mexpr (** Create the substitution corresponding to some functor applications *) let rec compute_subst env mbids sign mp_l inl = match mbids,mp_l with | _,[] -> mbids,empty_subst | [],r -> user_err Pp.(str "Application of a functor with too few arguments.") | mbid::mbids,mp::mp_l -> let farg_id, farg_b, fbody_b = Modops.destr_functor sign in let mb = Environ.lookup_module mp env in let mbid_left,subst = compute_subst env mbids fbody_b mp_l inl in let resolver = if Modops.is_functor mb.mod_type then empty_delta_resolver else Modops.inline_delta_resolver env inl mp farg_id farg_b mb.mod_delta in mbid_left,join (map_mbid mbid mp resolver) subst (** Create the objects of a "with Module" structure. 
*) let rec replace_module_object idl mp0 objs0 mp1 objs1 = match idl, objs0 with | _,[] -> [] | id::idl,(id',obj)::tail when Id.equal id id' -> begin match obj with | ModuleObject sobjs -> let mp_id = MPdot(mp0, Label.of_id id) in let objs = match idl with | [] -> subst_objects (map_mp mp1 mp_id empty_delta_resolver) objs1 | _ -> let objs_id = expand_sobjs sobjs in replace_module_object idl mp_id objs_id mp1 objs1 in (id, ModuleObject ([], Objs objs))::tail | _ -> assert false end | idl,lobj::tail -> lobj::replace_module_object idl mp0 tail mp1 objs1 let type_of_mod mp env = function |true -> (Environ.lookup_module mp env).mod_type |false -> (Environ.lookup_modtype mp env).mod_type let rec get_module_path = function |MEident mp -> mp |MEwith (me,_) -> get_module_path me |MEapply (me,_) -> get_module_path me (** Substitutive objects of a module expression (or module type) *) let rec get_module_sobjs is_mod env inl = function |MEident mp -> begin match ModSubstObjs.get mp with |(mbids,Objs _) when not (ModPath.is_bound mp) -> (mbids,Ref (mp, empty_subst)) (* we create an alias *) |sobjs -> sobjs end |MEwith (mty, WithDef _) -> get_module_sobjs is_mod env inl mty |MEwith (mty, WithMod (idl,mp1)) -> assert (not is_mod); let sobjs0 = get_module_sobjs is_mod env inl mty in assert (sobjs_no_functor sobjs0); (* For now, we expanse everything, to be safe *) let mp0 = get_module_path mty in let objs0 = expand_sobjs sobjs0 in let objs1 = expand_sobjs (ModSubstObjs.get mp1) in ([], Objs (replace_module_object idl mp0 objs0 mp1 objs1)) |MEapply _ as me -> let mp1, mp_l = get_applications me in let mbids, aobjs = get_module_sobjs is_mod env inl (MEident mp1) in let typ = type_of_mod mp1 env is_mod in let mbids_left,subst = compute_subst env mbids typ mp_l inl in (mbids_left, subst_aobjs subst aobjs) let get_functor_sobjs is_mod env inl (params,mexpr) = let (mbids, aobjs) = get_module_sobjs is_mod env inl mexpr in (List.map fst params @ mbids, aobjs) (** {6 Handling of module parameters} *) (** For printing modules, [process_module_binding] adds names of bound module (and its components) to Nametab. It also loads objects associated to it. *) let process_module_binding mbid me = let dir = DirPath.make [MBId.to_id mbid] in let mp = MPbound mbid in let sobjs = get_module_sobjs false (Global.env()) (default_inline ()) me in let subst = map_mp (get_module_path me) mp empty_delta_resolver in let sobjs = subst_sobjs subst sobjs in do_module false load_objects 1 dir mp sobjs [] (** Process a declaration of functor parameter(s) (Id1 .. Idn : Typ) i.e. possibly multiple names with the same module type. Global environment is updated on the fly. Objects in these parameters are also loaded. Output is accumulated on top of [acc] (in reverse order). 
*) let intern_arg (acc, cst) (idl,(typ,ann)) = let inl = inl2intopt ann in let lib_dir = Lib.library_dp() in let env = Global.env() in let (mty, _, cst') = Modintern.interp_module_ast env Modintern.ModType typ in let () = Global.push_context_set ~strict:true cst' in let env = Global.env () in let sobjs = get_module_sobjs false env inl mty in let mp0 = get_module_path mty in let fold acc {CAst.v=id} = let dir = DirPath.make [id] in let mbid = MBId.make lib_dir id in let mp = MPbound mbid in let resolver = Global.add_module_parameter mbid mty inl in let sobjs = subst_sobjs (map_mp mp0 mp resolver) sobjs in do_module false load_objects 1 dir mp sobjs []; (mbid,mty,inl)::acc in let acc = List.fold_left fold acc idl in (acc, Univ.ContextSet.union cst cst') (** Process a list of declarations of functor parameters (Id11 .. Id1n : Typ1)..(Idk1 .. Idkm : Typk) Global environment is updated on the fly. The calls to [interp_modast] should be interleaved with these env updates, otherwise some "with Definition" could be rejected. Returns a list of mbids and entries (in reversed order). This used to be a [List.concat (List.map ...)], but this should be more efficient and independent of [List.map] eval order. *) let intern_args params = List.fold_left intern_arg ([], Univ.ContextSet.empty) params (** {6 Auxiliary functions concerning subtyping checks} *) let check_sub mtb sub_mtb_l = (* The constraints are checked and forgot immediately : *) ignore (List.fold_right (fun sub_mtb env -> Environ.add_constraints (Subtyping.check_subtypes env mtb sub_mtb) env) sub_mtb_l (Global.env())) (** This function checks if the type calculated for the module [mp] is a subtype of all signatures in [sub_mtb_l]. Uses only the global environment. *) let check_subtypes mp sub_mtb_l = let mb = try Global.lookup_module mp with Not_found -> assert false in let mtb = Modops.module_type_of_module mb in check_sub mtb sub_mtb_l (** Same for module type [mp] *) let check_subtypes_mt mp sub_mtb_l = let mtb = try Global.lookup_modtype mp with Not_found -> assert false in check_sub mtb sub_mtb_l (** Create a params entry. In [args], the youngest module param now comes first. *) let mk_params_entry args = List.rev_map (fun (mbid,arg_t,_) -> (mbid,arg_t)) args (** Create a functor type struct. In [args], the youngest module param now comes first. *) let mk_funct_type env args seb0 = List.fold_left (fun (seb,cst) (arg_id,arg_t,arg_inl) -> let mp = MPbound arg_id in let arg_t, cst' = Mod_typing.translate_modtype env mp arg_inl ([],arg_t) in MoreFunctor(arg_id,arg_t,seb), Univ.Constraints.union cst cst') seb0 args (** Prepare the module type list for check of subtypes *) let build_subtypes env mp args mtys = let (ctx, ans) = List.fold_left_map (fun ctx (m,ann) -> let inl = inl2intopt ann in let mte, _, ctx' = Modintern.interp_module_ast env Modintern.ModType m in let env = Environ.push_context_set ~strict:true ctx' env in let ctx = Univ.ContextSet.union ctx ctx' in let mtb, cst = Mod_typing.translate_modtype env mp inl ([],mte) in let mod_type, cst = mk_funct_type env args (mtb.mod_type,cst) in let ctx = Univ.ContextSet.add_constraints cst ctx in ctx, { mtb with mod_type }) Univ.ContextSet.empty mtys in (ans, ctx) (** {6 Current module information} This information is stored by each [start_module] for use in a later [end_module]. 
*) type current_module_info = { cur_typ : (module_struct_entry * int option) option; (** type via ":" *) cur_typs : module_type_body list (** types via "<:" *) } let default_module_info = { cur_typ = None; cur_typs = [] } let openmod_info = Summary.ref default_module_info ~name:"MODULE-INFO" (** {6 Current module type information} This information is stored by each [start_modtype] for use in a later [end_modtype]. *) let openmodtype_info = Summary.ref ([] : module_type_body list) ~name:"MODTYPE-INFO" (** {6 Modules : start, end, declare} *) module RawModOps = struct let start_module export id args res fs = let mp = Global.start_module id in let arg_entries_r, ctx = intern_args args in let () = Global.push_context_set ~strict:true ctx in let env = Global.env () in let res_entry_o, subtyps, ctx = match res with | Enforce (res,ann) -> let inl = inl2intopt ann in let (mte, _, ctx) = Modintern.interp_module_ast env Modintern.ModType res in let env = Environ.push_context_set ~strict:true ctx env in (* We check immediately that mte is well-formed *) let _, _, _, cst = Mod_typing.translate_mse env None inl mte in let ctx = Univ.ContextSet.add_constraints cst ctx in Some (mte, inl), [], ctx | Check resl -> let typs, ctx = build_subtypes env mp arg_entries_r resl in None, typs, ctx in let () = Global.push_context_set ~strict:true ctx in openmod_info := { cur_typ = res_entry_o; cur_typs = subtyps }; let prefix = Lib.start_module export id mp fs in Nametab.(push_dir (Until 1) (prefix.obj_dir) (GlobDirRef.DirOpenModule prefix)); mp let end_module () = let oldoname,oldprefix,fs,lib_stack = Lib.end_module () in let substitute, keep, special = Lib.classify_segment lib_stack in let m_info = !openmod_info in (* For sealed modules, we use the substitutive objects of their signatures *) let sobjs0, keep, special = match m_info.cur_typ with | None -> ([], Objs substitute), keep, special | Some (mty, inline) -> get_module_sobjs false (Global.env()) inline mty, [], [] in let id = basename (fst oldoname) in let mp,mbids,resolver = Global.end_module fs id m_info.cur_typ in let sobjs = let (ms,objs) = sobjs0 in (mbids@ms,objs) in check_subtypes mp m_info.cur_typs; (* We substitute objects if the module is sealed by a signature *) let sobjs = match m_info.cur_typ with | None -> sobjs | Some (mty, _) -> subst_sobjs (map_mp (get_module_path mty) mp resolver) sobjs in let node = ModuleObject sobjs in (* We add the keep objects, if any, and if this isn't a functor *) let objects = match keep, mbids with | [], _ | _, _ :: _ -> special@[node] | _ -> special@[node;KeepObject keep] in let newoname = add_leaves id objects in (* Name consistency check : start_ vs. end_module, kernel vs. library *) assert (eq_full_path (fst newoname) (fst oldoname)); assert (ModPath.equal (mp_of_kn (snd newoname)) mp); mp (* TODO cleanup push universes directly to global env *) let declare_module id args res mexpr_o fs = (* We simulate the beginning of an interactive module, then we adds the module parameters to the global env. 
*) let mp = Global.start_module id in let arg_entries_r, ctx = intern_args args in let params = mk_params_entry arg_entries_r in let env = Global.env () in let env = Environ.push_context_set ~strict:true ctx env in let mty_entry_o, subs, inl_res, ctx' = match res with | Enforce (mty,ann) -> let inl = inl2intopt ann in let (mte, _, ctx) = Modintern.interp_module_ast env Modintern.ModType mty in let env = Environ.push_context_set ~strict:true ctx env in (* We check immediately that mte is well-formed *) let _, _, _, cst = Mod_typing.translate_mse env None inl mte in let ctx = Univ.ContextSet.add_constraints cst ctx in Some mte, [], inl, ctx | Check mtys -> let typs, ctx = build_subtypes env mp arg_entries_r mtys in None, typs, default_inline (), ctx in let env = Environ.push_context_set ~strict:true ctx' env in let ctx = Univ.ContextSet.union ctx ctx' in let mexpr_entry_o, inl_expr, ctx' = match mexpr_o with | None -> None, default_inline (), Univ.ContextSet.empty | Some (mexpr,ann) -> let (mte, _, ctx) = Modintern.interp_module_ast env Modintern.Module mexpr in Some mte, inl2intopt ann, ctx in let env = Environ.push_context_set ~strict:true ctx' env in let ctx = Univ.ContextSet.union ctx ctx' in let entry = match mexpr_entry_o, mty_entry_o with | None, None -> assert false (* No body, no type ... *) | None, Some typ -> MType (params, typ) | Some body, otyp -> MExpr (params, body, otyp) in let sobjs, mp0 = match entry with | MType (_,mte) | MExpr (_,_,Some mte) -> get_functor_sobjs false env inl_res (params,mte), get_module_path mte | MExpr (_,me,None) -> get_functor_sobjs true env inl_expr (params,me), get_module_path me in (* Undo the simulated interactive building of the module and declare the module as a whole *) Summary.unfreeze_summaries fs; let inl = match inl_expr with | None -> None | _ -> inl_res in let () = Global.push_context_set ~strict:true ctx in let mp_env,resolver = Global.add_module id entry inl in (* Name consistency check : kernel vs. library *) assert (ModPath.equal mp (mp_of_kn (Lib.make_kn id))); assert (ModPath.equal mp mp_env); check_subtypes mp subs; let sobjs = subst_sobjs (map_mp mp0 mp resolver) sobjs in ignore (add_leaf id (ModuleObject sobjs)); mp end (** {6 Module types : start, end, declare} *) module RawModTypeOps = struct let start_modtype id args mtys fs = let mp = Global.start_modtype id in let arg_entries_r, cst = intern_args args in let () = Global.push_context_set ~strict:true cst in let env = Global.env () in let sub_mty_l, cst = build_subtypes env mp arg_entries_r mtys in let () = Global.push_context_set ~strict:true cst in openmodtype_info := sub_mty_l; let prefix = Lib.start_modtype id mp fs in Nametab.(push_dir (Until 1) (prefix.obj_dir) (GlobDirRef.DirOpenModtype prefix)); mp let end_modtype () = let oldoname,prefix,fs,lib_stack = Lib.end_modtype () in let id = basename (fst oldoname) in let substitute, _, special = Lib.classify_segment lib_stack in let sub_mty_l = !openmodtype_info in let mp, mbids = Global.end_modtype fs id in let modtypeobjs = (mbids, Objs substitute) in check_subtypes_mt mp sub_mty_l; let oname = add_leaves id (special@[ModuleTypeObject modtypeobjs]) in (* Check name consistence : start_ vs. end_modtype, kernel vs. library *) assert (eq_full_path (fst oname) (fst oldoname)); assert (ModPath.equal (mp_of_kn (snd oname)) mp); mp let declare_modtype id args mtys (mty,ann) fs = let inl = inl2intopt ann in (* We simulate the beginning of an interactive module, then we adds the module parameters to the global env. 
*) let mp = Global.start_modtype id in let arg_entries_r, arg_ctx = intern_args args in let () = Global.push_context_set ~strict:true arg_ctx in let params = mk_params_entry arg_entries_r in let env = Global.env () in let mte, _, mte_ctx = Modintern.interp_module_ast env Modintern.ModType mty in let () = Global.push_context_set ~strict:true mte_ctx in let env = Global.env () in (* We check immediately that mte is well-formed *) let _, _, _, mte_cst = Mod_typing.translate_mse env None inl mte in let () = Global.push_context_set ~strict:true (Univ.Level.Set.empty,mte_cst) in let env = Global.env () in let entry = params, mte in let sub_mty_l, sub_mty_ctx = build_subtypes env mp arg_entries_r mtys in let () = Global.push_context_set ~strict:true sub_mty_ctx in let env = Global.env () in let sobjs = get_functor_sobjs false env inl entry in let subst = map_mp (get_module_path (snd entry)) mp empty_delta_resolver in let sobjs = subst_sobjs subst sobjs in (* Undo the simulated interactive building of the module type and declare the module type as a whole *) Summary.unfreeze_summaries fs; (* We enrich the global environment *) let () = Global.push_context_set ~strict:true arg_ctx in let () = Global.push_context_set ~strict:true mte_ctx in let () = Global.push_context_set ~strict:true (Univ.Level.Set.empty,mte_cst) in let () = Global.push_context_set ~strict:true sub_mty_ctx in let mp_env = Global.add_modtype id entry inl in (* Name consistency check : kernel vs. library *) assert (ModPath.equal mp_env mp); (* Subtyping checks *) check_subtypes_mt mp sub_mty_l; ignore (add_leaf id (ModuleTypeObject sobjs)); mp end (** {6 Include} *) module RawIncludeOps = struct let rec include_subst env mp reso mbids sign inline = match mbids with | [] -> empty_subst | mbid::mbids -> let farg_id, farg_b, fbody_b = Modops.destr_functor sign in let subst = include_subst env mp reso mbids fbody_b inline in let mp_delta = Modops.inline_delta_resolver env inline mp farg_id farg_b reso in join (map_mbid mbid mp mp_delta) subst let rec decompose_functor mpl typ = match mpl, typ with | [], _ -> typ | _::mpl, MoreFunctor(_,_,str) -> decompose_functor mpl str | _ -> user_err Pp.(str "Application of a functor with too much arguments.") exception NoIncludeSelf let type_of_incl env is_mod = function |MEident mp -> type_of_mod mp env is_mod |MEapply _ as me -> let mp0, mp_l = get_applications me in decompose_functor mp_l (type_of_mod mp0 env is_mod) |MEwith _ -> raise NoIncludeSelf let declare_one_include (me_ast,annot) = let env = Global.env() in let me, kind, cst = Modintern.interp_module_ast env Modintern.ModAny me_ast in let () = Global.push_context_set ~strict:true cst in let env = Global.env () in let is_mod = (kind == Modintern.Module) in let cur_mp = Lib.current_mp () in let inl = inl2intopt annot in let mbids,aobjs = get_module_sobjs is_mod env inl me in let subst_self = try if List.is_empty mbids then raise NoIncludeSelf; let typ = type_of_incl env is_mod me in let reso,_ = Safe_typing.delta_of_senv (Global.safe_env ()) in include_subst env cur_mp reso mbids typ inl with NoIncludeSelf -> empty_subst in let base_mp = get_module_path me in let resolver = Global.add_include me is_mod inl in let subst = join subst_self (map_mp base_mp cur_mp resolver) in let aobjs = subst_aobjs subst aobjs in ignore (add_leaf (Lib.current_mod_id ()) (IncludeObject aobjs)) let declare_include me_asts = List.iter declare_one_include me_asts end (** {6 Module operations handling summary freeze/unfreeze} *) let protect_summaries f = let fs = 
Summary.freeze_summaries ~marshallable:false in try f fs with reraise -> (* Something wrong: undo the whole process *) let reraise = Exninfo.capture reraise in let () = Summary.unfreeze_summaries fs in Exninfo.iraise reraise let start_module export id args res = protect_summaries (RawModOps.start_module export id args res) let end_module = RawModOps.end_module let declare_module id args mtys me_l = let declare_me fs = match me_l with | [] -> RawModOps.declare_module id args mtys None fs | [me] -> RawModOps.declare_module id args mtys (Some me) fs | me_l -> ignore (RawModOps.start_module None id args mtys fs); RawIncludeOps.declare_include me_l; RawModOps.end_module () in protect_summaries declare_me let start_modtype id args mtys = protect_summaries (RawModTypeOps.start_modtype id args mtys) let end_modtype = RawModTypeOps.end_modtype let declare_modtype id args mtys mty_l = let declare_mt fs = match mty_l with | [] -> assert false | [mty] -> RawModTypeOps.declare_modtype id args mtys mty fs | mty_l -> ignore (RawModTypeOps.start_modtype id args mtys fs); RawIncludeOps.declare_include mty_l; RawModTypeOps.end_modtype () in protect_summaries declare_mt let declare_include me_asts = if Global.sections_are_opened () then user_err Pp.(str "Include is not allowed inside sections."); protect_summaries (fun _ -> RawIncludeOps.declare_include me_asts) (** {6 Libraries} *) type library_name = DirPath.t (** A library object is made of some substitutive objects and some "keep" objects. *) type library_objects = Lib.lib_objects * Lib.lib_objects (** For the native compiler, we cache the library values *) let register_library dir cenv (objs:library_objects) digest univ = let mp = MPfile dir in let () = try (* Is this library already loaded ? *) ignore(Global.lookup_module mp); with Not_found -> (* If not, let's do it now ... 
*) let mp' = Global.import cenv univ digest in if not (ModPath.equal mp mp') then anomaly (Pp.str "Unexpected disk module name."); in let sobjs,keepobjs = objs in do_module false load_objects 1 dir mp ([],Objs sobjs) keepobjs let start_library dir = let mp = Global.start_library dir in openmod_info := default_module_info; Lib.start_compilation dir mp let end_library_hook = ref ignore let append_end_library_hook f = let old_f = !end_library_hook in end_library_hook := fun () -> old_f(); f () let end_library ~output_native_objects dir = !end_library_hook(); let oname = Lib.end_compilation_checks dir in let mp,cenv,ast = Global.export ~output_native_objects dir in let prefix, lib_stack = Lib.end_compilation oname in assert (ModPath.equal mp (MPfile dir)); let substitute, keep, _ = Lib.classify_segment lib_stack in cenv,(substitute,keep),ast let import_modules ~export mpl = let _,objs = collect_modules mpl (MPmap.empty, []) in List.iter (fun (f,o) -> open_object f 1 o) objs; if export then Lib.add_anonymous_entry (Lib.Leaf (ExportObject { mpl })) let import_module f ~export mp = import_modules ~export [f,mp] (** {6 Iterators} *) let iter_all_segments f = let rec apply_obj prefix (id,obj) = match obj with | IncludeObject aobjs -> let objs = expand_aobjs aobjs in List.iter (apply_obj prefix) objs | _ -> f (Lib.make_oname prefix id) obj in let apply_mod_obj _ modobjs = let prefix = modobjs.module_prefix in List.iter (apply_obj prefix) modobjs.module_substituted_objects; List.iter (apply_obj prefix) modobjs.module_keep_objects in let apply_node = function | sp, Lib.Leaf o -> f sp o | _ -> () in MPmap.iter apply_mod_obj (ModObjs.all ()); List.iter apply_node (Lib.contents ()) (** {6 Some types used to shorten declaremods.mli} *) type module_params = (lident list * (Constrexpr.module_ast * inline)) list (** {6 Debug} *) let debug_print_modtab _ = let pr_seg = function | [] -> str "[]" | l -> str "[." ++ int (List.length l) ++ str ".]" in let pr_modinfo mp modobjs s = let objs = modobjs.module_substituted_objects @ modobjs.module_keep_objects in s ++ str (ModPath.to_string mp) ++ (spc ()) ++ (pr_seg (Lib.segment_of_objects modobjs.module_prefix objs)) in let modules = MPmap.fold pr_modinfo (ModObjs.all ()) (mt ()) in hov 0 modules coq-8.15.0/vernac/declaremods.mli000066400000000000000000000101651417001151100166240ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* module_params -> (Constrexpr.module_ast * inline) module_signature -> (Constrexpr.module_ast * inline) list -> ModPath.t val start_module : bool option -> Id.t -> module_params -> (Constrexpr.module_ast * inline) module_signature -> ModPath.t val end_module : unit -> ModPath.t (** {6 Module types } *) (** [declare_modtype interp_modast id fargs typs exprs] Similar to [declare_module], except that the types could be multiple *) val declare_modtype : Id.t -> module_params -> (Constrexpr.module_ast * inline) list -> (Constrexpr.module_ast * inline) list -> ModPath.t val start_modtype : Id.t -> module_params -> (Constrexpr.module_ast * inline) list -> ModPath.t val end_modtype : unit -> ModPath.t (** {6 Libraries i.e. 
modules on disk } *) type library_name = DirPath.t type library_objects val register_library : library_name -> Safe_typing.compiled_library -> library_objects -> Safe_typing.vodigest -> Univ.ContextSet.t -> unit val start_library : library_name -> unit val end_library : output_native_objects:bool -> library_name -> Safe_typing.compiled_library * library_objects * Nativelib.native_library (** append a function to be executed at end_library *) val append_end_library_hook : (unit -> unit) -> unit (** [import_module export mp] imports the module [mp]. It modifies Nametab and performs the [open_object] function for every object of the module. Raises [Not_found] when [mp] is unknown or when [mp] corresponds to a functor. If [export] is [true], the module is also opened every time the module containing it is. *) val import_module : Libobject.open_filter -> export:bool -> ModPath.t -> unit (** Same as [import_module] but for multiple modules, and more optimized than iterating [import_module]. *) val import_modules : export:bool -> (Libobject.open_filter * ModPath.t) list -> unit (** Include *) val declare_include : (Constrexpr.module_ast * inline) list -> unit (** {6 ... } *) (** [iter_all_segments] iterate over all segments, the modules' segments first and then the current segment. Modules are presented in an arbitrary order. The given function is applied to all leaves (together with their section path). *) val iter_all_segments : (Libobject.object_name -> Libobject.t -> unit) -> unit val debug_print_modtab : unit -> Pp.t (** For printing modules, [process_module_binding] adds names of bound module (and its components) to Nametab. It also loads objects associated to it. It may raise a [Failure] when the bound module hasn't an atomic type. *) val process_module_binding : MBId.t -> Declarations.module_alg_expr -> unit coq-8.15.0/vernac/dune000066400000000000000000000004041417001151100145100ustar00rootroot00000000000000(library (name vernac) (synopsis "Coq's Vernacular Language") (public_name coq-core.vernac) (wrapped false) ; until ocaml/dune#4892 fixed ; (private_modules comProgramFixpoint egramcoq) (libraries tactics parsing)) (coq.pp (modules g_proofs g_vernac)) coq-8.15.0/vernac/egramcoq.ml000066400000000000000000000640551417001151100157760ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* levels, String.Map.find "constr" levels | InCustomEntry s -> try levels, String.Map.find s levels with Not_found -> String.Map.add s ([],[]) levels, ([],[]) let save_levels levels custom lev = let s = match custom with InConstrEntry -> "constr" | InCustomEntry s -> s in String.Map.add s lev levels (* At a same level, LeftA takes precedence over RightA and NoneA *) (* In case, several associativity exists for a level, we make two levels, *) (* first LeftA, then RightA and NoneA together *) let admissible_assoc = function | Gramlib.Gramext.LeftA, Some (Gramlib.Gramext.RightA | Gramlib.Gramext.NonA) -> false | Gramlib.Gramext.RightA, Some Gramlib.Gramext.LeftA -> false | _ -> true let create_assoc = function | None -> Gramlib.Gramext.RightA | Some a -> a let error_level_assoc p current expected = let open Pp in let pr_assoc = function | Gramlib.Gramext.LeftA -> str "left" | Gramlib.Gramext.RightA -> str "right" | Gramlib.Gramext.NonA -> str "non" in user_err (str "Level " ++ int p ++ str " is already declared " ++ pr_assoc current ++ str " associative 
while it is now expected to be " ++ pr_assoc expected ++ str " associative.") type position = NewFirst | NewAfter of int | ReuseFirst | ReuseLevel of int let create_pos = function | None -> NewFirst | Some lev -> NewAfter lev let find_position_gen current ensure assoc lev = match lev with | None -> current, (ReuseFirst, None, None, None) | Some n -> let after = ref None in let init = ref None in let rec add_level q = function | (p,_,_ as pa)::l when p > n -> pa :: add_level (Some p) l | (p,a,reinit)::l when Int.equal p n -> if reinit then let a' = create_assoc assoc in (init := Some (a', q); (p,a',false)::l) else if admissible_assoc (a,assoc) then raise Exit else error_level_assoc p a (Option.get assoc) | l -> after := q; (n,create_assoc assoc,ensure)::l in try let updated = add_level None current in let assoc = create_assoc assoc in begin match !init with | None -> (* Create the entry *) updated, (create_pos !after, Some assoc, Some (constr_level n), None) | _ -> (* The reinit flag has been updated *) updated, (ReuseLevel n, None, None, !init) end with (* Nothing has changed *) Exit -> (* Just inherit the existing associativity and name (None) *) current, (ReuseLevel n, None, None, None) let rec list_mem_assoc_triple x = function | [] -> false | (a,b,c) :: l -> Int.equal a x || list_mem_assoc_triple x l let register_empty_levels accu forpat levels = let rec filter accu = function | [] -> ([], accu) | (where,n) :: rem -> let rem, accu = filter accu rem in let accu, (clev, plev) = find_levels accu where in let levels = if forpat then plev else clev in if not (list_mem_assoc_triple n levels) then let nlev, ans = find_position_gen levels true None (Some n) in let nlev = if forpat then (clev, nlev) else (nlev, plev) in (where, ans) :: rem, save_levels accu where nlev else rem, accu in let (l,accu) = filter accu levels in List.rev l, accu let find_position accu custom forpat assoc level = let accu, (clev, plev) = find_levels accu custom in let levels = if forpat then plev else clev in let nlev, ans = find_position_gen levels false assoc level in let nlev = if forpat then (clev, nlev) else (nlev, plev) in (ans, save_levels accu custom nlev) (**************************************************************************) (* * --- Note on the mapping of grammar productions to camlp5 actions --- * * Translation of environments: a production * [ nt1(x1) ... nti(xi) ] -> act(x1..xi) * is written (with camlp5 conventions): * (fun vi -> .... (fun v1 -> act(v1 .. vi) )..) * where v1..vi are the values generated by non-terminals nt1..nti. * Since the actions are executed by substituting an environment, * the make_*_action family build the following closure: * * ((fun env -> * (fun vi -> * (fun env -> ... * * (fun v1 -> * (fun env -> gram_action .. env act) * ((x1,v1)::env)) * ...) 
* ((xi,vi)::env))) * []) *) (**********************************************************************) (** Declare Notations grammar rules *) (**********************************************************************) (* Binding constr entry keys to entries *) (* Camlp5 levels do not treat NonA: use RightA with a NEXT on the left *) let camlp5_assoc = let open Gramlib.Gramext in function | Some NonA | Some RightA -> RightA | None | Some LeftA -> LeftA let assoc_eq al ar = let open Gramlib.Gramext in match al, ar with | NonA, NonA | RightA, RightA | LeftA, LeftA -> true | _, _ -> false (** [adjust_level assoc from prod] where [assoc] and [from] are the name and associativity of the level where to add the rule; the meaning of the result is DefaultLevel = entry name NextLevel = NEXT NumLevel n = constr LEVEL n *) let adjust_level custom assoc (custom',from) p = let open Gramlib.Gramext in match p with (* If a level in a different grammar, no other choice than denoting it by absolute level *) | (NumLevel n,_) when not (Notation.notation_entry_eq custom custom') -> NumLevel n (* If a default level in a different grammar, the entry name is ok *) | (DefaultLevel,InternalProd) -> if Notation.notation_entry_eq custom InConstrEntry then NumLevel 200 else DefaultLevel | (DefaultLevel,BorderProd _) when not (Notation.notation_entry_eq custom custom') -> if Notation.notation_entry_eq custom InConstrEntry then NumLevel 200 else DefaultLevel (* Associativity is None means force the level *) | (NumLevel n,BorderProd (_,None)) -> NumLevel n | (DefaultLevel,BorderProd (_,None)) -> assert false (* Compute production name on the right side *) (* If NonA or LeftA on the right-hand side, set to NEXT *) | ((NumLevel _ | DefaultLevel),BorderProd (Right,Some (NonA|LeftA))) -> NextLevel (* If RightA on the right-hand side, set to the explicit (current) level *) | (NumLevel n,BorderProd (Right,Some RightA)) -> NumLevel n | (DefaultLevel,BorderProd (Right,Some RightA)) -> NumLevel from (* Compute production name on the left side *) (* If NonA on the left-hand side, adopt the current assoc ?? 
*) | ((NumLevel _ | DefaultLevel),BorderProd (Left,Some NonA)) -> DefaultLevel (* If the expected assoc is the current one, set to SELF *) | ((NumLevel _ | DefaultLevel),BorderProd (Left,Some a)) when assoc_eq a (camlp5_assoc assoc) -> DefaultLevel (* Otherwise, force the level, n or n-1, according to expected assoc *) | (NumLevel n,BorderProd (Left,Some LeftA)) -> NumLevel n | ((NumLevel _ | DefaultLevel),BorderProd (Left,Some _)) -> NextLevel (* None means NEXT *) | (NextLevel,_) -> assert (Notation.notation_entry_eq custom custom'); NextLevel (* Compute production name elsewhere *) | (NumLevel n,InternalProd) -> if from = n + 1 then NextLevel else NumLevel n type _ target = | ForConstr : constr_expr target | ForPattern : cases_pattern_expr target type prod_info = production_level * production_position type (_, _) entry = | TTIdent : ('self, lident) entry | TTName : ('self, lname) entry | TTReference : ('self, qualid) entry | TTBigint : ('self, string) entry | TTBinder : bool -> ('self, kinded_cases_pattern_expr) entry | TTConstr : notation_entry * prod_info * 'r target -> ('r, 'r) entry | TTConstrList : notation_entry * prod_info * (bool * string) list * 'r target -> ('r, 'r list) entry | TTPattern : int -> ('self, cases_pattern_expr) entry | TTOpenBinderList : ('self, local_binder_expr list) entry | TTClosedBinderList : (bool * string) list -> ('self, local_binder_expr list list) entry type _ any_entry = TTAny : ('s, 'r) entry -> 's any_entry let constr_custom_entry : (string, Constrexpr.constr_expr) entry_command = create_entry_command "constr" (fun s st -> [s], st) let pattern_custom_entry : (string, Constrexpr.cases_pattern_expr) entry_command = create_entry_command "pattern" (fun s st -> [s], st) let custom_entry_locality = Summary.ref ~name:"LOCAL-CUSTOM-ENTRY" String.Set.empty (** If the entry is present then local *) let create_custom_entry ~local s = if List.mem s ["constr";"pattern";"ident";"global";"binder";"bigint"] then user_err Pp.(quote (str s) ++ str " is a reserved entry name."); let sc = "custom:"^s in let sp = "custom_pattern:"^s in let _ = extend_entry_command constr_custom_entry sc in let _ = extend_entry_command pattern_custom_entry sp in let () = if local then custom_entry_locality := String.Set.add s !custom_entry_locality in () let find_custom_entry s = let sc = "custom:"^s in let sp = "custom_pattern:"^s in try (find_custom_entry constr_custom_entry sc, find_custom_entry pattern_custom_entry sp) with Not_found -> user_err Pp.(str "Undeclared custom entry: " ++ str s ++ str ".") let exists_custom_entry s = match find_custom_entry s with | _ -> true | exception _ -> false let locality_of_custom_entry s = String.Set.mem s !custom_entry_locality (* This computes the name of the level where to add a new rule *) let interp_constr_entry_key : type r. _ -> r target -> int -> r Entry.t * int option = fun custom forpat level -> match custom with | InCustomEntry s -> (let (entry_for_constr, entry_for_patttern) = find_custom_entry s in match forpat with | ForConstr -> entry_for_constr, Some level | ForPattern -> entry_for_patttern, Some level) | InConstrEntry -> match forpat with | ForConstr -> if level = 200 then Constr.binder_constr, None else Constr.term, Some level | ForPattern -> Constr.pattern, Some level let target_entry : type s. 
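(* Worked example for [interp_constr_entry_key] above, read off its
   clauses (the concrete levels are illustrations only): a term
   notation in the standard [constr] entry at level 50 is attached to
   [Constr.term] at level 50; at level 200 it goes to
   [Constr.binder_constr] with no level constraint; the corresponding
   pattern rule always goes to [Constr.pattern] at the given level;
   and a notation in a custom entry is attached to the entries
   created by [create_custom_entry], again at its declared level. *)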
notation_entry -> s target -> s Entry.t = function | InConstrEntry -> (function | ForConstr -> Constr.term | ForPattern -> Constr.pattern) | InCustomEntry s -> let (entry_for_constr, entry_for_patttern) = find_custom_entry s in function | ForConstr -> entry_for_constr | ForPattern -> entry_for_patttern let is_self custom (custom',from) e = Notation.notation_entry_eq custom custom' && match e with | (NumLevel n, BorderProd (Right, _ (* Some(NonA|LeftA) *))) -> false | (NumLevel n, BorderProd (Left, _)) -> Int.equal from n | _ -> false let is_binder_level custom (custom',from) e = match e with | (NumLevel 200, (BorderProd (Right, _) | InternalProd)) -> custom = InConstrEntry && custom' = InConstrEntry && from = 200 | _ -> false let make_pattern (keyword,s) = if keyword then TPattern (Tok.PKEYWORD s) else match NumTok.Unsigned.parse_string s with | Some n -> TPattern (Tok.PNUMBER (Some n)) | None -> TPattern (Tok.PIDENT (Some s)) let make_sep_rules tkl = Pcoq.Symbol.tokens (List.map make_pattern tkl) type ('s, 'a) mayrec_symbol = | MayRecNo : ('s, Gramlib.Grammar.norec, 'a) Symbol.t -> ('s, 'a) mayrec_symbol | MayRecMay : ('s, Gramlib.Grammar.mayrec, 'a) Symbol.t -> ('s, 'a) mayrec_symbol let symbol_of_target : type s. _ -> _ -> _ -> _ -> s target -> (s, s) mayrec_symbol = fun custom p assoc from forpat -> if is_binder_level custom from p then (* Prevent self *) MayRecNo (Pcoq.Symbol.nterml (target_entry custom forpat) "200") else if is_self custom from p then MayRecMay Pcoq.Symbol.self else let g = target_entry custom forpat in let lev = adjust_level custom assoc from p in begin match lev with | DefaultLevel -> MayRecNo (Pcoq.Symbol.nterm g) | NextLevel -> MayRecMay Pcoq.Symbol.next | NumLevel lev -> MayRecNo (Pcoq.Symbol.nterml g (string_of_int lev)) end let symbol_of_entry : type s r. 
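(* Illustration of [symbol_of_target] and [adjust_level], following
   the comments in [adjust_level] (the notation is illustrative; the
   production levels themselves are computed by the notation
   machinery, not here): for an infix notation such as "_ + _"
   declared in the [constr] entry at level 50 with left
   associativity, the left-border occurrence, whose production level
   equals the level of the rule, is caught by [is_self] and becomes
   the camlp5 SELF symbol, while the right-border occurrence is
   adjusted to NextLevel and hence to [Pcoq.Symbol.next].  This
   SELF ... NEXT shape is the usual camlp5 encoding of a
   left-associative level. *)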
_ -> _ -> (s, r) entry -> (s, r) mayrec_symbol = fun assoc from typ -> match typ with | TTConstr (s, p, forpat) -> symbol_of_target s p assoc from forpat | TTConstrList (s, typ', [], forpat) -> begin match symbol_of_target s typ' assoc from forpat with | MayRecNo s -> MayRecNo (Pcoq.Symbol.list1 s) | MayRecMay s -> MayRecMay (Pcoq.Symbol.list1 s) end | TTConstrList (s, typ', tkl, forpat) -> begin match symbol_of_target s typ' assoc from forpat with | MayRecNo s -> MayRecNo (Pcoq.Symbol.list1sep s (make_sep_rules tkl) false) | MayRecMay s -> MayRecMay (Pcoq.Symbol.list1sep s (make_sep_rules tkl) false) end | TTPattern p -> MayRecNo (Pcoq.Symbol.nterml Constr.pattern (string_of_int p)) | TTClosedBinderList [] -> MayRecNo (Pcoq.Symbol.list1 (Pcoq.Symbol.nterm Constr.binder)) | TTClosedBinderList tkl -> MayRecNo (Pcoq.Symbol.list1sep (Pcoq.Symbol.nterm Constr.binder) (make_sep_rules tkl) false) | TTIdent -> MayRecNo (Pcoq.Symbol.nterm Prim.identref) | TTName -> MayRecNo (Pcoq.Symbol.nterm Prim.name) | TTBinder true -> MayRecNo (Pcoq.Symbol.nterm Constr.one_open_binder) | TTBinder false -> MayRecNo (Pcoq.Symbol.nterm Constr.one_closed_binder) | TTOpenBinderList -> MayRecNo (Pcoq.Symbol.nterm Constr.open_binders) | TTBigint -> MayRecNo (Pcoq.Symbol.nterm Prim.bignat) | TTReference -> MayRecNo (Pcoq.Symbol.nterm Constr.global) let interp_entry forpat e = match e with | ETProdIdent -> TTAny TTIdent | ETProdName -> TTAny TTName | ETProdReference -> TTAny TTReference | ETProdBigint -> TTAny TTBigint | ETProdOneBinder o -> TTAny (TTBinder o) | ETProdConstr (s,p) -> TTAny (TTConstr (s, p, forpat)) | ETProdPattern p -> TTAny (TTPattern p) | ETProdConstrList (s, p, tkl) -> TTAny (TTConstrList (s, p, tkl, forpat)) | ETProdBinderList ETBinderOpen -> TTAny TTOpenBinderList | ETProdBinderList (ETBinderClosed tkl) -> TTAny (TTClosedBinderList tkl) let cases_pattern_expr_of_id { CAst.loc; v = id } = CAst.make ?loc @@ CPatAtom (Some (qualid_of_ident ?loc id)) let cases_pattern_expr_of_name { CAst.loc; v = na } = CAst.make ?loc @@ match na with | Anonymous -> CPatAtom None | Name id -> CPatAtom (Some (qualid_of_ident ?loc id)) type 'r env = { constrs : 'r list; constrlists : 'r list list; binders : kinded_cases_pattern_expr list; binderlists : local_binder_expr list list; } let push_constr subst v = { subst with constrs = v :: subst.constrs } let push_item : type s r. 
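(* The [env] record above is the accumulator filled while parsing one
   notation instance: [constrs] collects the parsed terms,
   [constrlists] the term lists of recursive notations, [binders] the
   parsed binders and [binderlists] the binder blocks.  [push_item]
   below dispatches each freshly parsed value to the appropriate
   field according to the typed entry it was parsed with, and the
   resulting environment is eventually consumed by [make_act] to
   build the final CNotation / CPatNotation node. *)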
s target -> (s, r) entry -> s env -> r -> s env = fun forpat e subst v -> match e with | TTConstr _ -> push_constr subst v | TTIdent -> begin match forpat with | ForConstr -> { subst with binders = (cases_pattern_expr_of_id v, Glob_term.Explicit) :: subst.binders } | ForPattern -> push_constr subst (cases_pattern_expr_of_id v) end | TTName -> begin match forpat with | ForConstr -> { subst with binders = (cases_pattern_expr_of_name v, Glob_term.Explicit) :: subst.binders } | ForPattern -> push_constr subst (cases_pattern_expr_of_name v) end | TTPattern _ -> begin match forpat with | ForConstr -> { subst with binders = (v, Glob_term.Explicit) :: subst.binders } | ForPattern -> push_constr subst v end | TTBinder o -> { subst with binders = v :: subst.binders } | TTOpenBinderList -> { subst with binderlists = v :: subst.binderlists } | TTClosedBinderList _ -> { subst with binderlists = List.flatten v :: subst.binderlists } | TTBigint -> begin match forpat with | ForConstr -> push_constr subst (CAst.make @@ CPrim (Number (NumTok.Signed.of_int_string v))) | ForPattern -> push_constr subst (CAst.make @@ CPatPrim (Number (NumTok.Signed.of_int_string v))) end | TTReference -> begin match forpat with | ForConstr -> push_constr subst (CAst.make @@ CRef (v, None)) | ForPattern -> push_constr subst (CAst.make @@ CPatAtom (Some v)) end | TTConstrList _ -> { subst with constrlists = v :: subst.constrlists } type (_, _) ty_symbol = | TyTerm : 'a Tok.p -> ('s, 'a) ty_symbol | TyNonTerm : 's target * ('s, 'a) entry * ('s, 'a) mayrec_symbol * bool -> ('s, 'a) ty_symbol type ('self, _, 'r) ty_rule = | TyStop : ('self, 'r, 'r) ty_rule | TyNext : ('self, 'a, 'r) ty_rule * ('self, 'b) ty_symbol -> ('self, 'b -> 'a, 'r) ty_rule | TyMark : int * bool * int * ('self, 'a, 'r) ty_rule -> ('self, 'a, 'r) ty_rule type 'r gen_eval = Loc.t -> 'r env -> 'r let rec ty_eval : type s a. (s, a, Loc.t -> s) ty_rule -> s gen_eval -> s env -> a = function | TyStop -> fun f env loc -> f loc env | TyNext (rem, TyTerm _) -> fun f env _ -> ty_eval rem f env | TyNext (rem, TyNonTerm (_, _, _, false)) -> fun f env _ -> ty_eval rem f env | TyNext (rem, TyNonTerm (forpat, e, _, true)) -> fun f env v -> ty_eval rem f (push_item forpat e env v) | TyMark (n, b, p, rem) -> fun f env -> let heads, constrs = List.chop n env.constrs in let constrlists, constrs = if b then (* We rearrange constrs = c1..cn rem and constrlists = [d1..dr e1..ep] rem' into constrs = e1..ep rem and constrlists [c1..cn d1..dr] rem' *) let constrlist = List.hd env.constrlists in let constrlist, tail = List.chop (List.length constrlist - p) constrlist in (heads @ constrlist) :: List.tl env.constrlists, tail @ constrs else (* We rearrange constrs = c1..cn e1..ep rem into constrs = e1..ep rem and add a constr list [c1..cn] *) let constrlist, tail = List.chop (n - p) heads in constrlist :: env.constrlists, tail @ constrs in ty_eval rem f { env with constrs; constrlists; } type ('s, 'a, 'r) mayrec_rule = | MayRecRNo : ('s, Gramlib.Grammar.norec, 'a, 'r) Rule.t -> ('s, 'a, 'r) mayrec_rule | MayRecRMay : ('s, Gramlib.Grammar.mayrec, 'a, 'r) Rule.t -> ('s, 'a, 'r) mayrec_rule let rec ty_erase : type s a r. 
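(* [ty_erase] below forgets the semantic-action typing of a
   [ty_rule] and keeps only its grammar shape: [TyStop] becomes
   [Rule.stop], each [TyNext] becomes a [Rule.next] on the
   corresponding token or non-terminal symbol, and [TyMark] nodes,
   which only matter for the reshuffling of parsed sub-terms done in
   [ty_eval] above, are dropped.  The resulting rule is what is
   actually handed to the parser, while [ty_eval] builds the matching
   semantic action. *)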
(s, a, r) ty_rule -> (s, a, r) mayrec_rule = function | TyStop -> MayRecRNo Rule.stop | TyMark (_, _, _, r) -> ty_erase r | TyNext (rem, TyTerm tok) -> begin match ty_erase rem with | MayRecRNo rem -> MayRecRMay (Rule.next rem (Symbol.token tok)) | MayRecRMay rem -> MayRecRMay (Rule.next rem (Symbol.token tok)) end | TyNext (rem, TyNonTerm (_, _, s, _)) -> begin match ty_erase rem, s with | MayRecRNo rem, MayRecNo s -> MayRecRMay (Rule.next rem s) | MayRecRNo rem, MayRecMay s -> MayRecRMay (Rule.next rem s) | MayRecRMay rem, MayRecNo s -> MayRecRMay (Rule.next rem s) | MayRecRMay rem, MayRecMay s -> MayRecRMay (Rule.next rem s) end type ('self, 'r) any_ty_rule = | AnyTyRule : ('self, 'act, Loc.t -> 'r) ty_rule -> ('self, 'r) any_ty_rule let make_ty_rule assoc from forpat prods = let rec make_ty_rule = function | [] -> AnyTyRule TyStop | GramConstrTerminal (kw,s) :: rem -> let AnyTyRule r = make_ty_rule rem in let TPattern tk = make_pattern (kw,s) in AnyTyRule (TyNext (r, TyTerm tk)) | GramConstrNonTerminal (e, var) :: rem -> let AnyTyRule r = make_ty_rule rem in let TTAny e = interp_entry forpat e in let s = symbol_of_entry assoc from e in let bind = match var with None -> false | Some _ -> true in AnyTyRule (TyNext (r, TyNonTerm (forpat, e, s, bind))) | GramConstrListMark (n, b, p) :: rem -> let AnyTyRule r = make_ty_rule rem in AnyTyRule (TyMark (n, b, p, r)) in make_ty_rule (List.rev prods) let target_to_bool : type r. r target -> bool = function | ForConstr -> false | ForPattern -> true let prepare_empty_levels forpat (where,(pos,p4assoc,name,reinit)) = let empty = match pos with | ReuseFirst -> Pcoq.Reuse (None, []) | ReuseLevel n -> Pcoq.Reuse (Some (constr_level n), []) | NewFirst -> Pcoq.Fresh (Gramlib.Gramext.First, [(name, p4assoc, [])]) | NewAfter n -> Pcoq.Fresh (Gramlib.Gramext.After (constr_level n), [(name, p4assoc, [])]) in match reinit with | None -> ExtendRule (target_entry where forpat, empty) | Some (assoc, pos) -> let pos = match pos with None -> Gramlib.Gramext.First | Some n -> Gramlib.Gramext.After (constr_level n) in let reinit = (assoc, pos) in ExtendRuleReinit (target_entry where forpat, reinit, empty) let different_levels (custom,opt_level) (custom',string_level) = match opt_level with | None -> true | Some level -> not (Notation.notation_entry_eq custom custom') || level <> int_of_string string_level let rec pure_sublevels' assoc from forpat level = function | [] -> [] | GramConstrNonTerminal (e,_) :: rem -> let rem = pure_sublevels' assoc from forpat level rem in let push where p rem = match symbol_of_target where p assoc from forpat with | MayRecNo sym -> (match Pcoq.level_of_nonterm sym with | None -> rem | Some i -> if different_levels (fst from,level) (where,i) then (where,int_of_string i) :: rem else rem) | _ -> rem in (match e with | ETProdPattern i -> push InConstrEntry (NumLevel i,InternalProd) rem | ETProdConstr (s,p) -> push s p rem | _ -> rem) | (GramConstrTerminal _ | GramConstrListMark _) :: rem -> pure_sublevels' assoc from forpat level rem let make_act : type r. 
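(* [make_act] below is the last step of the translation: given the
   target (term or pattern) and the notation key, it packages the
   environment accumulated through [push_item] and [ty_eval] into the
   actual AST node, i.e. a [CNotation] for terms and a [CPatNotation]
   for patterns, located at the span of the whole parsed notation. *)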
r target -> _ -> r gen_eval = function | ForConstr -> fun notation loc env -> let env = (env.constrs, env.constrlists, env.binders, env.binderlists) in CAst.make ~loc @@ CNotation (None, notation, env) | ForPattern -> fun notation loc env -> let env = (env.constrs, env.constrlists) in CAst.make ~loc @@ CPatNotation (None, notation, env, []) let extend_constr state forpat ng = let custom,n,_ = ng.notgram_level in let assoc = ng.notgram_assoc in let (entry, level) = interp_constr_entry_key custom forpat n in let fold (accu, state) pt = let AnyTyRule r = make_ty_rule assoc (custom,n) forpat pt in let pure_sublevels = pure_sublevels' assoc (custom,n) forpat level pt in let isforpat = target_to_bool forpat in let needed_levels, state = register_empty_levels state isforpat pure_sublevels in let (pos,p4assoc,name,reinit), state = find_position state custom isforpat assoc level in let empty_rules = List.map (prepare_empty_levels forpat) needed_levels in let empty = { constrs = []; constrlists = []; binders = []; binderlists = [] } in let act = ty_eval r (make_act forpat ng.notgram_notation) empty in let rule = let r = match ty_erase r with | MayRecRNo symbs -> Pcoq.Production.make symbs act | MayRecRMay symbs -> Pcoq.Production.make symbs act in let rule = name, p4assoc, [r] in match pos with | NewFirst -> Pcoq.Fresh (Gramlib.Gramext.First, [rule]) | NewAfter n -> Pcoq.Fresh (Gramlib.Gramext.After (constr_level n), [rule]) | ReuseFirst -> Pcoq.Reuse (None, [r]) | ReuseLevel n -> Pcoq.Reuse (Some (constr_level n), [r]) in let r = match reinit with | None -> ExtendRule (entry, rule) | Some (assoc, pos) -> let pos = match pos with None -> Gramlib.Gramext.First | Some n -> Gramlib.Gramext.After (constr_level n) in let reinit = (assoc, pos) in ExtendRuleReinit (entry, reinit, rule) in (accu @ empty_rules @ [r], state) in List.fold_left fold ([], state) ng.notgram_prods let constr_levels = GramState.field () let is_disjunctive_pattern_rule ng = String.is_sub "( _ | " (snd ng.notgram_notation) 0 let warn_disj_pattern_notation = let open Pp in let pp ng = str "Use of " ++ Notation.pr_notation ng.notgram_notation ++ str " Notation is deprecated as it is inconsistent with pattern syntax." in CWarnings.create ~name:"disj-pattern-notation" ~category:"notation" ~default:CWarnings.Disabled pp let extend_constr_notation ng state = let levels = match GramState.get state constr_levels with | None -> String.Map.add "constr" default_constr_levels String.Map.empty | Some lev -> lev in (* Add the notation in constr *) let (r, levels) = extend_constr levels ForConstr ng in (* Add the notation in cases_pattern, unless it would disrupt *) (* parsing nested disjunctive patterns. *) let (r', levels) = if is_disjunctive_pattern_rule ng then begin warn_disj_pattern_notation ng; ([], levels) end else extend_constr levels ForPattern ng in let state = GramState.set state constr_levels levels in (r @ r', state) let constr_grammar : one_notation_grammar grammar_command = create_grammar_command "Notation" extend_constr_notation let extend_constr_grammar ntn = extend_grammar_command constr_grammar ntn coq-8.15.0/vernac/egramcoq.mli000066400000000000000000000022261417001151100161370ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit (** Add a term notation rule to the parsing system. 
*) val create_custom_entry : local:bool -> string -> unit val exists_custom_entry : string -> bool val locality_of_custom_entry : string -> bool coq-8.15.0/vernac/egramml.ml000066400000000000000000000070561417001151100156220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 's grammar_prod_item type 'a ty_arg = ('a -> raw_generic_argument) type ('self, 'tr, _, 'r) ty_rule = | TyStop : ('self, Gramlib.Grammar.norec, 'r, 'r) ty_rule | TyNext : ('self, _, 'a, 'r) ty_rule * ('self, _, 'b) Symbol.t * 'b ty_arg option -> ('self, Gramlib.Grammar.mayrec, 'b -> 'a, 'r) ty_rule type ('self, 'r) any_ty_rule = | AnyTyRule : ('self, _, 'act, Loc.t -> 'r) ty_rule -> ('self, 'r) any_ty_rule let rec ty_rule_of_gram = function | [] -> AnyTyRule TyStop | GramTerminal s :: rem -> let AnyTyRule rem = ty_rule_of_gram rem in let tok = Pcoq.Symbol.token (CLexer.terminal s) in let r = TyNext (rem, tok, None) in AnyTyRule r | GramNonTerminal (_, (t, tok)) :: rem -> let AnyTyRule rem = ty_rule_of_gram rem in let inj = Some (fun obj -> Genarg.in_gen t obj) in let r = TyNext (rem, tok, inj) in AnyTyRule r let rec ty_erase : type s tr a r. (s, tr, a, r) ty_rule -> (s, tr, a, r) Pcoq.Rule.t = function | TyStop -> Pcoq.Rule.stop | TyNext (rem, tok, _) -> Pcoq.Rule.next (ty_erase rem) tok type 'r gen_eval = Loc.t -> raw_generic_argument list -> 'r let rec ty_eval : type s tr a. (s, tr, a, Loc.t -> s) ty_rule -> s gen_eval -> a = function | TyStop -> fun f loc -> f loc [] | TyNext (rem, tok, None) -> fun f _ -> ty_eval rem f | TyNext (rem, tok, Some inj) -> fun f x -> let f loc args = f loc (inj x :: args) in ty_eval rem f let make_rule f prod = let AnyTyRule ty_rule = ty_rule_of_gram (List.rev prod) in let symb = ty_erase ty_rule in let f loc l = f loc (List.rev l) in let act = ty_eval ty_rule f in Pcoq.Production.make symb act let rec proj_symbol : type a b c. (a, b, c) ty_user_symbol -> (a, b, c) genarg_type = function | TUentry a -> ExtraArg a | TUentryl (a,l) -> ExtraArg a | TUopt(o) -> OptArg (proj_symbol o) | TUlist1 l -> ListArg (proj_symbol l) | TUlist1sep (l,_) -> ListArg (proj_symbol l) | TUlist0 l -> ListArg (proj_symbol l) | TUlist0sep (l,_) -> ListArg (proj_symbol l) (** Vernac grammar extensions *) let vernac_exts = ref [] let get_extend_vernac_rule (s, i) = try let find ((name, j), _) = String.equal name s && Int.equal i j in let (_, rules) = List.find find !vernac_exts in rules with | Failure _ -> raise Not_found let extend_vernac_command_grammar s nt gl = let nt = Option.default Pvernac.Vernac_.command nt in vernac_exts := (s,gl) :: !vernac_exts; let mkact loc l = VernacExtend (s, l) in let rules = [make_rule mkact gl] in if Pcoq.Entry.is_empty nt then (* Small hack to tolerate empty entries in VERNAC { ... 
} EXTEND *) grammar_extend nt (Pcoq.Fresh (Gramlib.Gramext.First, [None, None, rules])) else grammar_extend nt (Pcoq.Reuse (None, rules)) coq-8.15.0/vernac/egramml.mli000066400000000000000000000027761417001151100157770ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 's grammar_prod_item val extend_vernac_command_grammar : extend_name -> vernac_expr Pcoq.Entry.t option -> vernac_expr grammar_prod_item list -> unit val get_extend_vernac_rule : extend_name -> vernac_expr grammar_prod_item list val proj_symbol : ('a, 'b, 'c) Extend.ty_user_symbol -> ('a, 'b, 'c) Genarg.genarg_type (** Utility function reused in Egramcoq : *) val make_rule : (Loc.t -> Genarg.raw_generic_argument list -> 'a) -> 'a grammar_prod_item list -> 'a Pcoq.Production.t coq-8.15.0/vernac/future.ml000066400000000000000000000136121417001151100155030ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Pp.strbrk("The value you are asking for ("^name^") is not ready yet. "^ "Please wait or pass "^ "the \"-async-proofs off\" option to CoqIDE to disable "^ "asynchronous script processing and don't pass \"-vio\" to "^ "coqc.")) let not_here_msg = ref (fun name -> Pp.strbrk("The value you are asking for ("^name^") is not available "^ "in this process. If you really need this, pass "^ "the \"-async-proofs off\" option to CoqIDE to disable "^ "asynchronous script processing and don't pass \"-vio\" to "^ "coqc.")) let customize_not_ready_msg f = not_ready_msg := f let customize_not_here_msg f = not_here_msg := f exception NotReady of string exception NotHere of string let _ = CErrors.register_handler (function | NotReady name -> Some (!not_ready_msg name) | NotHere name -> Some (!not_here_msg name) | _ -> None) type fix_exn = Stateid.exn_info option let eval_fix_exn f (e, info) = match f with | None -> (e, info) | Some { Stateid.id; valid } -> match Stateid.get info with | Some _ -> (e, info) | None -> let loc = Loc.get_loc info in let msg = CErrors.iprint (e, info) in let () = Feedback.(feedback ~id (Message (Error, loc, msg))) in (e, Stateid.add info ~valid id) module UUID = struct type t = int let invalid = 0 let fresh = let count = ref invalid in fun () -> incr count; !count let compare = compare let equal = (==) end module UUIDMap = Map.Make(UUID) module UUIDSet = Set.Make(UUID) type 'a assignment = [ `Val of 'a | `Exn of Exninfo.iexn | `Comp of (unit -> 'a)] (* Val is not necessarily a final state, so the computation restarts from the state stocked into Val *) and 'a comp = | Delegated of (Mutex.t * Condition.t) option | Closure of (unit -> 'a) | Val of 'a | Exn of Exninfo.iexn (* Invariant: this exception is always "fixed" as in fix_exn *) and 'a computation = | Ongoing of string * (UUID.t * fix_exn * 'a comp ref) CEphemeron.key let unnamed = "unnamed" let create ?(name=unnamed) ?(uuid=UUID.fresh ()) ~fix_exn x = Ongoing (name, CEphemeron.create (uuid, fix_exn, ref x)) let get x = match x with | Ongoing (name, x) -> try let uuid, fix, c = CEphemeron.get x in name, uuid, fix, c with CEphemeron.InvalidKey -> name, UUID.invalid, None, ref (Exn (NotHere name, Exninfo.null)) type 'a value = [ `Val of 'a | `Exn of Exninfo.iexn ] let is_over kx = let _, _, _, x = get kx in match !x with | Val _ | Exn _ -> true | 
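(* A [computation] is in one of four states: [Closure f] is a pending
   computation that [force] will run and cache; [Val v] and [Exn e]
   cache a result, respectively a success and an already-"fixed"
   failure; [Delegated sync] is a slot to be filled later through the
   assignment function returned by [create_delegate], [sync] being
   the mutex/condition pair used to block callers when the delegate
   was created with [~blocking:true] (the default). *)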
Closure _ | Delegated _ -> false let is_exn kx = let _, _, _, x = get kx in match !x with | Exn _ -> true | Val _ | Closure _ | Delegated _ -> false let peek_val kx = let _, _, _, x = get kx in match !x with | Val v -> Some v | Exn _ | Closure _ | Delegated _ -> None let uuid kx = let _, id, _, _ = get kx in id let from_val v = create ~fix_exn:None (Val v) let create_delegate ?(blocking=true) ~name fix_exn = let sync = if blocking then Some (Mutex.create (), Condition.create ()) else None in let ck = create ~name ~fix_exn (Delegated sync) in let assignment = fun v -> let _, _, fix_exn, c = get ck in let sync = match !c with Delegated s -> s | _ -> assert false in begin match v with | `Val v -> c := Val v | `Exn e -> c := Exn (eval_fix_exn fix_exn e) | `Comp f -> c := Closure f end; let iter (lock, cond) = CThread.with_lock lock ~scope:(fun () -> Condition.broadcast cond) in Option.iter iter sync in ck, assignment (* TODO: get rid of try/catch to be stackless *) let rec compute ck : 'a value = let name, _, fix_exn, c = get ck in match !c with | Val x -> `Val x | Exn (e, info) -> `Exn (e, info) | Delegated None -> raise (NotReady name) | Delegated (Some (lock, cond)) -> CThread.with_lock lock ~scope:(fun () -> Condition.wait cond lock); compute ck | Closure f -> try let data = f () in c := Val data; `Val data with e -> let e = Exninfo.capture e in let e = eval_fix_exn fix_exn e in match e with | (NotReady _, _) -> `Exn e | _ -> c := Exn e; `Exn e let force x = match compute x with | `Val v -> v | `Exn e -> Exninfo.iraise e let chain ck f = let name, uuid, fix_exn, c = get ck in create ~uuid ~name ~fix_exn (match !c with | Closure _ | Delegated _ -> Closure (fun () -> f (force ck)) | Exn _ as x -> x | Val v -> Val (f v)) let create ~fix_exn f = create ~fix_exn (Closure f) let replace kx y = let _, _, _, x = get kx in match !x with | Exn _ -> x := Closure (fun () -> force y) | _ -> CErrors.anomaly (Pp.str "A computation can be replaced only if is_exn holds.") let chain x f = let y = chain x f in if is_over x then ignore(force y); y let print f kx = let open Pp in let name, uid, _, x = get kx in let uid = if UUID.equal uid UUID.invalid then str "[#:" ++ str name ++ str "]" else str "[" ++ int uid ++ str":" ++ str name ++ str "]" in match !x with | Delegated _ -> str "Delegated" ++ uid | Closure _ -> str "Closure" ++ uid | Val x -> str "PureVal" ++ uid ++ spc () ++ hov 0 (f x) | Exn (e, _) -> str "Exn" ++ uid ++ spc () ++ hov 0 (str (Printexc.to_string e)) coq-8.15.0/vernac/future.mli000066400000000000000000000072351417001151100156600ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t -> int val equal : t -> t -> bool end module UUIDMap : Map.S with type key = UUID.t module UUIDSet : Set.S with type elt = UUID.t exception NotReady of string type 'a computation type 'a value = [ `Val of 'a | `Exn of Exninfo.iexn ] type fix_exn = Stateid.exn_info option (* Build a computation, no snapshot of the global state is taken. If you need to grab a copy of the state start with from_here () and then chain. fix_exn is used to enrich any exception raised by forcing the computations or any computation that is chained after it. It is used by STM to attach errors to their corresponding states, and to communicate to the code catching the exception a valid state id. 
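   As an illustration (a sketch only; [heavy_check] and the worker
   below are hypothetical, not part of this interface), the two main
   ways of building a computation from outside this module are:

     let c = Future.create ~fix_exn:None (fun () -> heavy_check ()) in
     let r = Future.force c     (* runs [heavy_check] once and caches r *)

     let d, assign = Future.create_delegate ~name:"proof" None in
     (* ... some worker later calls [assign (`Val v)] or [assign (`Exn e)] ... *)
     let v = Future.force d     (* by default, blocks until assigned *)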
*) val create : fix_exn:fix_exn -> (unit -> 'a) -> 'a computation (* Usually from_val is used to create "fake" futures, to use the same API as if a real asynchronous computations was there. *) val from_val : 'a -> 'a computation (* Run remotely, returns the function to assign. If not blocking (the default) it raises NotReady if forced before the delegate assigns it. *) type 'a assignment = [ `Val of 'a | `Exn of Exninfo.iexn | `Comp of (unit -> 'a)] val create_delegate : ?blocking:bool -> name:string -> fix_exn -> 'a computation * ('a assignment -> unit) (* Given a computation that is_exn, replace it by another one *) val replace : 'a computation -> 'a computation -> unit (* Inspect a computation *) val is_over : 'a computation -> bool val is_exn : 'a computation -> bool val peek_val : 'a computation -> 'a option val uuid : 'a computation -> UUID.t (* [chain c f] chains computation [c] with [f]. * [chain] is eager, that is to say, it won't suspend the new computation * if the old one is_over (Exn or Val). *) val chain : 'a computation -> ('a -> 'b) -> 'b computation (* Forcing a computation *) val force : 'a computation -> 'a val compute : 'a computation -> 'a value (** Debug: print a computation given an inner printing function. *) val print : ('a -> Pp.t) -> 'a computation -> Pp.t val customize_not_ready_msg : (string -> Pp.t) -> unit val customize_not_here_msg : (string -> Pp.t) -> unit coq-8.15.0/vernac/g_proofs.mlg000066400000000000000000000140741417001151100161610ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Pp.strbrk "The Focus command is deprecated; use bullets or focusing brackets instead" ) let warn_deprecated_focus_n n = CWarnings.create ~name:"deprecated-focus" ~category:"deprecated" (fun () -> Pp.(str "The Focus command is deprecated;" ++ spc () ++ str "use '" ++ int n ++ str ": {' instead") ) let warn_deprecated_unfocus = CWarnings.create ~name:"deprecated-unfocus" ~category:"deprecated" (fun () -> Pp.strbrk "The Unfocus command is deprecated") } (* Proof commands *) GRAMMAR EXTEND Gram GLOBAL: hint command; opt_hintbases: [ [ -> { [] } | ":"; l = LIST1 [id = IDENT -> { id } ] -> { l } ] ] ; command: TOP [ [ IDENT "Goal"; c = lconstr -> { VernacDefinition (Decls.(NoDischarge, Definition), ((CAst.make ~loc Names.Anonymous), None), ProveBody ([], c)) } | IDENT "Proof" -> { VernacProof (None,None) } | IDENT "Proof"; IDENT "using"; l = G_vernac.section_subset_expr -> { VernacProof (None,Some l) } | IDENT "Proof" ; IDENT "Mode" ; mn = string -> { VernacProofMode mn } | IDENT "Proof"; c = lconstr -> { VernacExactProof c } | IDENT "Abort" -> { VernacAbort None } | IDENT "Abort"; IDENT "All" -> { VernacAbortAll } | IDENT "Abort"; id = identref -> { VernacAbort (Some id) } | IDENT "Admitted" -> { VernacEndProof Admitted } | IDENT "Qed" -> { VernacEndProof (Proved (Opaque,None)) } | IDENT "Save"; id = identref -> { VernacEndProof (Proved (Opaque, Some id)) } | IDENT "Defined" -> { VernacEndProof (Proved (Transparent,None)) } | IDENT "Defined"; id=identref -> { VernacEndProof (Proved (Transparent,Some id)) } | IDENT "Restart" -> { VernacRestart } | IDENT "Undo" -> { VernacUndo 1 } | IDENT "Undo"; n = natural -> { VernacUndo n } | IDENT "Undo"; IDENT "To"; n = natural -> { VernacUndoTo n } | IDENT "Focus" -> { warn_deprecated_focus ~loc (); VernacFocus None } | IDENT "Focus"; n = natural -> { warn_deprecated_focus_n n 
~loc (); VernacFocus (Some n) } | IDENT "Unfocus" -> { warn_deprecated_unfocus ~loc (); VernacUnfocus } | IDENT "Unfocused" -> { VernacUnfocused } | IDENT "Show" -> { VernacShow (ShowGoal OpenSubgoals) } | IDENT "Show"; n = natural -> { VernacShow (ShowGoal (NthGoal n)) } | IDENT "Show"; id = ident -> { VernacShow (ShowGoal (GoalId id)) } | IDENT "Show"; IDENT "Existentials" -> { VernacShow ShowExistentials } | IDENT "Show"; IDENT "Universes" -> { VernacShow ShowUniverses } | IDENT "Show"; IDENT "Conjectures" -> { VernacShow ShowProofNames } | IDENT "Show"; IDENT "Proof" -> { VernacShow ShowProof } | IDENT "Show"; IDENT "Intro" -> { VernacShow (ShowIntros false) } | IDENT "Show"; IDENT "Intros" -> { VernacShow (ShowIntros true) } | IDENT "Show"; IDENT "Match"; id = reference -> { VernacShow (ShowMatch id) } | IDENT "Guarded" -> { VernacCheckGuard } (* Hints for Auto and EAuto *) | IDENT "Create"; IDENT "HintDb" ; id = IDENT ; b = [ IDENT "discriminated" -> { true } | -> { false } ] -> { VernacCreateHintDb (id, b) } | IDENT "Remove"; IDENT "Hints"; ids = LIST1 global; dbnames = opt_hintbases -> { VernacRemoveHints (dbnames, ids) } | IDENT "Hint"; h = hint; dbnames = opt_hintbases -> { VernacHints (dbnames, h) } ] ]; reference_or_constr: [ [ r = global -> { HintsReference r } | c = constr -> { HintsConstr c } ] ] ; hint: [ [ IDENT "Resolve"; lc = LIST1 reference_or_constr; info = hint_info -> { HintsResolve (List.map (fun x -> (info, true, x)) lc) } | IDENT "Resolve"; "->"; lc = LIST1 global; n = OPT natural -> { HintsResolveIFF (true, lc, n) } | IDENT "Resolve"; "<-"; lc = LIST1 global; n = OPT natural -> { HintsResolveIFF (false, lc, n) } | IDENT "Immediate"; lc = LIST1 reference_or_constr -> { HintsImmediate lc } | IDENT "Variables"; IDENT "Transparent" -> { HintsTransparency (HintsVariables, true) } | IDENT "Variables"; IDENT "Opaque" -> { HintsTransparency (HintsVariables, false) } | IDENT "Constants"; IDENT "Transparent" -> { HintsTransparency (HintsConstants, true) } | IDENT "Constants"; IDENT "Opaque" -> { HintsTransparency (HintsConstants, false) } | IDENT "Transparent"; lc = LIST1 global -> { HintsTransparency (HintsReferences lc, true) } | IDENT "Opaque"; lc = LIST1 global -> { HintsTransparency (HintsReferences lc, false) } | IDENT "Mode"; l = global; m = mode -> { HintsMode (l, m) } | IDENT "Unfold"; lqid = LIST1 global -> { HintsUnfold lqid } | IDENT "Constructors"; lc = LIST1 global -> { HintsConstructors lc } ] ] ; constr_body: [ [ ":="; c = lconstr -> { c } | ":"; t = lconstr; ":="; c = lconstr -> { CAst.make ~loc @@ CCast(c,C.DEFAULTcast, t) } ] ] ; mode: [ [ l = LIST1 [ "+" -> { ModeInput } | "!" -> { ModeNoHeadEvar } | "-" -> { ModeOutput } ] -> { l } ] ] ; END coq-8.15.0/vernac/g_vernac.mlg000066400000000000000000001424631417001151100161330ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Dash n | '+' -> Plus n | '*' -> Star n | _ -> assert false (* For now we just keep the top-level location of the whole vernacular, that is to say, including attributes and control flags; this is not very convenient for advanced clients tho, so in the future it'd be cool to actually locate the attributes and control flags individually too. 
*) let add_control_flag ~loc ~flag { CAst.v = cmd } = CAst.make ~loc { cmd with control = flag :: cmd.control } let test_hash_ident = let open Pcoq.Lookahead in to_entry "test_hash_ident" begin lk_kw "#" >> lk_ident >> check_no_space end let test_id_colon = let open Pcoq.Lookahead in to_entry "test_id_colon" begin lk_ident >> lk_kw ":" end } GRAMMAR EXTEND Gram GLOBAL: vernac_control quoted_attributes gallina_ext noedit_mode subprf; vernac_control: FIRST [ [ IDENT "Time"; c = vernac_control -> { add_control_flag ~loc ~flag:(ControlTime false) c } | IDENT "Redirect"; s = ne_string; c = vernac_control -> { add_control_flag ~loc ~flag:(ControlRedirect s) c } | IDENT "Timeout"; n = natural; c = vernac_control -> { add_control_flag ~loc ~flag:(ControlTimeout n) c } | IDENT "Fail"; c = vernac_control -> { add_control_flag ~loc ~flag:ControlFail c } | IDENT "Succeed"; c = vernac_control -> { add_control_flag ~loc ~flag:ControlSucceed c } | v = decorated_vernac -> { let (attrs, expr) = v in CAst.make ~loc { control = []; attrs; expr = expr } } ] ] ; decorated_vernac: [ [ a = LIST0 quoted_attributes ; fv = vernac -> { let (f, v) = fv in (List.append (List.flatten a) f, v) } ] ] ; quoted_attributes: [ [ "#[" ; a = attribute_list ; "]" -> { a } ] ] ; attribute_list: [ [ a = LIST0 attribute SEP "," -> { a } ] ] ; attribute: [ [ k = ident ; v = attr_value -> { CAst.make ~loc (Names.Id.to_string k, v) } (* Required because "ident" is declared a keyword when loading Ltac. *) | IDENT "using" ; v = attr_value -> { CAst.make ~loc ("using", v) } ] ] ; attr_value: [ [ "=" ; v = string -> { VernacFlagLeaf (FlagString v) } | "=" ; v = IDENT -> { VernacFlagLeaf (FlagIdent v) } | "(" ; a = attribute_list ; ")" -> { VernacFlagList a } | -> { VernacFlagEmpty } ] ] ; legacy_attr: [ [ IDENT "Local" -> { CAst.make ~loc ("local", VernacFlagEmpty) } | IDENT "Global" -> { CAst.make ~loc ("global", VernacFlagEmpty) } | IDENT "Polymorphic" -> { Attributes.vernac_polymorphic_flag (Some loc) } | IDENT "Monomorphic" -> { Attributes.vernac_monomorphic_flag (Some loc) } | IDENT "Cumulative" -> { CAst.make ~loc ("universes", VernacFlagList [CAst.make ~loc ("cumulative", VernacFlagEmpty)]) } | IDENT "NonCumulative" -> { CAst.make ~loc ("universes", VernacFlagList [CAst.make ~loc ("cumulative", VernacFlagLeaf (FlagIdent "no"))]) } | IDENT "Private" -> { CAst.make ~loc ("private", VernacFlagList [CAst.make ~loc ("matching", VernacFlagEmpty)]) } | IDENT "Program" -> { CAst.make ~loc ("program", VernacFlagEmpty) } ] ] ; vernac: [ [ attrs = LIST0 legacy_attr; v = vernac_aux -> { (attrs, v) } ] ] ; vernac_aux: (* Better to parse "." here: in case of failure (e.g. in coerce_to_var), *) (* "." is still in the stream and discard_to_dot works correctly *) [ [ g = gallina; "." -> { g } | g = gallina_ext; "." -> { g } | c = command; "." -> { c } | c = syntax; "." -> { c } | c = subprf -> { c } ] ] ; vernac_aux: LAST [ [ prfcom = command_entry -> { prfcom } ] ] ; noedit_mode: [ [ c = query_command -> { c None } ] ] ; subprf: [ [ s = BULLET -> { VernacBullet (make_bullet s) } | "{" -> { VernacSubproof None } | "}" -> { VernacEndSubproof } ] ] ; END { let warn_plural_command = CWarnings.create ~name:"plural-command" ~category:"pedantic" ~default:CWarnings.Disabled (fun kwd -> strbrk (Printf.sprintf "Command \"%s\" expects more than one assumption." 
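(* Examples of what the attribute grammar above accepts (the names
   are only illustrations of the three [attr_value] shapes; their
   meaning is checked later by the attribute interpretation, not by
   the parser):
     #[local]                          boolean flag
     #[deprecated(since="8.15")]       nested list with a string value
     #[universes(cumulative=no)]       nested list with an ident value
   Several attributes can be grouped in one #[ ... ] block, separated
   by commas, and a command may be preceded by several such blocks. *)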
kwd)) let test_plural_form loc kwd = function | [(_,([_],_))] -> warn_plural_command ~loc kwd | _ -> () let test_plural_form_types loc kwd = function | [([_],_)] -> warn_plural_command ~loc kwd | _ -> () let lname_of_lident : lident -> lname = CAst.map (fun s -> Name s) let name_of_ident_decl : ident_decl -> name_decl = on_fst lname_of_lident let test_variance_ident = let open Pcoq.Lookahead in to_entry "test_variance_ident" begin lk_kws ["=";"+";"*"] >> lk_ident end } (* Gallina declarations *) GRAMMAR EXTEND Gram GLOBAL: gallina gallina_ext thm_token def_token assumption_token def_body of_type record_field decl_notations fix_definition ident_decl univ_decl inductive_definition; gallina: (* Definition, Theorem, Variable, Axiom, ... *) [ [ thm = thm_token; id = ident_decl; bl = binders; ":"; c = lconstr; l = LIST0 [ "with"; id = ident_decl; bl = binders; ":"; c = lconstr -> { (id,(bl,c)) } ] -> { VernacStartTheoremProof (thm, (id,(bl,c))::l) } | stre = assumption_token; nl = inline; bl = assum_list -> { VernacAssumption (stre, nl, bl) } | tk = assumptions_token; nl = inline; bl = assum_list -> { let (kwd,stre) = tk in test_plural_form loc kwd bl; VernacAssumption (stre, nl, bl) } | d = def_token; id = ident_decl; b = def_body -> { VernacDefinition (d, name_of_ident_decl id, b) } | IDENT "Let"; id = ident_decl; b = def_body -> { VernacDefinition ((DoDischarge, Let), name_of_ident_decl id, b) } (* Gallina inductive declarations *) | f = finite_token; indl = LIST1 inductive_definition SEP "with" -> { VernacInductive (f, indl) } | "Fixpoint"; recs = LIST1 fix_definition SEP "with" -> { VernacFixpoint (NoDischarge, recs) } | IDENT "Let"; "Fixpoint"; recs = LIST1 fix_definition SEP "with" -> { VernacFixpoint (DoDischarge, recs) } | "CoFixpoint"; corecs = LIST1 cofix_definition SEP "with" -> { VernacCoFixpoint (NoDischarge, corecs) } | IDENT "Let"; "CoFixpoint"; corecs = LIST1 cofix_definition SEP "with" -> { VernacCoFixpoint (DoDischarge, corecs) } | IDENT "Scheme"; l = LIST1 scheme SEP "with" -> { VernacScheme l } | IDENT "Combined"; IDENT "Scheme"; id = identref; IDENT "from"; l = LIST1 identref SEP "," -> { VernacCombinedScheme (id, l) } | IDENT "Register"; g = global; "as"; quid = qualid -> { VernacRegister(g, RegisterCoqlib quid) } | IDENT "Register"; IDENT "Inline"; g = global -> { VernacRegister(g, RegisterInline) } | IDENT "Primitive"; id = ident_decl; typopt = OPT [ ":"; typ = lconstr -> { typ } ]; ":="; r = register_token -> { VernacPrimitive(id, r, typopt) } | IDENT "Universe"; l = LIST1 identref -> { VernacUniverse l } | IDENT "Universes"; l = LIST1 identref -> { VernacUniverse l } | IDENT "Constraint"; l = LIST1 univ_constraint SEP "," -> { VernacConstraint l } ] ] ; register_token: [ [ test_hash_ident; "#"; r = IDENT -> { CPrimitives.parse_op_or_type ~loc r } ] ] ; thm_token: [ [ "Theorem" -> { Theorem } | IDENT "Lemma" -> { Lemma } | IDENT "Fact" -> { Fact } | IDENT "Remark" -> { Remark } | IDENT "Corollary" -> { Corollary } | IDENT "Proposition" -> { Proposition } | IDENT "Property" -> { Property } ] ] ; def_token: [ [ "Definition" -> { (NoDischarge,Definition) } | IDENT "Example" -> { (NoDischarge,Example) } | IDENT "SubClass" -> { (NoDischarge,SubClass) } ] ] ; assumption_token: [ [ "Hypothesis" -> { (DoDischarge, Logical) } | "Variable" -> { (DoDischarge, Definitional) } | "Axiom" -> { (NoDischarge, Logical) } | "Parameter" -> { (NoDischarge, Definitional) } | IDENT "Conjecture" -> { (NoDischarge, Conjectural) } ] ] ; assumptions_token: [ [ IDENT "Hypotheses" -> { 
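(* [test_plural_form] above is what turns a plural keyword used with
   a single assumption into the "plural-command" warning: for
   instance "Variables x : nat." (one binder behind a plural keyword)
   warns, while "Variables x y : nat." does not, and the singular
   "Variable x : nat." is handled by [assumption_token] and never
   checked.  The concrete sentences are only illustrations of the
   [(_,([_],_))] pattern being matched. *)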
("Hypotheses", (DoDischarge, Logical)) } | IDENT "Variables" -> { ("Variables", (DoDischarge, Definitional)) } | IDENT "Axioms" -> { ("Axioms", (NoDischarge, Logical)) } | IDENT "Parameters" -> { ("Parameters", (NoDischarge, Definitional)) } | IDENT "Conjectures" -> { ("Conjectures", (NoDischarge, Conjectural)) } ] ] ; inline: [ [ IDENT "Inline"; "("; i = natural; ")" -> { InlineAt i } | IDENT "Inline" -> { DefaultInline } | -> { NoInline } ] ] ; univ_constraint: [ [ l = universe_name; ord = [ "<" -> { Univ.Lt } | "=" -> { Univ.Eq } | "<=" -> { Univ.Le } ]; r = universe_name -> { (l, ord, r) } ] ] ; univ_decl: [ [ "@{" ; l = LIST0 identref; ext = [ "+" -> { true } | -> { false } ]; cs = [ "|"; l' = LIST0 univ_constraint SEP ","; ext = [ "+" -> { true } | -> { false } ]; "}" -> { (l',ext) } | ext = [ "}" -> { true } | bar_cbrace -> { false } ] -> { ([], ext) } ] -> { let open UState in { univdecl_instance = l; univdecl_extensible_instance = ext; univdecl_constraints = fst cs; univdecl_extensible_constraints = snd cs } } ] ] ; variance: [ [ "+" -> { Univ.Variance.Covariant } | "=" -> { Univ.Variance.Invariant } | "*" -> { Univ.Variance.Irrelevant } ] ] ; variance_identref: [ [ id = identref -> { (id, None) } | test_variance_ident; v = variance; id = identref -> { (id, Some v) } (* We need this test to help the parser avoid the conflict between "+" before ident (covariance) and trailing "+" (extra univs allowed) *) ] ] ; cumul_univ_decl: [ [ "@{" ; l = LIST0 variance_identref; ext = [ "+" -> { true } | -> { false } ]; cs = [ "|"; l' = LIST0 univ_constraint SEP ","; ext = [ "+" -> { true } | -> { false } ]; "}" -> { (l',ext) } | ext = [ "}" -> { true } | bar_cbrace -> { false } ] -> { ([], ext) } ] -> { let open UState in { univdecl_instance = l; univdecl_extensible_instance = ext; univdecl_constraints = fst cs; univdecl_extensible_constraints = snd cs } } ] ] ; ident_decl: [ [ i = identref; l = OPT univ_decl -> { (i, l) } ] ] ; cumul_ident_decl: [ [ i = identref; l = OPT cumul_univ_decl -> { (i, l) } ] ] ; finite_token: [ [ IDENT "Inductive" -> { Inductive_kw } | IDENT "CoInductive" -> { CoInductive } | IDENT "Variant" -> { Variant } | IDENT "Record" -> { Record } | IDENT "Structure" -> { Structure } | IDENT "Class" -> { Class true } ] ] ; (* Simple definitions *) def_body: [ [ bl = binders; ":="; red = reduce; c = lconstr -> { match c.CAst.v with | CCast(c, C.DEFAULTcast, t) -> DefineBody (bl, red, c, Some t) | _ -> DefineBody (bl, red, c, None) } | bl = binders; ":"; t = lconstr; ":="; red = reduce; c = lconstr -> { DefineBody (bl, red, c, Some t) } | bl = binders; ":"; t = lconstr -> { ProveBody (bl, t) } ] ] ; reduce: [ [ IDENT "Eval"; r = red_expr; "in" -> { Some r } | -> { None } ] ] ; decl_notation: [ [ ntn = ne_lstring; ":="; c = constr; modl = syntax_modifiers; scopt = OPT [ ":"; sc = IDENT -> { sc } ] -> { { decl_ntn_string = ntn; decl_ntn_interp = c; decl_ntn_scope = scopt; decl_ntn_modifiers = modl; } } ] ] ; decl_sep: [ [ IDENT "and" -> { () } ] ] ; decl_notations: [ [ "where"; l = LIST1 decl_notation SEP decl_sep -> { l } | -> { [] } ] ] ; (* Inductives and records *) opt_constructors_or_fields: [ [ ":="; lc = constructors_or_record -> { lc } | -> { RecordDecl (None, []) } ] ] ; inductive_definition: [ [ oc = opt_coercion; id = cumul_ident_decl; indpar = binders; extrapar = OPT [ "|"; p = binders -> { p } ]; c = OPT [ ":"; c = lconstr -> { c } ]; lc=opt_constructors_or_fields; ntn = decl_notations -> { (((oc,id),(indpar,extrapar),c,lc),ntn) } ] ] ; constructors_or_record: [ [ 
"|"; l = LIST1 constructor SEP "|" -> { Constructors l } | id = identref ; c = constructor_type; "|"; l = LIST1 constructor SEP "|" -> { Constructors ((c id)::l) } | id = identref ; c = constructor_type -> { Constructors [ c id ] } | cstr = identref; "{"; fs = record_fields; "}" -> { RecordDecl (Some cstr,fs) } | "{";fs = record_fields; "}" -> { RecordDecl (None,fs) } | -> { Constructors [] } ] ] ; (* csort: [ [ s = sort -> CSort (loc,s) ] ] ; *) opt_coercion: [ [ ">" -> { true } | -> { false } ] ] ; (* (co)-fixpoints *) fix_definition: [ [ id_decl = ident_decl; bl = binders_fixannot; rtype = type_cstr; body_def = OPT [":="; def = lconstr -> { def } ]; notations = decl_notations -> { let binders, rec_order = bl in {fname = fst id_decl; univs = snd id_decl; rec_order; binders; rtype; body_def; notations} } ] ] ; cofix_definition: [ [ id_decl = ident_decl; binders = binders; rtype = type_cstr; body_def = OPT [":="; def = lconstr -> { def }]; notations = decl_notations -> { {fname = fst id_decl; univs = snd id_decl; rec_order = (); binders; rtype; body_def; notations} } ]] ; (* Inductive schemes *) scheme: [ [ kind = scheme_kind -> { (None,kind) } | id = identref; ":="; kind = scheme_kind -> { (Some id,kind) } ] ] ; scheme_kind: [ [ IDENT "Induction"; "for"; ind = smart_global; IDENT "Sort"; s = sort_family-> { InductionScheme(true,ind,s) } | IDENT "Minimality"; "for"; ind = smart_global; IDENT "Sort"; s = sort_family-> { InductionScheme(false,ind,s) } | IDENT "Elimination"; "for"; ind = smart_global; IDENT "Sort"; s = sort_family-> { CaseScheme(true,ind,s) } | IDENT "Case"; "for"; ind = smart_global; IDENT "Sort"; s = sort_family-> { CaseScheme(false,ind,s) } | IDENT "Equality"; "for" ; ind = smart_global -> { EqualityScheme(ind) } ] ] ; (* Various Binders *) (* (* ... without coercions *) binder_nodef: [ [ b = binder_let -> (match b with CLocalAssum(l,ty) -> (l,ty) | CLocalDef _ -> user_err ~loc (Pp.str"defined binder not allowed here.")) ] ] ; *) (* ... with coercions *) record_field: [ [ attr = LIST0 quoted_attributes ; bd = record_binder; rf_priority = OPT [ "|"; n = natural -> { n } ]; rf_notation = decl_notations -> { let rf_canonical = attr |> List.flatten |> parse canonical_field in let rf_subclass, rf_decl = bd in rf_decl, { rf_subclass ; rf_priority ; rf_notation ; rf_canonical } } ] ] ; record_fields: [ [ f = record_field; ";"; fs = record_fields -> { f :: fs } | f = record_field -> { [f] } | -> { [] } ] ] ; field_body: [ [ l = binders; oc = of_type; t = lconstr -> { fun id -> (oc,AssumExpr (id,l,t)) } | l = binders; oc = of_type; t = lconstr; ":="; b = lconstr -> { fun id -> (oc,DefExpr (id,l,b,Some t)) } | l = binders; ":="; b = lconstr -> { fun id -> (* Why are we dropping cast info here? 
*) match b.CAst.v with | CCast(b', _, t) -> (NoInstance,DefExpr(id,l,b',Some t)) | _ -> (NoInstance,DefExpr(id,l,b,None)) } ] ] ; record_binder: [ [ id = name -> { (NoInstance,AssumExpr(id, [], CAst.make ~loc @@ CHole (None, IntroAnonymous, None))) } | id = name; f = field_body -> { f id } ] ] ; assum_list: [ [ bl = LIST1 assum_coe -> { bl } | b = assumpt -> { [b] } ] ] ; assum_coe: [ [ "("; a = assumpt; ")" -> { a } ] ] ; assumpt: [ [ idl = LIST1 ident_decl; oc = of_type; c = lconstr -> { (oc <> NoInstance,(idl,c)) } ] ] ; constructor_type: [[ l = binders; t= [ coe = of_type; c = lconstr -> { fun l id -> (coe <> NoInstance,(id,mkProdCN ~loc l c)) } | -> { fun l id -> (false,(id,mkProdCN ~loc l (CAst.make ~loc @@ CHole (None, IntroAnonymous, None)))) } ] -> { t l } ]] ; constructor: [ [ id = identref; c=constructor_type -> { c id } ] ] ; of_type: [ [ ":>" -> { BackInstance } | ":"; ">" -> { BackInstance } | ":" -> { NoInstance } ] ] ; END { let test_only_starredidentrefs = let open Pcoq.Lookahead in to_entry "test_only_starredidentrefs" begin lk_list (lk_ident <+> lk_kws ["Type"; "*"]) >> (lk_kws [".";")"]) end let starredidentreflist_to_expr l = match l with | [] -> SsEmpty | x :: xs -> List.fold_right (fun i acc -> SsUnion(i,acc)) xs x let warn_deprecated_include_type = CWarnings.create ~name:"deprecated-include-type" ~category:"deprecated" (fun () -> strbrk "Include Type is deprecated; use Include instead") let warn_deprecated_as_ident_kind = CWarnings.create ~name:"deprecated-as-ident-kind" ~category:"deprecated" (fun () -> strbrk "grammar kind \"as ident\" no longer accepts \"_\"; use \"as name\" instead to accept \"_\", too, or silence the warning if you actually intended to accept only identifiers.") } (* Modules and Sections *) GRAMMAR EXTEND Gram GLOBAL: gallina_ext module_expr module_type section_subset_expr; gallina_ext: [ [ (* Interactive module declaration *) IDENT "Module"; export = export_token; id = identref; bl = LIST0 module_binder; sign = of_module_type; body = is_module_expr -> { VernacDefineModule (export, id, bl, sign, body) } | IDENT "Module"; "Type"; id = identref; bl = LIST0 module_binder; sign = check_module_types; body = is_module_type -> { VernacDeclareModuleType (id, bl, sign, body) } | IDENT "Declare"; IDENT "Module"; export = export_token; id = identref; bl = LIST0 module_binder; ":"; mty = module_type_inl -> { VernacDeclareModule (export, id, bl, mty) } (* Section beginning *) | IDENT "Section"; id = identref -> { VernacBeginSection id } (* This end a Section a Module or a Module Type *) | IDENT "End"; id = identref -> { VernacEndSegment id } (* Naming a set of section hyps *) | IDENT "Collection"; id = identref; ":="; expr = section_subset_expr -> { VernacNameSectionHypSet (id, expr) } (* Requiring an already compiled module *) | IDENT "Require"; export = export_token; qidl = LIST1 global -> { VernacRequire (None, export, qidl) } | IDENT "From" ; ns = global ; IDENT "Require"; export = export_token ; qidl = LIST1 global -> { VernacRequire (Some ns, export, qidl) } | IDENT "Import"; cats = OPT import_categories; qidl = LIST1 filtered_import -> { VernacImport (false,cats,qidl) } | IDENT "Export"; cats = OPT import_categories; qidl = LIST1 filtered_import -> { VernacImport (true,cats,qidl) } | IDENT "Include"; e = module_type_inl; l = LIST0 ext_module_expr -> { VernacInclude(e::l) } | IDENT "Include"; "Type"; e = module_type_inl; l = LIST0 ext_module_type -> { warn_deprecated_include_type ~loc (); VernacInclude(e::l) } ] ] ; import_categories: [ [ negative = 
OPT "-"; "("; cats = LIST1 qualid SEP ","; ")" -> { let cats = List.map (fun cat -> CAst.make ?loc:cat.CAst.loc (Libnames.string_of_qualid cat)) cats in { negative=Option.has_some negative; import_cats = cats } } ] ] ; filtered_import: [ [ m = global -> { (m, ImportAll) } | m = global; "("; ns = LIST1 one_import_filter_name SEP ","; ")" -> { (m, ImportNames ns) } ] ] ; one_import_filter_name: [ [ n = global; etc = OPT [ "("; ".."; ")" -> { () } ] -> { n, Option.has_some etc } ] ] ; export_token: [ [ IDENT "Import" -> { Some false } | IDENT "Export" -> { Some true } | -> { None } ] ] ; ext_module_type: [ [ "<+"; mty = module_type_inl -> { mty } ] ] ; ext_module_expr: [ [ "<+"; mexpr = module_expr_inl -> { mexpr } ] ] ; check_module_type: [ [ "<:"; mty = module_type_inl -> { mty } ] ] ; check_module_types: [ [ mtys = LIST0 check_module_type -> { mtys } ] ] ; of_module_type: [ [ ":"; mty = module_type_inl -> { Enforce mty } | mtys = check_module_types -> { Check mtys } ] ] ; is_module_type: [ [ ":="; mty = module_type_inl ; l = LIST0 ext_module_type -> { (mty::l) } | -> { [] } ] ] ; is_module_expr: [ [ ":="; mexpr = module_expr_inl; l = LIST0 ext_module_expr -> { (mexpr::l) } | -> { [] } ] ] ; functor_app_annot: [ [ "["; IDENT "inline"; "at"; IDENT "level"; i = natural; "]" -> { InlineAt i } | "["; IDENT "no"; IDENT "inline"; "]" -> { NoInline } | -> { DefaultInline } ] ] ; module_expr_inl: [ [ "!"; me = module_expr -> { (me,NoInline) } | me = module_expr; a = functor_app_annot -> { (me,a) } ] ] ; module_type_inl: [ [ "!"; me = module_type -> { (me,NoInline) } | me = module_type; a = functor_app_annot -> { (me,a) } ] ] ; (* Module binder *) module_binder: [ [ "("; export = export_token; idl = LIST1 identref; ":"; mty = module_type_inl; ")" -> { (export,idl,mty) } ] ] ; (* Module expressions *) module_expr: [ [ me = module_expr_atom -> { me } | me1 = module_expr; me2 = module_expr_atom -> { CAst.make ~loc @@ CMapply (me1,me2) } ] ] ; module_expr_atom: [ [ qid = qualid -> { CAst.make ~loc @@ CMident qid } | "("; me = module_expr; ")" -> { me } ] ] ; with_declaration: [ [ "Definition"; fqid = fullyqualid; udecl = OPT univ_decl; ":="; c = Constr.lconstr -> { CWith_Definition (fqid,udecl,c) } | IDENT "Module"; fqid = fullyqualid; ":="; qid = qualid -> { CWith_Module (fqid,qid) } ] ] ; module_type: [ [ qid = qualid -> { CAst.make ~loc @@ CMident qid } | "("; mt = module_type; ")" -> { mt } | mty = module_type; me = module_expr_atom -> { CAst.make ~loc @@ CMapply (mty,me) } | mty = module_type; "with"; decl = with_declaration -> { CAst.make ~loc @@ CMwith (mty,decl) } ] ] ; (* Proof using *) section_subset_expr: [ [ test_only_starredidentrefs; l = LIST0 starredidentref -> { starredidentreflist_to_expr l } | e = ssexpr -> { e } ]] ; starredidentref: [ [ i = identref -> { SsSingl i } | i = identref; "*" -> { SsFwdClose(SsSingl i) } | "Type" -> { SsType } | "Type"; "*" -> { SsFwdClose SsType } ]] ; ssexpr: [ "35" [ "-"; e = ssexpr -> { SsCompl e } ] | "50" [ e1 = ssexpr; "-"; e2 = ssexpr-> { SsSubstr(e1,e2) } | e1 = ssexpr; "+"; e2 = ssexpr-> { SsUnion(e1,e2) } ] | "0" [ i = starredidentref -> { i } | "("; test_only_starredidentrefs; l = LIST0 starredidentref; ")"-> { starredidentreflist_to_expr l } | "("; test_only_starredidentrefs; l = LIST0 starredidentref; ")"; "*" -> { SsFwdClose(starredidentreflist_to_expr l) } | "("; e = ssexpr; ")"-> { e } | "("; e = ssexpr; ")"; "*" -> { SsFwdClose e } ] ] ; END (* Extensions: implicits, coercions, etc. 
*) GRAMMAR EXTEND Gram GLOBAL: gallina_ext hint_info scope_delimiter; gallina_ext: TOP [ [ (* Transparent and Opaque *) IDENT "Transparent"; l = LIST1 smart_global -> { VernacSetOpacity (Conv_oracle.transparent, l) } | IDENT "Opaque"; l = LIST1 smart_global -> { VernacSetOpacity (Conv_oracle.Opaque, l) } | IDENT "Strategy"; l = LIST1 [ v=strategy_level; "["; q=LIST1 smart_global; "]" -> { (v,q) } ] -> { VernacSetStrategy l } (* Canonical structure *) | IDENT "Canonical"; OPT [ IDENT "Structure" -> {()} ]; qid = global; ud = OPT [ u = OPT univ_decl; d = def_body -> { (u,d) } ] -> { match ud with | None -> VernacCanonical CAst.(make ?loc:qid.CAst.loc @@ AN qid) | Some (u,d) -> let s = coerce_reference_to_id qid in VernacDefinition ((NoDischarge,CanonicalStructure),((CAst.make ?loc:qid.CAst.loc (Name s)),u),d) } | IDENT "Canonical"; OPT [ IDENT "Structure" -> {()} ]; ntn = by_notation -> { VernacCanonical CAst.(make ~loc @@ ByNotation ntn) } (* Coercions *) | IDENT "Coercion"; qid = global; u = OPT univ_decl; d = def_body -> { let s = coerce_reference_to_id qid in VernacDefinition ((NoDischarge,Coercion),((CAst.make ?loc:qid.CAst.loc (Name s)),u),d) } | IDENT "Identity"; IDENT "Coercion"; f = identref; ":"; s = class_rawexpr; ">->"; t = class_rawexpr -> { VernacIdentityCoercion (f, s, t) } | IDENT "Coercion"; qid = global; ":"; s = class_rawexpr; ">->"; t = class_rawexpr -> { VernacCoercion (CAst.make ~loc @@ AN qid, s, t) } | IDENT "Coercion"; ntn = by_notation; ":"; s = class_rawexpr; ">->"; t = class_rawexpr -> { VernacCoercion (CAst.make ~loc @@ ByNotation ntn, s, t) } | IDENT "Context"; c = LIST1 binder -> { VernacContext (List.flatten c) } | IDENT "Instance"; namesup = instance_name; ":"; t = term LEVEL "200"; info = hint_info ; props = [ ":="; "{"; r = record_declaration; "}" -> { Some (true,r) } | ":="; c = lconstr -> { Some (false,c) } | -> { None } ] -> { VernacInstance (fst namesup,snd namesup,t,props,info) } | IDENT "Existing"; IDENT "Instance"; id = global; info = hint_info -> { VernacExistingInstance [id, info] } | IDENT "Existing"; IDENT "Instances"; ids = LIST1 global; pri = OPT [ "|"; i = natural -> { i } ] -> { let info = { Typeclasses.hint_priority = pri; hint_pattern = None } in let insts = List.map (fun i -> (i, info)) ids in VernacExistingInstance insts } | IDENT "Existing"; IDENT "Class"; is = global -> { VernacExistingClass is } (* Arguments *) | IDENT "Arguments"; qid = smart_global; args = LIST0 arg_specs; more_implicits = OPT [ ","; impl = LIST1 [ impl = LIST0 implicits_alt -> { List.flatten impl } ] SEP "," -> { impl } ]; mods = OPT [ ":"; l = LIST1 args_modifier SEP "," -> { l } ] -> { let mods = match mods with None -> [] | Some l -> List.flatten l in let more_implicits = Option.default [] more_implicits in VernacArguments (qid, List.flatten args, more_implicits, mods) } | IDENT "Implicit"; "Type"; bl = reserv_list -> { VernacReserve bl } | IDENT "Implicit"; IDENT "Types"; bl = reserv_list -> { test_plural_form_types loc "Implicit Types" bl; VernacReserve bl } | IDENT "Generalizable"; gen = [IDENT "All"; IDENT "Variables" -> { Some [] } | IDENT "No"; IDENT "Variables" -> { None } | ["Variable" -> { () } | IDENT "Variables" -> { () } ]; idl = LIST1 identref -> { Some idl } ] -> { VernacGeneralizable gen } ] ] ; args_modifier: [ [ IDENT "simpl"; IDENT "nomatch" -> { [`ReductionDontExposeCase] } | IDENT "simpl"; IDENT "never" -> { [`ReductionNeverUnfold] } | IDENT "default"; IDENT "implicits" -> { [`DefaultImplicits] } | IDENT "clear"; IDENT "implicits" -> { 
[`ClearImplicits] } | IDENT "clear"; IDENT "scopes" -> { [`ClearScopes] } | IDENT "clear"; IDENT "bidirectionality"; IDENT "hint" -> { [`ClearBidiHint] } | IDENT "rename" -> { [`Rename] } | IDENT "assert" -> { [`Assert] } | IDENT "extra"; IDENT "scopes" -> { [`ExtraScopes] } | IDENT "clear"; IDENT "scopes"; IDENT "and"; IDENT "implicits" -> { [`ClearImplicits; `ClearScopes] } | IDENT "clear"; IDENT "implicits"; IDENT "and"; IDENT "scopes" -> { [`ClearImplicits; `ClearScopes] } ] ] ; scope_delimiter: [ [ "%"; key = IDENT -> { key } ] ] ; argument_spec: [ [ b = OPT "!"; id = name ; s = OPT scope_delimiter -> { id.CAst.v, not (Option.is_empty b), Option.map (fun x -> CAst.make ~loc x) s } ] ]; (* List of arguments implicit status, scope, modifiers *) arg_specs: [ [ item = argument_spec -> { let name, recarg_like, notation_scope = item in [RealArg { name=name; recarg_like=recarg_like; notation_scope=notation_scope; implicit_status = Explicit}] } | "/" -> { [VolatileArg] } | "&" -> { [BidiArg] } | "("; items = LIST1 argument_spec; ")"; sc = OPT scope_delimiter -> { let f x = match sc, x with | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x | Some _, Some _ -> user_err ~loc Pp.(str "scope declared twice") in List.map (fun (name,recarg_like,notation_scope) -> RealArg { name=name; recarg_like=recarg_like; notation_scope=f notation_scope; implicit_status = Explicit}) items } | "["; items = LIST1 argument_spec; "]"; sc = OPT scope_delimiter -> { let f x = match sc, x with | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x | Some _, Some _ -> user_err ~loc Pp.(str "scope declared twice") in List.map (fun (name,recarg_like,notation_scope) -> RealArg { name=name; recarg_like=recarg_like; notation_scope=f notation_scope; implicit_status = NonMaxImplicit}) items } | "{"; items = LIST1 argument_spec; "}"; sc = OPT scope_delimiter -> { let f x = match sc, x with | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x | Some _, Some _ -> user_err ~loc Pp.(str "scope declared twice") in List.map (fun (name,recarg_like,notation_scope) -> RealArg { name=name; recarg_like=recarg_like; notation_scope=f notation_scope; implicit_status = MaxImplicit}) items } ] ]; (* Same as [arg_specs], but with only implicit status and names *) implicits_alt: [ [ name = name -> { [(name.CAst.v, Explicit)] } | "["; items = LIST1 name; "]" -> { List.map (fun name -> (name.CAst.v, NonMaxImplicit)) items } | "{"; items = LIST1 name; "}" -> { List.map (fun name -> (name.CAst.v, MaxImplicit)) items } ] ]; instance_name: [ [ name = ident_decl; bl = binders -> { (CAst.map (fun id -> Name id) (fst name), snd name), bl } | -> { ((CAst.make ~loc Anonymous), None), [] } ] ] ; hint_info: [ [ "|"; i = OPT natural; pat = OPT constr_pattern -> { { Typeclasses.hint_priority = i; hint_pattern = pat } } | -> { { Typeclasses.hint_priority = None; hint_pattern = None } } ] ] ; reserv_list: [ [ bl = LIST1 reserv_tuple -> { bl } | b = simple_reserv -> { [b] } ] ] ; reserv_tuple: [ [ "("; a = simple_reserv; ")" -> { a } ] ] ; simple_reserv: [ [ idl = LIST1 identref; ":"; c = lconstr -> { (idl,c) } ] ] ; END GRAMMAR EXTEND Gram GLOBAL: command query_command class_rawexpr gallina_ext search_query search_queries; gallina_ext: TOP [ [ IDENT "Export"; "Set"; table = setting_name; v = option_setting -> { VernacSetOption (true, table, v) } | IDENT "Export"; IDENT "Unset"; table = setting_name -> { VernacSetOption (true, table, OptionUnset) } ] ]; command: [ [ IDENT "Comments"; l = LIST0 comment -> { 
VernacComments l } (* Hack! Should be in grammar_ext, but camlp5 factorizes badly *) | IDENT "Declare"; IDENT "Instance"; id = ident_decl; bl = binders; ":"; t = term LEVEL "200"; info = hint_info -> { VernacDeclareInstance (id, bl, t, info) } (* Should be in syntax, but camlp5 would not factorize *) | IDENT "Declare"; IDENT "Scope"; sc = IDENT -> { VernacDeclareScope sc } (* System directory *) | IDENT "Pwd" -> { VernacChdir None } | IDENT "Cd" -> { VernacChdir None } | IDENT "Cd"; dir = ne_string -> { VernacChdir (Some dir) } | IDENT "Load"; verbosely = [ IDENT "Verbose" -> { true } | -> { false } ]; s = [ s = ne_string -> { s } | s = IDENT -> { s } ] -> { VernacLoad (verbosely, s) } | IDENT "Declare"; IDENT "ML"; IDENT "Module"; l = LIST1 ne_string -> { VernacDeclareMLModule l } | IDENT "Locate"; l = locatable -> { VernacLocate l } (* Managing load paths *) | IDENT "Add"; IDENT "LoadPath"; physical_path = ne_string; "as"; logical_path = dirpath -> { VernacAddLoadPath { implicit = false; logical_path; physical_path } } | IDENT "Add"; IDENT "Rec"; IDENT "LoadPath"; physical_path = ne_string; "as"; logical_path = dirpath -> { VernacAddLoadPath { implicit = true; logical_path; physical_path } } | IDENT "Remove"; IDENT "LoadPath"; dir = ne_string -> { VernacRemoveLoadPath dir } (* Type-Checking *) | "Type"; c = lconstr -> { VernacGlobalCheck c } (* Printing (careful factorization of entries) *) | IDENT "Print"; p = printable -> { VernacPrint p } | IDENT "Print"; qid = smart_global; l = OPT univ_name_list -> { VernacPrint (PrintName (qid,l)) } | IDENT "Print"; IDENT "Module"; "Type"; qid = global -> { VernacPrint (PrintModuleType qid) } | IDENT "Print"; IDENT "Module"; qid = global -> { VernacPrint (PrintModule qid) } | IDENT "Print"; IDENT "Namespace" ; ns = dirpath -> { VernacPrint (PrintNamespace ns) } | IDENT "Inspect"; n = natural -> { VernacPrint (PrintInspect n) } | IDENT "Add"; IDENT "ML"; IDENT "Path"; dir = ne_string -> { VernacAddMLPath dir } (* For acting on parameter tables *) | "Set"; table = setting_name; v = option_setting -> { VernacSetOption (false, table, v) } | IDENT "Unset"; table = setting_name -> { VernacSetOption (false, table, OptionUnset) } | IDENT "Print"; IDENT "Table"; table = setting_name -> { VernacPrintOption table } | IDENT "Add"; table = IDENT; field = IDENT; v = LIST1 table_value -> { VernacAddOption ([table;field], v) } (* A global value below will be hidden by a field above! *) (* In fact, we give priority to secondary tables *) (* No syntax for tertiary tables due to conflict *) (* (but they are unused anyway) *) | IDENT "Add"; table = IDENT; v = LIST1 table_value -> { VernacAddOption ([table], v) } | IDENT "Test"; table = setting_name; "for"; v = LIST1 table_value -> { VernacMemOption (table, v) } | IDENT "Test"; table = setting_name -> { VernacPrintOption table } | IDENT "Remove"; table = IDENT; field = IDENT; v= LIST1 table_value -> { VernacRemoveOption ([table;field], v) } | IDENT "Remove"; table = IDENT; v = LIST1 table_value -> { VernacRemoveOption ([table], v) } ]] ; query_command: (* TODO: rapprocher Eval et Check *) [ [ IDENT "Eval"; r = red_expr; "in"; c = lconstr; "." -> { fun g -> VernacCheckMayEval (Some r, g, c) } | IDENT "Compute"; c = lconstr; "." -> { fun g -> VernacCheckMayEval (Some (Genredexpr.CbvVm None), g, c) } | IDENT "Check"; c = lconstr; "." -> { fun g -> VernacCheckMayEval (None, g, c) } (* Searching the environment *) | IDENT "About"; qid = smart_global; l = OPT univ_name_list; "." 
-> { fun g -> VernacPrint (PrintAbout (qid,l,g)) } | IDENT "SearchPattern"; c = constr_pattern; l = in_or_out_modules; "." -> { fun g -> VernacSearch (SearchPattern c,g, l) } | IDENT "SearchRewrite"; c = constr_pattern; l = in_or_out_modules; "." -> { fun g -> VernacSearch (SearchRewrite c,g, l) } | IDENT "Search"; s = search_query; l = search_queries; "." -> { let (sl,m) = l in fun g -> VernacSearch (Search (s::sl),g, m) } ] ] ; printable: [ [ IDENT "Term"; qid = smart_global; l = OPT univ_name_list -> { PrintName (qid,l) } | IDENT "All" -> { PrintFullContext } | IDENT "Section"; s = global -> { PrintSectionContext s } | IDENT "Grammar"; ent = IDENT -> (* This should be in "syntax" section but is here for factorization*) { PrintGrammar ent } | IDENT "Custom"; IDENT "Grammar"; ent = IDENT -> (* Should also be in "syntax" section *) { PrintCustomGrammar ent } | IDENT "LoadPath"; dir = OPT dirpath -> { PrintLoadPath dir } | IDENT "Libraries" -> { PrintLibraries } | IDENT "ML"; IDENT "Path" -> { PrintMLLoadPath } | IDENT "ML"; IDENT "Modules" -> { PrintMLModules } | IDENT "Debug"; IDENT "GC" -> { PrintDebugGC } | IDENT "Graph" -> { PrintGraph } | IDENT "Classes" -> { PrintClasses } | IDENT "TypeClasses" -> { PrintTypeClasses } | IDENT "Instances"; qid = smart_global -> { PrintInstances qid } | IDENT "Coercions" -> { PrintCoercions } | IDENT "Coercion"; IDENT "Paths"; s = class_rawexpr; t = class_rawexpr -> { PrintCoercionPaths (s,t) } | IDENT "Canonical"; IDENT "Projections"; qids = LIST0 smart_global -> { PrintCanonicalConversions qids } | IDENT "Typing"; IDENT "Flags" -> { PrintTypingFlags } | IDENT "Tables" -> { PrintTables } | IDENT "Options" -> { PrintTables (* A Synonymous to Tables *) } | IDENT "Hint" -> { PrintHintGoal } | IDENT "Hint"; qid = smart_global -> { PrintHint qid } | IDENT "Hint"; "*" -> { PrintHintDb } | IDENT "HintDb"; s = IDENT -> { PrintHintDbName s } | IDENT "Scopes" -> { PrintScopes } | IDENT "Scope"; s = IDENT -> { PrintScope s } | IDENT "Visibility"; s = OPT IDENT -> { PrintVisibility s } | IDENT "Implicit"; qid = smart_global -> { PrintImplicit qid } | b = [ IDENT "Sorted" -> { true } | -> { false } ]; IDENT "Universes"; g = OPT printunivs_subgraph; fopt = OPT ne_string -> { PrintUniverses (b, g, fopt) } | IDENT "Assumptions"; qid = smart_global -> { PrintAssumptions (false, false, qid) } | IDENT "Opaque"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (true, false, qid) } | IDENT "Transparent"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (false, true, qid) } | IDENT "All"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (true, true, qid) } | IDENT "Strategy"; qid = smart_global -> { PrintStrategy (Some qid) } | IDENT "Strategies" -> { PrintStrategy None } | IDENT "Registered" -> { PrintRegistered } ] ] ; printunivs_subgraph: [ [ IDENT "Subgraph"; "("; l = LIST0 reference; ")" -> { l } ] ] ; class_rawexpr: [ [ IDENT "Funclass" -> { FunClass } | IDENT "Sortclass" -> { SortClass } | qid = smart_global -> { RefClass qid } ] ] ; locatable: [ [ qid = smart_global -> { LocateAny qid } | IDENT "Term"; qid = smart_global -> { LocateTerm qid } | IDENT "File"; f = ne_string -> { LocateFile f } | IDENT "Library"; qid = global -> { LocateLibrary qid } | IDENT "Module"; qid = global -> { LocateModule qid } ] ] ; option_setting: [ [ -> { OptionSetTrue } | n = integer -> { OptionSetInt n } | s = STRING -> { OptionSetString s } ] ] ; table_value: [ [ id = global -> { Goptions.QualidRefValue id } | s = STRING -> { 
Goptions.StringRefValue s } ] ] ; setting_name: [ [ fl = LIST1 [ x = IDENT -> { x } ] -> { fl } ]] ; ne_in_or_out_modules: [ [ IDENT "inside"; l = LIST1 global -> { SearchInside l } | "in"; l = LIST1 global -> { SearchInside l } | IDENT "outside"; l = LIST1 global -> { SearchOutside l } ] ] ; in_or_out_modules: [ [ m = ne_in_or_out_modules -> { m } | -> { SearchOutside [] } ] ] ; comment: [ [ c = constr -> { CommentConstr c } | s = STRING -> { CommentString s } | n = natural -> { CommentInt n } ] ] ; positive_search_mark: [ [ "-" -> { false } | -> { true } ] ] ; search_query: [ [ b = positive_search_mark; s = search_item -> { (b, SearchLiteral s) } | b = positive_search_mark; "["; l = LIST1 (LIST1 search_query) SEP "|"; "]" -> { (b, SearchDisjConj l) } ] ] ; search_item: [ [ test_id_colon; where = search_where; ":"; s = ne_string; sc = OPT scope_delimiter -> { SearchString (where,s,sc) } | IDENT "is"; ":"; kl = logical_kind -> { SearchKind kl } | s = ne_string; sc = OPT scope_delimiter -> { SearchString ((Anywhere,false),s,sc) } | test_id_colon; where = search_where; ":"; p = constr_pattern -> { SearchSubPattern (where,p) } | p = constr_pattern -> { SearchSubPattern ((Anywhere,false),p) } ] ] ; logical_kind: [ [ k = thm_token -> { IsProof k } | k = assumption_token -> { IsAssumption (snd k) } | k = IDENT "Context" -> { IsAssumption Context } | k = extended_def_token -> { IsDefinition k } | IDENT "Primitive" -> { IsPrimitive } ] ] ; extended_def_token: [ [ k = def_token -> { snd k } | IDENT "Coercion" -> { Coercion } | IDENT "Instance" -> { Instance } | IDENT "Scheme" -> { Scheme } | IDENT "Canonical" -> { CanonicalStructure } | IDENT "Field" -> { StructureComponent } | IDENT "Method" -> { Method } ] ] ; search_where: [ [ IDENT "head" -> { Anywhere, true } | IDENT "hyp" -> { InHyp, false } | IDENT "concl" -> { InConcl, false } | IDENT "headhyp" -> { InHyp, true } | IDENT "headconcl" -> { InConcl, true } ] ] ; search_queries: [ [ m = ne_in_or_out_modules -> { ([],m) } | s = search_query; l = search_queries -> { let (sl,m) = l in (s::sl,m) } | -> { ([],SearchOutside []) } ] ] ; univ_name_list: [ [ "@{" ; l = LIST0 name; "}" -> { l } ] ] ; END GRAMMAR EXTEND Gram GLOBAL: command; command: TOP [ [ (* Resetting *) IDENT "Reset"; IDENT "Initial" -> { VernacResetInitial } | IDENT "Reset"; id = identref -> { VernacResetName id } | IDENT "Back" -> { VernacBack 1 } | IDENT "Back"; n = natural -> { VernacBack n } (* Tactic Debugger *) | IDENT "Debug"; IDENT "On" -> { VernacSetOption (false, ["Ltac";"Debug"], OptionSetTrue) } | IDENT "Debug"; IDENT "Off" -> { VernacSetOption (false, ["Ltac";"Debug"], OptionUnset) } (* registration of a custom reduction *) | IDENT "Declare"; IDENT "Reduction"; s = IDENT; ":="; r = red_expr -> { VernacDeclareReduction (s,r) } (* factorized here, though relevant for syntax extensions *) | IDENT "Declare"; IDENT "Custom"; IDENT "Entry"; s = IDENT -> { VernacDeclareCustomEntry s } ] ]; END (* Grammar extensions *) GRAMMAR EXTEND Gram GLOBAL: syntax syntax_modifiers; syntax: [ [ IDENT "Open"; IDENT "Scope"; sc = IDENT -> { VernacOpenCloseScope (true,sc) } | IDENT "Close"; IDENT "Scope"; sc = IDENT -> { VernacOpenCloseScope (false,sc) } | IDENT "Delimit"; IDENT "Scope"; sc = IDENT; "with"; key = IDENT -> { VernacDelimiters (sc, Some key) } | IDENT "Undelimit"; IDENT "Scope"; sc = IDENT -> { VernacDelimiters (sc, None) } | IDENT "Bind"; IDENT "Scope"; sc = IDENT; "with"; refl = LIST1 class_rawexpr -> { VernacBindScope (sc,refl) } | IDENT "Infix"; op = ne_lstring; ":="; p = 
constr; modl = syntax_modifiers; sc = OPT [ ":"; sc = IDENT -> { sc } ] -> { VernacNotation (true,p,(op,modl),sc) } | IDENT "Notation"; id = identref; idl = LIST0 ident; ":="; c = constr; modl = syntax_modifiers -> { VernacSyntacticDefinition (id,(idl,c), modl) } | IDENT "Notation"; s = lstring; ":="; c = constr; modl = syntax_modifiers; sc = OPT [ ":"; sc = IDENT -> { sc } ] -> { VernacNotation (false,c,(s,modl),sc) } | IDENT "Format"; IDENT "Notation"; n = STRING; s = STRING; fmt = STRING -> { VernacNotationAddFormat (n,s,fmt) } | IDENT "Reserved"; IDENT "Infix"; s = ne_lstring; l = syntax_modifiers -> { VernacReservedNotation (true,(s,l)) } | IDENT "Reserved"; IDENT "Notation"; s = ne_lstring; l = syntax_modifiers -> { VernacReservedNotation (false,(s,l)) } (* "Print" "Grammar" and "Declare" "Scope" should be here but are in "command" entry in order to factorize with other "Print"-based or "Declare"-based vernac entries *) ] ] ; level: [ [ IDENT "level"; n = natural -> { NumLevel n } | IDENT "next"; IDENT "level" -> { NextLevel } ] ] ; syntax_modifier: [ [ "at"; IDENT "level"; n = natural -> { SetLevel n } | "in"; IDENT "custom"; x = IDENT -> { SetCustomEntry (x,None) } | "in"; IDENT "custom"; x = IDENT; "at"; IDENT "level"; n = natural -> { SetCustomEntry (x,Some n) } | IDENT "left"; IDENT "associativity" -> { SetAssoc Gramlib.Gramext.LeftA } | IDENT "right"; IDENT "associativity" -> { SetAssoc Gramlib.Gramext.RightA } | IDENT "no"; IDENT "associativity" -> { SetAssoc Gramlib.Gramext.NonA } | IDENT "only"; IDENT "printing" -> { SetOnlyPrinting } | IDENT "only"; IDENT "parsing" -> { SetOnlyParsing } | IDENT "format"; s1 = [s = STRING -> { CAst.make ~loc s } ]; s2 = OPT [s = STRING -> { CAst.make ~loc s } ] -> { begin match s1, s2 with | { CAst.v = k }, Some s -> SetFormat (ExtraFormat (k,s)) | s, None -> SetFormat (TextFormat s) end } | x = IDENT; ","; l = LIST1 IDENT SEP ","; v = [ "at"; lev = level -> { fun x l -> SetItemLevel (x::l,None,lev) } | "in"; IDENT "scope"; k = IDENT -> { fun x l -> SetItemScope(x::l,k) } ] -> { v x l } | x = IDENT; "at"; lev = level; b = OPT binder_interp -> { SetItemLevel ([x],b,lev) } | x = IDENT; "in"; IDENT "scope"; k = IDENT -> { SetItemScope([x],k) } | x = IDENT; b = binder_interp -> { SetItemLevel ([x],Some b,DefaultLevel) } | x = IDENT; typ = explicit_subentry -> { SetEntryType (x,typ) } ] ] ; syntax_modifiers: [ [ "("; l = LIST1 [ s = syntax_modifier -> { CAst.make ~loc s } ] SEP ","; ")" -> { l } | -> { [] } ] ] ; explicit_subentry: [ [ (* Warning to be turn into an error at the end of deprecation phase (for 8.14) *) IDENT "ident" -> { ETName false } (* To be activated at the end of transitory phase (for 8.15) | IDENT "ident" -> { ETIdent } *) | IDENT "name" -> { ETName true } (* Boolean to remove at the end of transitory phase *) | IDENT "global" -> { ETGlobal } | IDENT "bigint" -> { ETBigint } | IDENT "binder" -> { ETBinder true } | IDENT "constr" -> { ETConstr (InConstrEntry,None,DefaultLevel) } | IDENT "constr"; n = at_level_opt; b = OPT binder_interp -> { ETConstr (InConstrEntry,b,n) } | IDENT "pattern" -> { ETPattern (false,None) } | IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (false,Some n) } | IDENT "strict"; IDENT "pattern" -> { ETPattern (true,None) } | IDENT "strict"; IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (true,Some n) } | IDENT "closed"; IDENT "binder" -> { ETBinder false } | IDENT "custom"; x = IDENT; n = at_level_opt; b = OPT binder_interp -> { ETConstr (InCustomEntry x,b,n) } ] ] ; 
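(* Helpers for the notation modifiers above: [at_level_opt] parses an
   optional "at <level>" annotation (defaulting to [DefaultLevel]) and
   [binder_interp] parses the "as ident/name/pattern/strict pattern"
   binder interpretations. *)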
at_level_opt: [ [ "at"; n = level -> { n } | -> { DefaultLevel } ] ] ; binder_interp: [ [ "as"; IDENT "ident" -> { warn_deprecated_as_ident_kind (); Notation_term.AsIdent } | "as"; IDENT "name" -> { Notation_term.AsName } | "as"; IDENT "pattern" -> { Notation_term.AsNameOrPattern } | "as"; IDENT "strict"; IDENT "pattern" -> { Notation_term.AsStrictPattern } ] ] ; END coq-8.15.0/vernac/himsg.ml000066400000000000000000001733131417001151100153050ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* l := (Vars.substl !l c') :: !l; env | _ -> let t = Vars.substl !l (RelDecl.get_type decl) in let decl = decl |> RelDecl.map_name (named_hd env sigma t) |> RelDecl.map_value (Vars.substl !l) |> RelDecl.set_type t in l := (mkRel 1) :: List.map (Vars.lift 1) !l; push_rel decl env in let env = process_rel_context contract_context env in (env, List.map (Vars.substl !l) lc) let contract2 env sigma a b = match contract env sigma [a;b] with | env, [a;b] -> env,a,b | _ -> assert false let contract3 env sigma a b c = match contract env sigma [a;b;c] with | env, [a;b;c] -> env,a,b,c | _ -> assert false let contract4 env sigma a b c d = match contract env sigma [a;b;c;d] with | env, [a;b;c;d] -> (env,a,b,c),d | _ -> assert false let contract1 env sigma a v = match contract env sigma (a :: v) with | env, a::l -> env,a,l | _ -> assert false let rec contract3' env sigma a b c = function | OccurCheck (evk,d) -> let x,d = contract4 env sigma a b c d in x,OccurCheck(evk, d) | NotClean ((evk,args),env',d) -> let env',d,args = contract1 env' sigma d args in contract3 env sigma a b c,NotClean((evk,args),env',d) | ConversionFailed (env',t1,t2) -> let (env',t1,t2) = contract2 env' sigma t1 t2 in contract3 env sigma a b c, ConversionFailed (env',t1,t2) | IncompatibleInstances (env',ev,t1,t2) -> let (env',ev,t1,t2) = contract3 env' sigma (EConstr.mkEvar ev) t1 t2 in contract3 env sigma a b c, IncompatibleInstances (env',EConstr.destEvar sigma ev,t1,t2) | NotSameArgSize | NotSameHead | NoCanonicalStructure | MetaOccurInBody _ | InstanceNotSameType _ | InstanceNotFunctionalType _ | ProblemBeyondCapabilities | UnifUnivInconsistency _ as x -> contract3 env sigma a b c, x | CannotSolveConstraint ((pb,env',t,u),x) -> let env',t,u = contract2 env' sigma t u in let y,x = contract3' env sigma a b c x in y,CannotSolveConstraint ((pb,env',t,u),x) (** Ad-hoc reductions *) let j_nf_betaiotaevar env sigma j = { uj_val = j.uj_val; uj_type = Reductionops.nf_betaiota env sigma j.uj_type } let jv_nf_betaiotaevar env sigma jl = Array.Smart.map (fun j -> j_nf_betaiotaevar env sigma j) jl (** Printers *) let pr_lconstr_env e s c = quote (pr_lconstr_env e s c) let pr_leconstr_env e s c = quote (pr_leconstr_env e s c) let pr_ljudge_env e s c = let v,t = pr_ljudge_env e s c in (quote v,quote t) (** A canonisation procedure for constr such that comparing there externalisation catches more equalities *) let canonize_constr sigma c = (* replaces all the names in binders by [dn] ("default name"), ensures that [alpha]-equivalent terms will have the same externalisation. 
*) let open EConstr in let dn = Name.Anonymous in let rec canonize_binders c = match EConstr.kind sigma c with | Prod (x,t,b) -> mkProd({x with binder_name=dn},t,b) | Lambda (x,t,b) -> mkLambda({x with binder_name=dn},t,b) | LetIn (x,u,t,b) -> mkLetIn({x with binder_name=dn},u,t,b) | _ -> EConstr.map sigma canonize_binders c in canonize_binders c let rec display_expr_eq c1 c2 = let open Constrexpr in match CAst.(c1.v, c2.v) with | (CHole _ | CEvar _), _ | _, (CEvar _ | CHole _) -> true | _ -> Constrexpr_ops.constr_expr_eq_gen display_expr_eq c1 c2 (** Tries to realize when the two terms, albeit different are printed the same. *) let display_eq ~flags env sigma t1 t2 = (* terms are canonized, then their externalisation is compared syntactically *) let open Constrextern in let t1 = canonize_constr sigma t1 in let t2 = canonize_constr sigma t2 in let ct1 = Flags.with_options flags (fun () -> extern_constr env sigma t1) () in let ct2 = Flags.with_options flags (fun () -> extern_constr env sigma t2) () in display_expr_eq ct1 ct2 (** This function adds some explicit printing flags if the two arguments are printed alike. *) let rec pr_explicit_aux env sigma t1 t2 = function | [] -> (* no specified flags: default. *) Printer.pr_leconstr_env env sigma t1, Printer.pr_leconstr_env env sigma t2 | flags :: rem -> let equal = display_eq ~flags env sigma t1 t2 in if equal then (* The two terms are the same from the user point of view *) pr_explicit_aux env sigma t1 t2 rem else let open Constrextern in let ct1 = Flags.with_options flags (fun () -> extern_constr env sigma t1) () in let ct2 = Flags.with_options flags (fun () -> extern_constr env sigma t2) () in Ppconstr.pr_lconstr_expr env sigma ct1, Ppconstr.pr_lconstr_expr env sigma ct2 let explicit_flags = let open Constrextern in [ []; (* First, try with the current flags *) [print_implicits]; (* Then with implicit *) [print_universes]; (* Then with universes *) [print_universes; print_implicits]; (* With universes AND implicits *) [print_implicits; print_coercions; print_no_symbol]; (* Then more! *) [print_universes; print_implicits; print_coercions; print_no_symbol] (* and more! *) ] let with_diffs pm pn = if not (Proof_diffs.show_diffs ()) then pm, pn else try let tokenize_string = Proof_diffs.tokenize_string in Pp_diff.diff_pp ~tokenize_string pm pn with Pp_diff.Diff_Failure msg -> begin try ignore(Sys.getenv("HIDEDIFFFAILUREMSG")) with Not_found -> Proof_diffs.notify_proof_diff_failure msg end; pm, pn let pr_explicit env sigma t1 t2 = let p1, p2 = pr_explicit_aux env sigma t1 t2 explicit_flags in let p1, p2 = with_diffs p1 p2 in quote p1, quote p2 let pr_db env i = try match env |> lookup_rel i |> get_name with | Name id -> Id.print id | Anonymous -> str "<>" with Not_found -> str "UNBOUND_REL_" ++ int i let explain_unbound_rel env sigma n = let pe = pr_ne_context_of (str "In environment") env sigma in str "Unbound reference: " ++ pe ++ str "The reference " ++ int n ++ str " is free." let explain_unbound_var env v = let var = Id.print v in str "No such section variable or assumption: " ++ var ++ str "." let explain_not_type env sigma j = let pe = pr_ne_context_of (str "In environment") env sigma in let pc,pt = pr_ljudge_env env sigma j in pe ++ str "The term" ++ brk(1,1) ++ pc ++ spc () ++ str "has type" ++ spc () ++ pt ++ spc () ++ str "which should be Set, Prop or Type." 
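(* The [explain_*] functions each turn one pretyping or kernel error
   into a user-facing message, printed in the offending environment. *)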
let explain_bad_assumption env sigma j = let pe = pr_ne_context_of (str "In environment") env sigma in let pc,pt = pr_ljudge_env env sigma j in pe ++ str "Cannot declare a variable or hypothesis over the term" ++ brk(1,1) ++ pc ++ spc () ++ str "of type" ++ spc () ++ pt ++ spc () ++ str "because this term is not a type." let explain_reference_variables sigma id c = pr_global c ++ strbrk " depends on the variable " ++ Id.print id ++ strbrk " which is not declared in the context." let rec pr_disjunction pr = function | [a] -> pr a | [a;b] -> pr a ++ str " or" ++ spc () ++ pr b | a::l -> pr a ++ str "," ++ spc () ++ pr_disjunction pr l | [] -> assert false let explain_elim_arity env sigma ind c pj okinds = let open EConstr in let env = make_all_name_different env sigma in let pi = pr_inductive env (fst ind) in let pc = pr_leconstr_env env sigma c in let msg = match okinds with | Some(sorts,kp,ki,explanation) -> let sorts = Inductiveops.sorts_below sorts in let pki = Sorts.pr_sort_family ki in let pkp = Sorts.pr_sort_family kp in let explanation = match explanation with | NonInformativeToInformative -> "proofs can be eliminated only to build proofs" | StrongEliminationOnNonSmallType -> "strong elimination on non-small inductive types leads to paradoxes" | WrongArity -> "wrong arity" in let ppar = pr_disjunction (fun s -> quote (Sorts.pr_sort_family s)) sorts in let ppt = pr_leconstr_env env sigma (snd (decompose_prod_assum sigma pj.uj_type)) in hov 0 (str "the return type has sort" ++ spc () ++ ppt ++ spc () ++ str "while it" ++ spc () ++ str "should be " ++ ppar ++ str ".") ++ fnl () ++ hov 0 (str "Elimination of an inductive object of sort " ++ pki ++ brk(1,0) ++ str "is not allowed on a predicate in sort " ++ pkp ++ fnl () ++ str "because" ++ spc () ++ str explanation ++ str ".") | None -> str "ill-formed elimination predicate." in hov 0 ( str "Incorrect elimination of" ++ spc () ++ pc ++ spc () ++ str "in the inductive type" ++ spc () ++ quote pi ++ str ":") ++ fnl () ++ msg let explain_case_not_inductive env sigma cj = let env = make_all_name_different env sigma in let pc = pr_leconstr_env env sigma cj.uj_val in let pct = pr_leconstr_env env sigma cj.uj_type in match EConstr.kind sigma cj.uj_type with | Evar _ -> str "Cannot infer a type for this expression." | _ -> str "The term" ++ brk(1,1) ++ pc ++ spc () ++ str "has type" ++ brk(1,1) ++ pct ++ spc () ++ str "which is not a (co-)inductive type." let explain_number_branches env sigma cj expn = let env = make_all_name_different env sigma in let pc = pr_leconstr_env env sigma cj.uj_val in let pct = pr_leconstr_env env sigma cj.uj_type in str "Matching on term" ++ brk(1,1) ++ pc ++ spc () ++ str "of type" ++ brk(1,1) ++ pct ++ spc () ++ str "expects " ++ int expn ++ str " branches." let explain_ill_formed_branch env sigma c ci actty expty = let simp t = Reductionops.nf_betaiota env sigma t in let env = make_all_name_different env sigma in let pc = pr_leconstr_env env sigma c in let pa, pe = pr_explicit env sigma (simp actty) (simp expty) in strbrk "In pattern-matching on term" ++ brk(1,1) ++ pc ++ spc () ++ strbrk "the branch for constructor" ++ spc () ++ quote (pr_pconstructor env sigma ci) ++ spc () ++ str "has type" ++ brk(1,1) ++ pa ++ spc () ++ str "which should be" ++ brk(1,1) ++ pe ++ str "." 
let explain_generalization env sigma (name,var) j = let pe = pr_ne_context_of (str "In environment") env sigma in let pv = pr_letype_env env sigma var in let (pc,pt) = pr_ljudge_env (push_rel_assum (make_annot name Sorts.Relevant,var) env) sigma j in pe ++ str "Cannot generalize" ++ brk(1,1) ++ pv ++ spc () ++ str "over" ++ brk(1,1) ++ pc ++ str "," ++ spc () ++ str "it has type" ++ spc () ++ pt ++ spc () ++ str "which should be Set, Prop or Type." let explain_unification_error env sigma p1 p2 = function | None -> mt() | Some e -> let rec aux p1 p2 = function | OccurCheck (evk,rhs) -> [str "cannot define " ++ quote (pr_existential_key env sigma evk) ++ strbrk " with term " ++ pr_leconstr_env env sigma rhs ++ strbrk " that would depend on itself"] | NotClean ((evk,args),env,c) -> let env = make_all_name_different env sigma in [str "cannot instantiate " ++ quote (pr_existential_key env sigma evk) ++ strbrk " because " ++ pr_leconstr_env env sigma c ++ strbrk " is not in its scope" ++ (if List.is_empty args then mt() else strbrk ": available arguments are " ++ pr_sequence (pr_leconstr_env env sigma) (List.rev args))] | NotSameArgSize | NotSameHead | NoCanonicalStructure -> (* Error speaks from itself *) [] | ConversionFailed (env,t1,t2) -> let t1 = Reductionops.nf_betaiota env sigma t1 in let t2 = Reductionops.nf_betaiota env sigma t2 in if EConstr.eq_constr sigma t1 p1 && EConstr.eq_constr sigma t2 p2 then [] else let env = make_all_name_different env sigma in if not (EConstr.eq_constr sigma t1 p1) || not (EConstr.eq_constr sigma t2 p2) then let t1, t2 = pr_explicit env sigma t1 t2 in [str "cannot unify " ++ t1 ++ strbrk " and " ++ t2] else [] | IncompatibleInstances (env,ev,t1,t2) -> let env = make_all_name_different env sigma in let ev = pr_leconstr_env env sigma (EConstr.mkEvar ev) in let t1 = Reductionops.nf_betaiota env sigma t1 in let t2 = Reductionops.nf_betaiota env sigma t2 in let t1, t2 = pr_explicit env sigma t1 t2 in [ev ++ strbrk " has otherwise to unify with " ++ t1 ++ str " which is incompatible with " ++ t2] | MetaOccurInBody evk -> [str "instance for " ++ quote (pr_existential_key env sigma evk) ++ strbrk " refers to a metavariable - please report your example" ++ strbrk "at " ++ str Coq_config.wwwbugtracker ++ str "."] | InstanceNotSameType (evk,env,t,u) -> let t, u = pr_explicit env sigma t u in [str "unable to find a well-typed instantiation for " ++ quote (pr_existential_key env sigma evk) ++ strbrk ": cannot ensure that " ++ t ++ strbrk " is a subtype of " ++ u] | InstanceNotFunctionalType (evk,env,f,u) -> let env = make_all_name_different env sigma in let f = pr_leconstr_env env sigma f in let u = pr_leconstr_env env sigma u in [str "unable to find a well-typed instantiation for " ++ quote (pr_existential_key env sigma evk) ++ strbrk ": " ++ f ++ strbrk " is expected to have a functional type but it has type " ++ u] | UnifUnivInconsistency p -> [str "universe inconsistency: " ++ Univ.explain_universe_inconsistency (Termops.pr_evd_level sigma) p] | CannotSolveConstraint ((pb,env,t,u),e) -> let env = make_all_name_different env sigma in (strbrk "cannot satisfy constraint " ++ pr_leconstr_env env sigma t ++ str " == " ++ pr_leconstr_env env sigma u) :: aux t u e | ProblemBeyondCapabilities -> [] in match aux p1 p2 e with | [] -> mt () | l -> spc () ++ str "(" ++ prlist_with_sep pr_semicolon (fun x -> x) l ++ str ")" let explain_actual_type env sigma j t reason = let env = make_all_name_different env sigma in let j = j_nf_betaiotaevar env sigma j in let t = 
Reductionops.nf_betaiota env sigma t in (* Actually print *) let pe = pr_ne_context_of (str "In environment") env sigma in let pc = pr_leconstr_env env sigma (Environ.j_val j) in let (pt, pct) = pr_explicit env sigma t (Environ.j_type j) in let ppreason = explain_unification_error env sigma j.uj_type t reason in pe ++ hov 0 ( str "The term" ++ brk(1,1) ++ pc ++ spc () ++ str "has type" ++ brk(1,1) ++ pct ++ spc () ++ str "while it is expected to have type" ++ brk(1,1) ++ pt ++ ppreason ++ str ".") let explain_incorrect_primitive env sigma j exp = let env = make_all_name_different env sigma in let {uj_val=p;uj_type=t} = j in let t = Reductionops.nf_betaiota env sigma t in let exp = Reductionops.nf_betaiota env sigma exp in (* Actually print *) let pe = pr_ne_context_of (str "In environment") env sigma in let (pt, pct) = pr_explicit env sigma exp t in pe ++ hov 0 ( str "The primitive" ++ brk(1,1) ++ str (CPrimitives.op_or_type_to_string p) ++ spc () ++ str "has type" ++ brk(1,1) ++ pct ++ spc () ++ str "while it is expected to have type" ++ brk(1,1) ++ pt ++ str ".") let explain_cant_apply_bad_type env sigma (n,exptyp,actualtyp) rator randl = let randl = jv_nf_betaiotaevar env sigma randl in let actualtyp = Reductionops.nf_betaiota env sigma actualtyp in let env = make_all_name_different env sigma in let actualtyp, exptyp = pr_explicit env sigma actualtyp exptyp in let nargs = Array.length randl in (* let pe = pr_ne_context_of (str "in environment") env sigma in*) let pr,prt = pr_ljudge_env env sigma rator in let term_string1 = str (String.plural nargs "term") in let term_string2 = if nargs>1 then str "The " ++ pr_nth n ++ str " term" else str "This term" in let appl = prvect_with_sep fnl (fun c -> let pc,pct = pr_ljudge_env env sigma c in hov 2 (pc ++ spc () ++ str ": " ++ pct)) randl in str "Illegal application: " ++ (* pe ++ *) fnl () ++ str "The term" ++ brk(1,1) ++ pr ++ spc () ++ str "of type" ++ brk(1,1) ++ prt ++ spc () ++ str "cannot be applied to the " ++ term_string1 ++ fnl () ++ str " " ++ v 0 appl ++ fnl () ++ term_string2 ++ str " has type" ++ brk(1,1) ++ actualtyp ++ spc () ++ str "which should be coercible to" ++ brk(1,1) ++ exptyp ++ str "." let explain_cant_apply_not_functional env sigma rator randl = let env = make_all_name_different env sigma in let nargs = Array.length randl in (* let pe = pr_ne_context_of (str "in environment") env sigma in*) let pr = pr_leconstr_env env sigma rator.uj_val in let prt = pr_leconstr_env env sigma rator.uj_type in let appl = prvect_with_sep fnl (fun c -> let pc = pr_leconstr_env env sigma c.uj_val in let pct = pr_leconstr_env env sigma c.uj_type in hov 2 (pc ++ spc () ++ str ": " ++ pct)) randl in str "Illegal application (Non-functional construction): " ++ (* pe ++ *) fnl () ++ str "The expression" ++ brk(1,1) ++ pr ++ spc () ++ str "of type" ++ brk(1,1) ++ prt ++ spc () ++ str "cannot be applied to the " ++ str (String.plural nargs "term") ++ fnl () ++ str " " ++ v 0 appl let explain_unexpected_type env sigma actual_type expected_type = let pract, prexp = pr_explicit env sigma actual_type expected_type in str "Found type" ++ spc () ++ pract ++ spc () ++ str "where" ++ spc () ++ prexp ++ str " was expected." let explain_not_product env sigma c = let pr = pr_econstr_env env sigma c in str "The type of this term is a product" ++ spc () ++ str "while it is expected to be" ++ (if EConstr.isType sigma c then str " a sort" else (brk(1,1) ++ pr)) ++ str "." 
(* TODO: use the names *) (* (co)fixpoints *) let explain_ill_formed_rec_body env sigma err names i fixenv vdefj = let pr_lconstr_env env sigma c = pr_leconstr_env env sigma c in let prt_name i = match names.(i).binder_name with Name id -> str "Recursive definition of " ++ Id.print id | Anonymous -> str "The " ++ pr_nth i ++ str " definition" in let st = match err with (* Fixpoint guard errors *) | NotEnoughAbstractionInFixBody -> str "Not enough abstractions in the definition" | RecursionNotOnInductiveType c -> str "Recursive definition on" ++ spc () ++ pr_lconstr_env env sigma c ++ spc () ++ str "which should be a recursive inductive type" | RecursionOnIllegalTerm(j,(arg_env, arg),le,lt) -> let arg_env = make_all_name_different arg_env sigma in let called = match names.(j).binder_name with Name id -> Id.print id | Anonymous -> str "the " ++ pr_nth i ++ str " definition" in let pr_db x = quote (pr_db env x) in let vars = match (lt,le) with ([],[]) -> assert false | ([],[x]) -> str "a subterm of " ++ pr_db x | ([],_) -> str "a subterm of the following variables: " ++ pr_sequence pr_db le | ([x],_) -> pr_db x | _ -> str "one of the following variables: " ++ pr_sequence pr_db lt in str "Recursive call to " ++ called ++ spc () ++ strbrk "has principal argument equal to" ++ spc () ++ pr_lconstr_env arg_env sigma arg ++ strbrk " instead of " ++ vars | NotEnoughArgumentsForFixCall j -> let called = match names.(j).binder_name with Name id -> Id.print id | Anonymous -> str "the " ++ pr_nth i ++ str " definition" in str "Recursive call to " ++ called ++ str " has not enough arguments" (* CoFixpoint guard errors *) | CodomainNotInductiveType c -> str "The codomain is" ++ spc () ++ pr_lconstr_env env sigma c ++ spc () ++ str "which should be a coinductive type" | NestedRecursiveOccurrences -> str "Nested recursive occurrences" | UnguardedRecursiveCall c -> str "Unguarded recursive call in" ++ spc () ++ pr_lconstr_env env sigma c | RecCallInTypeOfAbstraction c -> str "Recursive call forbidden in the domain of an abstraction:" ++ spc () ++ pr_lconstr_env env sigma c | RecCallInNonRecArgOfConstructor c -> str "Recursive call on a non-recursive argument of constructor" ++ spc () ++ pr_lconstr_env env sigma c | RecCallInTypeOfDef c -> str "Recursive call forbidden in the type of a recursive definition" ++ spc () ++ pr_lconstr_env env sigma c | RecCallInCaseFun c -> str "Invalid recursive call in a branch of" ++ spc () ++ pr_lconstr_env env sigma c | RecCallInCaseArg c -> str "Invalid recursive call in the argument of \"match\" in" ++ spc () ++ pr_lconstr_env env sigma c | RecCallInCasePred c -> str "Invalid recursive call in the \"return\" clause of \"match\" in" ++ spc () ++ pr_lconstr_env env sigma c | NotGuardedForm c -> str "Sub-expression " ++ pr_lconstr_env env sigma c ++ strbrk " not in guarded form (should be a constructor," ++ strbrk " an abstraction, a match, a cofix or a recursive call)" | ReturnPredicateNotCoInductive c -> str "The return clause of the following pattern matching should be" ++ strbrk " a coinductive type:" ++ spc () ++ pr_lconstr_env env sigma c | FixpointOnIrrelevantInductive -> strbrk "Fixpoints on proof irrelevant inductive types should produce proof irrelevant values" in prt_name i ++ str " is ill-formed." ++ fnl () ++ pr_ne_context_of (str "In environment") env sigma ++ st ++ str "." ++ fnl () ++ (try (* May fail with unresolved globals. 
*) let fixenv = make_all_name_different fixenv sigma in let pvd = pr_lconstr_env fixenv sigma vdefj.(i).uj_val in str"Recursive definition is:" ++ spc () ++ pvd ++ str "." with e when CErrors.noncritical e -> mt ()) let explain_ill_typed_rec_body env sigma i names vdefj vargs = let env = make_all_name_different env sigma in let pvd = pr_leconstr_env env sigma vdefj.(i).uj_val in let pvdt, pv = pr_explicit env sigma vdefj.(i).uj_type vargs.(i) in str "The " ++ (match vdefj with [|_|] -> mt () | _ -> pr_nth (i+1) ++ spc ()) ++ str "recursive definition" ++ spc () ++ pvd ++ spc () ++ str "has type" ++ spc () ++ pvdt ++ spc () ++ str "while it should be" ++ spc () ++ pv ++ str "." let explain_cant_find_case_type env sigma c = let env = make_all_name_different env sigma in let pe = pr_leconstr_env env sigma c in str "Cannot infer the return type of pattern-matching on" ++ ws 1 ++ pe ++ str "." let explain_occur_check env sigma ev rhs = let env = make_all_name_different env sigma in let pt = pr_leconstr_env env sigma rhs in str "Cannot define " ++ pr_existential_key env sigma ev ++ str " with term" ++ brk(1,1) ++ pt ++ spc () ++ str "that would depend on itself." let pr_trailing_ne_context_of env sigma = if List.is_empty (Environ.rel_context env) && List.is_empty (Environ.named_context env) then str "." else (strbrk " in environment:" ++ pr_context_unlimited env sigma) let rec explain_evar_kind env sigma evk ty = let open Evar_kinds in function | Evar_kinds.NamedHole id -> strbrk "the existential variable named " ++ Id.print id | Evar_kinds.QuestionMark {qm_record_field=None} -> strbrk "this placeholder of type " ++ ty | Evar_kinds.QuestionMark {qm_record_field=Some {fieldname; recordname}} -> str "field " ++ (Printer.pr_constant env fieldname) ++ str " of record " ++ (Printer.pr_inductive env recordname) | Evar_kinds.CasesType false -> strbrk "the type of this pattern-matching problem" | Evar_kinds.CasesType true -> strbrk "a subterm of type " ++ ty ++ strbrk " in the type of this pattern-matching problem" | Evar_kinds.BinderType (Name id) -> strbrk "the type of " ++ Id.print id | Evar_kinds.BinderType Anonymous -> strbrk "the type of this anonymous binder" | Evar_kinds.EvarType (ido,evk) -> let pp = match ido with | Some id -> str "?" 
++ Id.print id | None -> try pr_existential_key env sigma evk with (* defined *) Not_found -> strbrk "an internal placeholder" in strbrk "the type of " ++ pp | Evar_kinds.ImplicitArg (c,(n,ido),b) -> let id = Option.get ido in strbrk "the implicit parameter " ++ Id.print id ++ spc () ++ str "of" ++ spc () ++ Nametab.pr_global_env Id.Set.empty c ++ strbrk " whose type is " ++ ty | Evar_kinds.InternalHole -> strbrk "an internal placeholder of type " ++ ty | Evar_kinds.TomatchTypeParameter (tyi,n) -> strbrk "the " ++ pr_nth n ++ strbrk " argument of the inductive type (" ++ pr_inductive env tyi ++ strbrk ") of this term" | Evar_kinds.GoalEvar -> strbrk "an existential variable of type " ++ ty | Evar_kinds.ImpossibleCase -> strbrk "the type of an impossible pattern-matching clause" | Evar_kinds.MatchingVar _ -> assert false | Evar_kinds.VarInstance id -> strbrk "an instance of type " ++ ty ++ str " for the variable " ++ Id.print id | Evar_kinds.SubEvar (where,evk') -> let rec find_source evk = let evi = Evd.find sigma evk in match snd evi.evar_source with | Evar_kinds.SubEvar (_,evk) -> find_source evk | src -> evi,src in let evi,src = find_source evk' in let pc = match evi.evar_body with | Evar_defined c -> pr_leconstr_env env sigma c | Evar_empty -> assert false in let ty' = evi.evar_concl in pr_existential_key env sigma evk ++ strbrk " in the partial instance " ++ pc ++ strbrk " found for " ++ explain_evar_kind env sigma evk (pr_leconstr_env env sigma ty') src let explain_typeclass_resolution env sigma evi k = match Typeclasses.class_of_constr env sigma evi.evar_concl with | Some _ -> let env = Evd.evar_filtered_env env evi in fnl () ++ str "Could not find an instance for " ++ pr_leconstr_env env sigma evi.evar_concl ++ pr_trailing_ne_context_of env sigma | _ -> mt() let explain_placeholder_kind env sigma c e = match e with | Some (SeveralInstancesFound n) -> strbrk " (several distinct possible type class instances found)" | None -> match Typeclasses.class_of_constr env sigma c with | Some _ -> strbrk " (no type class instance found)" | _ -> mt () let explain_unsolvable_implicit env sigma evk explain = let evi = Evarutil.nf_evar_info sigma (Evd.find_undefined sigma evk) in let env = Evd.evar_filtered_env env evi in let type_of_hole = pr_leconstr_env env sigma evi.evar_concl in let pe = pr_trailing_ne_context_of env sigma in strbrk "Cannot infer " ++ explain_evar_kind env sigma evk type_of_hole (snd evi.evar_source) ++ explain_placeholder_kind env sigma evi.evar_concl explain ++ pe let explain_var_not_found env id = str "The variable" ++ spc () ++ Id.print id ++ spc () ++ str "was not found" ++ spc () ++ str "in the current" ++ spc () ++ str "environment" ++ str "." let explain_evar_not_found env sigma id = let undef = Evar.Map.domain (Evd.undefined_map sigma) in let all_undef_evars = Evar.Set.elements undef in let f ev = Id.equal id (Termops.evar_suggested_name (Global.env ()) sigma ev) in if List.exists f all_undef_evars then (* The name is used for printing but is not user-given *) str "?" ++ Id.print id ++ strbrk " is a generated name. Only user-given names for existential variables" ++ strbrk " can be referenced. To give a user name to an existential variable," ++ strbrk " introduce it with the ?[name] syntax." else str "Unknown existential variable." 
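(* Printers for case-info mismatches, unification and generalization
   failures, ill-typed abstractions, metavariable problems, unsatisfied
   universe constraints and undeclared universes. *)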
let explain_wrong_case_info env (ind,u) ci = let pi = pr_inductive env ind in if Ind.CanOrd.equal ci.ci_ind ind then str "Pattern-matching expression on an object of inductive type" ++ spc () ++ pi ++ spc () ++ str "has invalid information." else let pc = pr_inductive env ci.ci_ind in str "A term of inductive type" ++ spc () ++ pi ++ spc () ++ str "was given to a pattern-matching expression on the inductive type" ++ spc () ++ pc ++ str "." let explain_cannot_unify env sigma m n e = let env = make_all_name_different env sigma in let pm, pn = pr_explicit env sigma m n in let ppreason = explain_unification_error env sigma m n e in let pe = pr_ne_context_of (str "In environment") env sigma in pe ++ str "Unable to unify" ++ brk(1,1) ++ pm ++ spc () ++ str "with" ++ brk(1,1) ++ pn ++ ppreason ++ str "." let explain_cannot_unify_local env sigma m n subn = let pm = pr_leconstr_env env sigma m in let pn = pr_leconstr_env env sigma n in let psubn = pr_leconstr_env env sigma subn in str "Unable to unify" ++ brk(1,1) ++ pm ++ spc () ++ str "with" ++ brk(1,1) ++ pn ++ spc () ++ str "as" ++ brk(1,1) ++ psubn ++ str " contains local variables." let explain_refiner_cannot_generalize env sigma ty = str "Cannot find a well-typed generalisation of the goal with type: " ++ pr_leconstr_env env sigma ty ++ str "." let explain_no_occurrence_found env sigma c id = str "Found no subterm matching " ++ pr_leconstr_env env sigma c ++ str " in " ++ (match id with | Some id -> Id.print id | None -> str"the current goal") ++ str "." let explain_cannot_unify_binding_type env sigma m n = let pm = pr_leconstr_env env sigma m in let pn = pr_leconstr_env env sigma n in str "This binding has type" ++ brk(1,1) ++ pm ++ spc () ++ str "which should be unifiable with" ++ brk(1,1) ++ pn ++ str "." let explain_cannot_find_well_typed_abstraction env sigma p l e = str "Abstracting over the " ++ str (String.plural (List.length l) "term") ++ spc () ++ hov 0 (pr_enum (fun c -> pr_leconstr_env env sigma c) l) ++ spc () ++ str "leads to a term" ++ spc () ++ pr_letype_env ~goal_concl_style:true env sigma p ++ spc () ++ str "which is ill-typed." ++ (match e with None -> mt () | Some e -> fnl () ++ str "Reason is: " ++ e) let explain_wrong_abstraction_type env sigma na abs expected result = let ppname = match na with Name id -> Id.print id ++ spc () | _ -> mt () in str "Cannot instantiate metavariable " ++ ppname ++ strbrk "of type " ++ pr_leconstr_env env sigma expected ++ strbrk " with abstraction " ++ pr_leconstr_env env sigma abs ++ strbrk " of incompatible type " ++ pr_leconstr_env env sigma result ++ str "." let explain_abstraction_over_meta _ m n = strbrk "Too complex unification problem: cannot find a solution for both " ++ Name.print m ++ spc () ++ str "and " ++ Name.print n ++ str "." let explain_non_linear_unification env sigma m t = strbrk "Cannot unambiguously instantiate " ++ Name.print m ++ str ":" ++ strbrk " which would require to abstract twice on " ++ pr_leconstr_env env sigma t ++ str "." let explain_unsatisfied_constraints env sigma cst = strbrk "Unsatisfied constraints: " ++ Univ.pr_constraints (Termops.pr_evd_level sigma) cst ++ spc () ++ str "(maybe a bugged tactic)." let explain_undeclared_universe env sigma l = strbrk "Undeclared universe: " ++ Termops.pr_evd_level sigma l ++ spc () ++ str "(maybe a bugged tactic)." 
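(* Illustrative note (the exact rendering is an assumption, not taken from
   this file): [Univ.pr_constraints] prints each constraint with the evar-map
   level printer, so the message above typically looks like
   "Unsatisfied constraints: Top.1 <= Top.2 (maybe a bugged tactic)."
   where the actual level names depend on the universes recorded in [sigma]. *)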
let explain_disallowed_sprop () = Pp.(strbrk "SProp is disallowed because the " ++ str "\"Allow StrictProp\"" ++ strbrk " flag is off.") let explain_bad_relevance env = strbrk "Bad relevance (maybe a bugged tactic)." let explain_bad_invert env = strbrk "Bad case inversion (maybe a bugged tactic)." let explain_bad_variance env sigma ~lev ~expected ~actual = str "Incorrect variance for universe " ++ Termops.pr_evd_level sigma lev ++ str": expected " ++ Univ.Variance.pr expected ++ str " but cannot be less restrictive than " ++ Univ.Variance.pr actual ++ str "." let explain_type_error env sigma err = let env = make_all_name_different env sigma in match err with | UnboundRel n -> explain_unbound_rel env sigma n | UnboundVar v -> explain_unbound_var env v | NotAType j -> explain_not_type env sigma j | BadAssumption c -> explain_bad_assumption env sigma c | ReferenceVariables (id,c) -> explain_reference_variables sigma id c | ElimArity (ind, c, pj, okinds) -> explain_elim_arity env sigma ind c pj okinds | CaseNotInductive cj -> explain_case_not_inductive env sigma cj | NumberBranches (cj, n) -> explain_number_branches env sigma cj n | IllFormedBranch (c, i, actty, expty) -> explain_ill_formed_branch env sigma c i actty expty | Generalization (nvar, c) -> explain_generalization env sigma nvar c | ActualType (j, pt) -> explain_actual_type env sigma j pt None | IncorrectPrimitive (j, t) -> explain_incorrect_primitive env sigma j t | CantApplyBadType (t, rator, randl) -> explain_cant_apply_bad_type env sigma t rator randl | CantApplyNonFunctional (rator, randl) -> explain_cant_apply_not_functional env sigma rator randl | IllFormedRecBody (err, lna, i, fixenv, vdefj) -> explain_ill_formed_rec_body env sigma err lna i fixenv vdefj | IllTypedRecBody (i, lna, vdefj, vargs) -> explain_ill_typed_rec_body env sigma i lna vdefj vargs | WrongCaseInfo (ind,ci) -> explain_wrong_case_info env ind ci | UnsatisfiedConstraints cst -> explain_unsatisfied_constraints env sigma cst | UndeclaredUniverse l -> explain_undeclared_universe env sigma l | DisallowedSProp -> explain_disallowed_sprop () | BadRelevance -> explain_bad_relevance env | BadInvert -> explain_bad_invert env | BadVariance {lev;expected;actual} -> explain_bad_variance env sigma ~lev ~expected ~actual let pr_position (cl,pos) = let clpos = match cl with | None -> str " of the goal" | Some (id,Locus.InHyp) -> str " of hypothesis " ++ Id.print id | Some (id,Locus.InHypTypeOnly) -> str " of the type of hypothesis " ++ Id.print id | Some (id,Locus.InHypValueOnly) -> str " of the body of hypothesis " ++ Id.print id in int pos ++ clpos let explain_cannot_unify_occurrences env sigma nested ((cl2,pos2),t2) ((cl1,pos1),t1) e = if nested then str "Found nested occurrences of the pattern at positions " ++ int pos1 ++ strbrk " and " ++ pr_position (cl2,pos2) ++ str "." else let ppreason = match e with | None -> mt() | Some (c1,c2,e) -> explain_unification_error env sigma c1 c2 (Some e) in str "Found incompatible occurrences of the pattern" ++ str ":" ++ spc () ++ str "Matched term " ++ pr_leconstr_env env sigma t2 ++ strbrk " at position " ++ pr_position (cl2,pos2) ++ strbrk " is not compatible with matched term " ++ pr_leconstr_env env sigma t1 ++ strbrk " at position " ++ pr_position (cl1,pos1) ++ ppreason ++ str "." 
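(* Illustrative sketch (hypothetical helper, kept inside a comment so it does
   not affect the compiled module): how the [explain_type_error] dispatcher
   above can be reused to turn a caught kernel error into a printable message,
   mirroring [vernac_interp_error_handler] registered at the end of this file.

   let explain_caught_type_error sigma = function
     | Type_errors.TypeError (env, te) ->
         (* kernel errors carry [Constr.t]; lift them to [EConstr] first *)
         let te = map_ptype_error EConstr.of_constr te in
         Some (explain_type_error env sigma te)
     | _ -> None
*)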
let pr_constraints printenv env sigma evars cstrs = let (ev, evi) = Evar.Map.choose evars in if Evar.Map.for_all (fun ev' evi' -> eq_named_context_val evi.evar_hyps evi'.evar_hyps) evars then let l = Evar.Map.bindings evars in let env' = reset_with_named_context evi.evar_hyps env in let pe = if printenv then pr_ne_context_of (str "In environment:") env' sigma else mt () in let evs = prlist (fun (ev, evi) -> fnl () ++ pr_existential_key (Global.env ()) sigma ev ++ str " : " ++ pr_leconstr_env env' sigma evi.evar_concl ++ fnl ()) l in h (pe ++ evs ++ pr_evar_constraints sigma cstrs) else let filter evk _ = Evar.Map.mem evk evars in pr_evar_map_filter ~with_univs:false filter env sigma let explain_unsatisfiable_constraints env sigma constr comp = let (_, constraints) = Evd.extract_all_conv_pbs sigma in let tcs = Evd.get_typeclass_evars sigma in let undef = Evd.undefined_map sigma in (* Only keep evars that are subject to resolution and members of the given component. *) let is_kept evk _ = match comp with | None -> Evar.Set.mem evk tcs | Some comp -> Evar.Set.mem evk tcs && Evar.Set.mem evk comp in let undef = let m = Evar.Map.filter is_kept undef in if Evar.Map.is_empty m then undef else m in match constr with | None -> str "Unable to satisfy the following constraints:" ++ fnl () ++ pr_constraints true env sigma undef constraints | Some (ev, k) -> let cstr = let remaining = Evar.Map.remove ev undef in if not (Evar.Map.is_empty remaining) then str "With the following constraints:" ++ fnl () ++ pr_constraints false env sigma remaining constraints else mt () in let info = Evar.Map.find ev undef in explain_typeclass_resolution env sigma info k ++ fnl () ++ cstr let rec explain_pretype_error env sigma err = let env = Evardefine.env_nf_betaiotaevar sigma env in let env = make_all_name_different env sigma in match err with | CantFindCaseType c -> explain_cant_find_case_type env sigma c | ActualTypeNotCoercible (j,t,e) -> let {uj_val = c; uj_type = actty} = j in let (env, c, actty, expty), e = contract3' env sigma c actty t e in let j = {uj_val = c; uj_type = actty} in explain_actual_type env sigma j expty (Some e) | UnifOccurCheck (ev,rhs) -> explain_occur_check env sigma ev rhs | UnsolvableImplicit (evk,exp) -> explain_unsolvable_implicit env sigma evk exp | VarNotFound id -> explain_var_not_found env id | EvarNotFound id -> explain_evar_not_found env sigma id | UnexpectedType (actual,expect) -> let env, actual, expect = contract2 env sigma actual expect in explain_unexpected_type env sigma actual expect | NotProduct c -> explain_not_product env sigma c | CannotUnify (m,n,e) -> let env, m, n = contract2 env sigma m n in explain_cannot_unify env sigma m n e | CannotUnifyLocal (m,n,sn) -> explain_cannot_unify_local env sigma m n sn | CannotGeneralize ty -> explain_refiner_cannot_generalize env sigma ty | NoOccurrenceFound (c, id) -> explain_no_occurrence_found env sigma c id | CannotUnifyBindingType (m,n) -> explain_cannot_unify_binding_type env sigma m n | CannotFindWellTypedAbstraction (p,l,e) -> explain_cannot_find_well_typed_abstraction env sigma p l (Option.map (fun (env',e) -> explain_pretype_error env' sigma e) e) | WrongAbstractionType (n,a,t,u) -> explain_wrong_abstraction_type env sigma n a t u | AbstractionOverMeta (m,n) -> explain_abstraction_over_meta env m n | NonLinearUnification (m,c) -> explain_non_linear_unification env sigma m c | TypingError t -> explain_type_error env sigma t | CannotUnifyOccurrences (b,c1,c2,e) -> explain_cannot_unify_occurrences env sigma b c1 c2 e | 
UnsatisfiableConstraints (c,comp) -> explain_unsatisfiable_constraints env sigma c comp | DisallowedSProp -> explain_disallowed_sprop () (* Module errors *) open Modops let explain_not_match_error = function | InductiveFieldExpected _ -> strbrk "an inductive definition is expected" | DefinitionFieldExpected -> strbrk "a definition is expected. Hint: you can rename the \ inductive or constructor and add a definition mapping the \ old name to the new name" | ModuleFieldExpected -> strbrk "a module is expected" | ModuleTypeFieldExpected -> strbrk "a module type is expected" | NotConvertibleInductiveField id | NotConvertibleConstructorField id -> str "types given to " ++ Id.print id ++ str " differ" | NotConvertibleBodyField -> str "the body of definitions differs" | NotConvertibleTypeField (env, typ1, typ2) -> str "expected type" ++ spc () ++ quote (Printer.safe_pr_lconstr_env env (Evd.from_env env) typ2) ++ spc () ++ str "but found type" ++ spc () ++ quote (Printer.safe_pr_lconstr_env env (Evd.from_env env) typ1) | NotSameConstructorNamesField -> str "constructor names differ" | NotSameInductiveNameInBlockField -> str "inductive types names differ" | FiniteInductiveFieldExpected isfinite -> str "type is expected to be " ++ str (if isfinite then "coinductive" else "inductive") | InductiveNumbersFieldExpected n -> str "number of inductive types differs" | InductiveParamsNumberField n -> str "inductive type has not the right number of parameters" | RecordFieldExpected isrecord -> str "type is expected " ++ str (if isrecord then "" else "not ") ++ str "to be a record" | RecordProjectionsExpected nal -> (if List.length nal >= 2 then str "expected projection names are " else str "expected projection name is ") ++ pr_enum (function Name id -> Id.print id | _ -> str "_") nal | NotEqualInductiveAliases -> str "Aliases to inductive types do not match" | CumulativeStatusExpected b -> let status b = if b then str"cumulative" else str"non-cumulative" in str "a " ++ status b ++ str" declaration was expected, but a " ++ status (not b) ++ str" declaration was found" | PolymorphicStatusExpected b -> let status b = if b then str"polymorphic" else str"monomorphic" in str "a " ++ status b ++ str" declaration was expected, but a " ++ status (not b) ++ str" declaration was found" | IncompatibleUniverses incon -> str"the universe constraints are inconsistent: " ++ Univ.explain_universe_inconsistency UnivNames.(pr_with_global_universes empty_binders) incon | IncompatiblePolymorphism (env, t1, t2) -> str "conversion of polymorphic values generates additional constraints: " ++ quote (Printer.safe_pr_lconstr_env env (Evd.from_env env) t1) ++ spc () ++ str "compared to " ++ spc () ++ quote (Printer.safe_pr_lconstr_env env (Evd.from_env env) t2) | IncompatibleConstraints { got; expect } -> let open Univ in let pr_auctx auctx = let sigma = Evd.from_ctx (UState.of_binders (Printer.universe_binders_with_opt_names auctx None)) in let uctx = AbstractContext.repr auctx in Printer.pr_universe_instance_constraints sigma (UContext.instance uctx) (UContext.constraints uctx) in str "incompatible polymorphic binders: got" ++ spc () ++ h (pr_auctx got) ++ spc() ++ str "but expected" ++ spc() ++ h (pr_auctx expect) ++ (if not (Int.equal (AbstractContext.size got) (AbstractContext.size expect)) then mt() else fnl() ++ str "(incompatible constraints)") | IncompatibleVariance -> str "incompatible variance information" let explain_signature_mismatch l spec why = str "Signature components for label " ++ Label.print l ++ str " do not 
match:" ++ spc () ++ explain_not_match_error why ++ str "." let explain_label_already_declared l = str "The label " ++ Label.print l ++ str " is already declared." let explain_application_to_not_path _ = strbrk "A module cannot be applied to another module application or " ++ strbrk "with-expression; you must give a name to the intermediate result " ++ strbrk "module first." let explain_not_a_functor () = str "Application of a non-functor." let explain_is_a_functor () = str "Illegal use of a functor." let explain_incompatible_module_types mexpr1 mexpr2 = let open Declarations in let rec get_arg = function | NoFunctor _ -> 0 | MoreFunctor (_, _, ty) -> succ (get_arg ty) in let len1 = get_arg mexpr1.mod_type in let len2 = get_arg mexpr2.mod_type in if len1 <> len2 then str "Incompatible module types: module expects " ++ int len2 ++ str " arguments, found " ++ int len1 ++ str "." else str "Incompatible module types." let explain_not_equal_module_paths mp1 mp2 = str "Non equal modules." let explain_no_such_label l = str "No such label " ++ Label.print l ++ str "." let explain_incompatible_labels l l' = str "Opening and closing labels are not the same: " ++ Label.print l ++ str " <> " ++ Label.print l' ++ str "!" let explain_not_a_module s = quote (str s) ++ str " is not a module." let explain_not_a_module_type s = quote (str s) ++ str " is not a module type." let explain_not_a_constant l = quote (Label.print l) ++ str " is not a constant." let explain_incorrect_label_constraint l = str "Incorrect constraint for label " ++ quote (Label.print l) ++ str "." let explain_generative_module_expected l = str "The module " ++ Label.print l ++ str " is not generative." ++ strbrk " Only components of generative modules can be changed" ++ strbrk " using the \"with\" construct." let explain_label_missing l s = str "The field " ++ Label.print l ++ str " is missing in " ++ str s ++ str "." let explain_include_restricted_functor mp = let q = Nametab.shortest_qualid_of_module mp in str "Cannot include the functor " ++ Libnames.pr_qualid q ++ strbrk " since it has a restricted signature. " ++ strbrk "You may name first an instance of this functor, and include it." let explain_module_error = function | SignatureMismatch (l,spec,err) -> explain_signature_mismatch l spec err | LabelAlreadyDeclared l -> explain_label_already_declared l | ApplicationToNotPath mexpr -> explain_application_to_not_path mexpr | NotAFunctor -> explain_not_a_functor () | IsAFunctor -> explain_is_a_functor () | IncompatibleModuleTypes (m1,m2) -> explain_incompatible_module_types m1 m2 | NotEqualModulePaths (mp1,mp2) -> explain_not_equal_module_paths mp1 mp2 | NoSuchLabel l -> explain_no_such_label l | IncompatibleLabels (l1,l2) -> explain_incompatible_labels l1 l2 | NotAModule s -> explain_not_a_module s | NotAModuleType s -> explain_not_a_module_type s | NotAConstant l -> explain_not_a_constant l | IncorrectWithConstraint l -> explain_incorrect_label_constraint l | GenerativeModuleExpected l -> explain_generative_module_expected l | LabelMissing (l,s) -> explain_label_missing l s | IncludeRestrictedFunctor mp -> explain_include_restricted_functor mp (* Module internalization errors *) (* let explain_declaration_not_path _ = str "Declaration is not a path." *) let explain_not_module_nor_modtype s = quote (str s) ++ str " is not a module or module type." let explain_incorrect_with_in_module () = str "The syntax \"with\" is not allowed for modules." let explain_incorrect_module_application () = str "Illegal application to a module type." 
let explain_module_internalization_error = let open Modintern in function | NotAModuleNorModtype s -> explain_not_module_nor_modtype s | IncorrectWithInModule -> explain_incorrect_with_in_module () | IncorrectModuleApplication -> explain_incorrect_module_application () (* Typeclass errors *) let explain_not_a_class env sigma c = pr_econstr_env env sigma c ++ str" is not a declared type class." let explain_unbound_method env sigma cid { CAst.v = id } = str "Unbound method name " ++ Id.print (id) ++ spc () ++ str"of class" ++ spc () ++ pr_global cid ++ str "." let explain_typeclass_error env sigma = function | NotAClass c -> explain_not_a_class env sigma c | UnboundMethod (cid, id) -> explain_unbound_method env sigma cid id (* Refiner errors *) let explain_refiner_bad_type env sigma arg ty conclty = let pm, pn = with_diffs (pr_lconstr_env env sigma ty) (pr_leconstr_env env sigma conclty) in str "Refiner was given an argument" ++ brk(1,1) ++ pr_lconstr_env env sigma arg ++ spc () ++ str "of type" ++ brk(1,1) ++ pm ++ spc () ++ str "instead of" ++ brk(1,1) ++ pn ++ str "." let explain_refiner_unresolved_bindings l = str "Unable to find an instance for the " ++ str (String.plural (List.length l) "variable") ++ spc () ++ prlist_with_sep pr_comma Name.print l ++ str"." let explain_refiner_cannot_apply env sigma t harg = str "In refiner, a term of type" ++ brk(1,1) ++ pr_lconstr_env env sigma t ++ spc () ++ str "could not be applied to" ++ brk(1,1) ++ pr_lconstr_env env sigma harg ++ str "." let explain_intro_needs_product () = str "Introduction tactics needs products." let explain_non_linear_proof env sigma c = str "Cannot refine with term" ++ brk(1,1) ++ pr_lconstr_env env sigma c ++ spc () ++ str "because a metavariable has several occurrences." let explain_meta_in_type env sigma c = str "In refiner, a meta appears in the type " ++ brk(1,1) ++ pr_leconstr_env env sigma c ++ str " of another meta" let explain_no_such_hyp id = str "No such hypothesis: " ++ Id.print id let explain_refiner_error env sigma = function | BadType (arg,ty,conclty) -> explain_refiner_bad_type env sigma arg ty conclty | UnresolvedBindings t -> explain_refiner_unresolved_bindings t | CannotApply (t,harg) -> explain_refiner_cannot_apply env sigma t harg | IntroNeedsProduct -> explain_intro_needs_product () | NonLinearProof c -> explain_non_linear_proof env sigma c | MetaInType c -> explain_meta_in_type env sigma c | NoSuchHyp id -> explain_no_such_hyp id (* Inductive errors *) let error_non_strictly_positive env c v = let pc = pr_lconstr_env env (Evd.from_env env) c in let pv = pr_lconstr_env env (Evd.from_env env) v in str "Non strictly positive occurrence of " ++ pv ++ str " in" ++ brk(1,1) ++ pc ++ str "." let error_ill_formed_inductive env c v = let pc = pr_lconstr_env env (Evd.from_env env) c in let pv = pr_lconstr_env env (Evd.from_env env) v in str "Not enough arguments applied to the " ++ pv ++ str " in" ++ brk(1,1) ++ pc ++ str "." 
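(* Illustrative note: [error_non_strictly_positive] above is the message
   behind the positivity checker, e.g. for a constructor of an inductive [t]
   taking an argument of type [t -> nat], where [t] occurs to the left of an
   arrow.  [error_ill_formed_inductive] covers the related case where the
   inductive type under definition occurs applied to too few arguments. *)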
let error_ill_formed_constructor env id c v nparams nargs = let pv = pr_lconstr_env env (Evd.from_env env) v in let atomic = Int.equal (nb_prod Evd.empty (EConstr.of_constr c)) (* FIXME *) 0 in str "The type of constructor" ++ brk(1,1) ++ Id.print id ++ brk(1,1) ++ str "is not valid;" ++ brk(1,1) ++ strbrk (if atomic then "it must be " else "its conclusion must be ") ++ pv ++ (* warning: because of implicit arguments it is difficult to say which parameters must be explicitly given *) (if not (Int.equal nparams 0) then strbrk " applied to its " ++ str (String.plural nparams "parameter") else mt()) ++ (if not (Int.equal nargs 0) then str (if not (Int.equal nparams 0) then " and" else " applied") ++ strbrk " to some " ++ str (String.plural nargs "argument") else mt()) ++ str "." let pr_ltype_using_barendregt_convention_env env c = (* Use goal_concl_style as an approximation of Barendregt's convention (?) *) quote (pr_ltype_env ~goal_concl_style:true env (Evd.from_env env) c) let error_bad_ind_parameters env c n v1 v2 = let pc = pr_ltype_using_barendregt_convention_env env c in let pv1 = pr_lconstr_env env (Evd.from_env env) v1 in let pv2 = pr_lconstr_env env (Evd.from_env env) v2 in str "Last occurrence of " ++ pv2 ++ str " must have " ++ pv1 ++ str " as " ++ pr_nth n ++ str " argument in" ++ brk(1,1) ++ pc ++ str "." let error_same_names_types id = str "The name" ++ spc () ++ Id.print id ++ spc () ++ str "is used more than once." let error_same_names_constructors id = str "The constructor name" ++ spc () ++ Id.print id ++ spc () ++ str "is used more than once." let error_same_names_overlap idl = strbrk "The following names are used both as type names and constructor " ++ str "names:" ++ spc () ++ prlist_with_sep pr_comma Id.print idl ++ str "." let error_not_an_arity env c = str "The type" ++ spc () ++ pr_lconstr_env env (Evd.from_env env) c ++ spc () ++ str "is not an arity." let error_bad_entry () = str "Bad inductive definition." let error_large_non_prop_inductive_not_in_type () = str "Large non-propositional inductive types must be in Type." let error_inductive_missing_constraints (us,ind_univ) = let pr_u = Univ.Universe.pr_with UnivNames.(pr_with_global_universes empty_binders) in str "Missing universe constraint declared for inductive type:" ++ spc() ++ v 0 (prlist_with_sep spc (fun u -> hov 0 (pr_u u ++ str " <= " ++ pr_u ind_univ)) (Univ.Universe.Set.elements us)) (* Recursion schemes errors *) let error_not_allowed_case_analysis env isrec kind i = str (if isrec then "Induction" else "Case analysis") ++ strbrk " on sort " ++ pr_sort Evd.empty kind ++ strbrk " is not allowed for inductive definition " ++ pr_inductive env (fst i) ++ str "." let error_not_allowed_dependent_analysis env isrec i = str "Dependent " ++ str (if isrec then "induction" else "case analysis") ++ strbrk " is not allowed for inductive definition " ++ pr_inductive env i ++ str "." let error_not_mutual_in_scheme env ind ind' = if Ind.CanOrd.equal ind ind' then str "The inductive type " ++ pr_inductive env ind ++ str " occurs twice." else str "The inductive types " ++ pr_inductive env ind ++ spc () ++ str "and" ++ spc () ++ pr_inductive env ind' ++ spc () ++ str "are not mutually defined." 
(* Inductive constructions errors *) let explain_inductive_error = function | NonPos (env,c,v) -> error_non_strictly_positive env c v | NotEnoughArgs (env,c,v) -> error_ill_formed_inductive env c v | NotConstructor (env,id,c,v,n,m) -> error_ill_formed_constructor env id c v n m | NonPar (env,c,n,v1,v2) -> error_bad_ind_parameters env c n v1 v2 | SameNamesTypes id -> error_same_names_types id | SameNamesConstructors id -> error_same_names_constructors id | SameNamesOverlap idl -> error_same_names_overlap idl | NotAnArity (env, c) -> error_not_an_arity env c | BadEntry -> error_bad_entry () | LargeNonPropInductiveNotInType -> error_large_non_prop_inductive_not_in_type () | MissingConstraints csts -> error_inductive_missing_constraints csts (* Primitive errors *) let explain_incompatible_prim_declarations (type a) (act:a Primred.action_kind) (x:a) (y:a) = let open Primred in let env = Global.env() in (* The newer constant/inductive (either coming from Primitive or a Require) may be absent from the nametab as the error got raised while adding it to the safe_env. In that case we can't use nametab printing. There are still cases where the constant/inductive is added separately from its retroknowledge (using Register), so we still try nametab based printing. *) match act with | IncompatTypes typ -> let px = try pr_constant env x with Not_found -> Constant.print x in str "Cannot declare " ++ px ++ str " as primitive " ++ str (CPrimitives.prim_type_to_string typ) ++ str ": " ++ pr_constant env y ++ str " is already declared." | IncompatInd ind -> let px = try pr_inductive env x with Not_found -> MutInd.print (fst x) in str "Cannot declare " ++ px ++ str " as primitive " ++ str (CPrimitives.prim_ind_to_string ind) ++ str ": " ++ pr_inductive env y ++ str " is already declared." (* Recursion schemes errors *) let explain_recursion_scheme_error env = function | NotAllowedCaseAnalysis (isrec,k,i) -> error_not_allowed_case_analysis env isrec k i | NotMutualInScheme (ind,ind')-> error_not_mutual_in_scheme env ind ind' | NotAllowedDependentAnalysis (isrec, i) -> error_not_allowed_dependent_analysis env isrec i (* Pattern-matching errors *) let explain_bad_pattern env sigma cstr ty = let env = make_all_name_different env sigma in let pt = pr_leconstr_env env sigma ty in let pc = pr_constructor env cstr in str "Found the constructor " ++ pc ++ brk(1,1) ++ str "while matching a term of type " ++ pt ++ brk(1,1) ++ str "which is not an inductive type." let explain_bad_constructor env cstr ind = let pi = pr_inductive env ind in (* let pc = pr_constructor env cstr in*) let pt = pr_inductive env (inductive_of_constructor cstr) in str "Found a constructor of inductive type " ++ pt ++ brk(1,1) ++ str "while a constructor of " ++ pi ++ brk(1,1) ++ str "is expected." let decline_string n s = if Int.equal n 0 then str "no " ++ str s ++ str "s" else if Int.equal n 1 then str "1 " ++ str s else (int n ++ str " " ++ str s ++ str "s") let explain_wrong_numarg_pattern expanded nargs expected_nassums expected_ndecls pp = (if expanded then strbrk "Once notations are expanded, the resulting " else strbrk "The ") ++ pp ++ strbrk " is expected to be applied to " ++ decline_string expected_nassums "argument" ++ (if expected_nassums = expected_ndecls then mt () else strbrk " (or " ++ decline_string expected_ndecls "argument" ++ strbrk " when including variables for local definitions)") ++ strbrk " while it is actually applied to " ++ decline_string nargs "argument" ++ str "." 
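(* Illustrative note: expected renderings of [decline_string] above, which
   keeps the arity messages grammatical for any count:
   decline_string 0 "argument"  ==>  "no arguments"
   decline_string 1 "argument"  ==>  "1 argument"
   decline_string 3 "argument"  ==>  "3 arguments" *)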
let explain_wrong_numarg_constructor env cstr expanded nargs expected_nassums expected_ndecls = let pp = strbrk "constructor " ++ pr_constructor env cstr ++ strbrk " (in type " ++ pr_inductive env (inductive_of_constructor cstr) ++ strbrk ")" in explain_wrong_numarg_pattern expanded nargs expected_nassums expected_ndecls pp let explain_wrong_numarg_inductive env ind expanded nargs expected_nassums expected_ndecls = let pp = strbrk "inductive type " ++ pr_inductive env ind in explain_wrong_numarg_pattern expanded nargs expected_nassums expected_ndecls pp let explain_unused_clause env pats = str "Pattern \"" ++ hov 0 (prlist_with_sep pr_comma pr_cases_pattern pats) ++ strbrk "\" is redundant in this clause." let explain_non_exhaustive env pats = str "Non exhaustive pattern-matching: no clause found for " ++ str (String.plural (List.length pats) "pattern") ++ spc () ++ hov 0 (prlist_with_sep pr_comma pr_cases_pattern pats) let explain_cannot_infer_predicate env sigma typs = let typs = Array.to_list typs in let env = make_all_name_different env sigma in let pr_branch (cstr,typ) = let cstr,_ = EConstr.decompose_app sigma cstr in str "For " ++ pr_leconstr_env env sigma cstr ++ str ": " ++ pr_leconstr_env env sigma typ in str "Unable to unify the types found in the branches:" ++ spc () ++ hov 0 (prlist_with_sep fnl pr_branch typs) let explain_pattern_matching_error env sigma = function | BadPattern (c,t) -> explain_bad_pattern env sigma c t | BadConstructor (c,ind) -> explain_bad_constructor env c ind | WrongNumargConstructor {cstr; expanded; nargs; expected_nassums; expected_ndecls} -> explain_wrong_numarg_constructor env cstr expanded nargs expected_nassums expected_ndecls | WrongNumargInductive {ind; expanded; nargs; expected_nassums; expected_ndecls} -> explain_wrong_numarg_inductive env ind expanded nargs expected_nassums expected_ndecls | UnusedClause tms -> explain_unused_clause env tms | NonExhaustive tms -> explain_non_exhaustive env tms | CannotInferPredicate typs -> explain_cannot_infer_predicate env sigma typs let explain_reduction_tactic_error = function | Tacred.InvalidAbstraction (env,sigma,c,(env',e)) -> let e = map_ptype_error EConstr.of_constr e in str "The abstracted term" ++ spc () ++ quote (pr_letype_env ~goal_concl_style:true env sigma c) ++ spc () ++ str "is not well typed." ++ fnl () ++ explain_type_error env' (Evd.from_env env') e let explain_prim_token_notation_error kind env sigma = function | Notation.UnexpectedTerm c -> (strbrk "Unexpected term " ++ pr_constr_env env sigma c ++ strbrk (" while parsing a "^kind^" notation.")) | Notation.UnexpectedNonOptionTerm c -> (strbrk "Unexpected non-option term " ++ pr_constr_env env sigma c ++ strbrk (" while parsing a "^kind^" notation.")) (** Registration of generic errors Nota: explain_exn does NOT end with a newline anymore! 
*) exception Unhandled let wrap_unhandled f e = try Some (f e) with Unhandled -> None let explain_exn_default = function (* Basic interaction exceptions *) | Stream.Error txt -> hov 0 (str "Syntax error: " ++ str txt ++ str ".") | CLexer.Error.E err -> hov 0 (str (CLexer.Error.to_string err)) | Sys_error msg -> hov 0 (str "System error: " ++ quote (str msg)) | Out_of_memory -> hov 0 (str "Out of memory.") | Stack_overflow -> hov 0 (str "Stack overflow.") | CErrors.Timeout -> hov 0 (str "Timeout!") | Sys.Break -> hov 0 (str "User interrupt.") (* Otherwise, not handled here *) | _ -> raise Unhandled let _ = CErrors.register_handler (wrap_unhandled explain_exn_default) let rec vernac_interp_error_handler = function | Univ.UniverseInconsistency i -> str "Universe inconsistency." ++ spc() ++ Univ.explain_universe_inconsistency UnivNames.(pr_with_global_universes empty_binders) i ++ str "." | TypeError(ctx,te) -> let te = map_ptype_error EConstr.of_constr te in explain_type_error ctx Evd.empty te | PretypeError(ctx,sigma,te) -> explain_pretype_error ctx sigma te | Notation.PrimTokenNotationError(kind,ctx,sigma,te) -> explain_prim_token_notation_error kind ctx sigma te | Typeclasses_errors.TypeClassError(env, sigma, te) -> explain_typeclass_error env sigma te | InductiveError e -> explain_inductive_error e | Primred.IncompatibleDeclarations (act,x,y) -> explain_incompatible_prim_declarations act x y | Modops.ModuleTypingError e -> explain_module_error e | Modintern.ModuleInternalizationError e -> explain_module_internalization_error e | RecursionSchemeError (env,e) -> explain_recursion_scheme_error env e | Cases.PatternMatchingError (env,sigma,e) -> explain_pattern_matching_error env sigma e | Tacred.ReductionTacticError e -> explain_reduction_tactic_error e | Logic.RefinerError (env, sigma, e) -> explain_refiner_error env sigma e | Nametab.GlobalizationError q -> str "The reference" ++ spc () ++ Libnames.pr_qualid q ++ spc () ++ str "was not found" ++ spc () ++ str "in the current" ++ spc () ++ str "environment." | Tacticals.FailError (i,s) -> let s = Lazy.force s in str "Tactic failure" ++ (if Pp.ismt s then s else str ": " ++ s) ++ if Int.equal i 0 then str "." else str " (level " ++ int i ++ str")." 
| Logic_monad.TacticFailure e -> vernac_interp_error_handler e | _ -> raise Unhandled let _ = CErrors.register_handler (wrap_unhandled vernac_interp_error_handler) coq-8.15.0/vernac/himsg.mli000066400000000000000000000020221417001151100154420ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Evd.evar_map -> Pretype_errors.type_error -> Pp.t val explain_pretype_error : Environ.env -> Evd.evar_map -> Pretype_errors.pretype_error -> Pp.t val explain_refiner_error : Environ.env -> Evd.evar_map -> Logic.refiner_error -> Pp.t coq-8.15.0/vernac/indschemes.ml000066400000000000000000000505371417001151100163220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* !elim_flag) ; optwrite = (fun b -> elim_flag := b) } let bifinite_elim_flag = ref false let () = declare_bool_option { optdepr = false; optkey = ["Nonrecursive";"Elimination";"Schemes"]; optread = (fun () -> !bifinite_elim_flag) ; optwrite = (fun b -> bifinite_elim_flag := b) } let case_flag = ref false let () = declare_bool_option { optdepr = false; optkey = ["Case";"Analysis";"Schemes"]; optread = (fun () -> !case_flag) ; optwrite = (fun b -> case_flag := b) } let eq_flag = ref false let () = declare_bool_option { optdepr = false; optkey = ["Boolean";"Equality";"Schemes"]; optread = (fun () -> !eq_flag) ; optwrite = (fun b -> eq_flag := b) } let is_eq_flag () = !eq_flag let eq_dec_flag = ref false let () = declare_bool_option { optdepr = false; optkey = ["Decidable";"Equality";"Schemes"]; optread = (fun () -> !eq_dec_flag) ; optwrite = (fun b -> eq_dec_flag := b) } let rewriting_flag = ref false let () = declare_bool_option { optdepr = false; optkey = ["Rewriting";"Schemes"]; optread = (fun () -> !rewriting_flag) ; optwrite = (fun b -> rewriting_flag := b) } (* Util *) let define ~poly name sigma c types = let univs = Evd.univ_entry ~poly sigma in let entry = Declare.definition_entry ~univs ?types c in let kind = Decls.(IsDefinition Scheme) in let kn = declare_constant ~kind ~name (DefinitionEntry entry) in definition_message name; kn (* Boolean equality *) let declare_beq_scheme_gen names kn = ignore (define_mutual_scheme beq_scheme_kind names kn) let alarm what internal msg = let debug = false in match internal with | UserAutomaticRequest -> (if debug then Feedback.msg_debug (hov 0 msg ++ fnl () ++ what ++ str " not defined.")); None | UserIndividualRequest -> Some msg let try_declare_scheme what f internal names kn = try f names kn with e -> let e = Exninfo.capture e in let rec extract_exn = function Logic_monad.TacticFailure e -> extract_exn e | e -> e in let msg = match extract_exn (fst e) with | ParameterWithoutEquality cst -> alarm what internal (str "Boolean equality not found for parameter " ++ Printer.pr_global cst ++ str".") | InductiveWithProduct -> alarm what internal (str "Unable to decide equality of functional arguments.") | InductiveWithSort -> alarm what internal (str "Unable to decide equality of type arguments.") | NonSingletonProp ind -> alarm what internal (str "Cannot extract computational content from proposition " ++ quote (Printer.pr_inductive (Global.env()) ind) ++ str ".") | EqNotFound (ind',ind) -> alarm what internal (str "Boolean equality on " ++ quote (Printer.pr_inductive (Global.env()) 
ind') ++ strbrk " is missing.") | UndefinedCst s -> alarm what internal (strbrk "Required constant " ++ str s ++ str " undefined.") | DeclareUniv.AlreadyDeclared (kind, id) as exn -> let msg = CErrors.print exn in alarm what internal msg | DecidabilityMutualNotSupported -> alarm what internal (str "Decidability lemma for mutual inductive types not supported.") | EqUnknown s -> alarm what internal (str "Found unsupported " ++ str s ++ str " while building Boolean equality.") | NoDecidabilityCoInductive -> alarm what internal (str "Scheme Equality is only for inductive types.") | DecidabilityIndicesNotSupported -> alarm what internal (str "Inductive types with annotations not supported.") | ConstructorWithNonParametricInductiveType ind -> alarm what internal (strbrk "Unsupported constructor with an argument whose type is a non-parametric inductive type." ++ strbrk " Type " ++ quote (Printer.pr_inductive (Global.env()) ind) ++ str " is applied to an argument which is not a variable.") | e when CErrors.noncritical e -> alarm what internal (str "Unexpected error during scheme creation: " ++ CErrors.print e) | _ -> Exninfo.iraise e in match msg with | None -> () | Some msg -> Exninfo.iraise (UserError msg, snd e) let beq_scheme_msg mind = let mib = Global.lookup_mind mind in (* TODO: mutual inductive case *) str "Boolean equality on " ++ pr_enum (fun ind -> quote (Printer.pr_inductive (Global.env()) ind)) (List.init (Array.length mib.mind_packets) (fun i -> (mind,i))) let declare_beq_scheme_with l kn = try_declare_scheme (beq_scheme_msg kn) declare_beq_scheme_gen UserIndividualRequest l kn let try_declare_beq_scheme kn = (* TODO: handle Fix, eventually handle proof-irrelevance; improve decidability by depending on decidability for the parameters rather than on the bl and lb properties *) try_declare_scheme (beq_scheme_msg kn) declare_beq_scheme_gen UserAutomaticRequest [] kn let declare_beq_scheme = declare_beq_scheme_with [] (* Case analysis schemes *) let declare_one_case_analysis_scheme ind = let (mib,mip) = Global.lookup_inductive ind in let kind = inductive_sort_family mip in let dep = if kind == InProp then case_scheme_kind_from_prop else if not (Inductiveops.has_dependent_elim mib) then case_scheme_kind_from_type else case_dep_scheme_kind_from_type in let kelim = elim_sort (mib,mip) in (* in case the inductive has a type elimination, generates only one induction scheme, the other ones share the same code with the appropriate type *) if Sorts.family_leq InType kelim then define_individual_scheme dep None ind (* Induction/recursion schemes *) let kinds_from_prop = [InType,rect_scheme_kind_from_prop; InProp,ind_scheme_kind_from_prop; InSet,rec_scheme_kind_from_prop; InSProp,sind_scheme_kind_from_prop] let kinds_from_type = [InType,rect_dep_scheme_kind_from_type; InProp,ind_dep_scheme_kind_from_type; InSet,rec_dep_scheme_kind_from_type; InSProp,sind_dep_scheme_kind_from_type] let nondep_kinds_from_type = [InType,rect_scheme_kind_from_type; InProp,ind_scheme_kind_from_type; InSet,rec_scheme_kind_from_type; InSProp,sind_scheme_kind_from_type] let declare_one_induction_scheme ind = let (mib,mip) = Global.lookup_inductive ind in let kind = inductive_sort_family mip in let from_prop = kind == InProp in let depelim = Inductiveops.has_dependent_elim mib in let kelim = Inductiveops.sorts_below (elim_sort (mib,mip)) in let kelim = if Global.sprop_allowed () then kelim else List.filter (fun s -> s <> InSProp) kelim in let elims = List.map_filter (fun (sort,kind) -> if List.mem_f Sorts.family_equal sort 
kelim then Some kind else None) (if from_prop then kinds_from_prop else if depelim then kinds_from_type else nondep_kinds_from_type) in List.iter (fun kind -> define_individual_scheme kind None ind) elims let declare_induction_schemes kn = let mib = Global.lookup_mind kn in if mib.mind_finite <> Declarations.CoFinite then begin for i = 0 to Array.length mib.mind_packets - 1 do declare_one_induction_scheme (kn,i); done; end (* Decidable equality *) let declare_eq_decidability_gen names kn = let mib = Global.lookup_mind kn in if mib.mind_finite <> Declarations.CoFinite then define_mutual_scheme eq_dec_scheme_kind names kn let eq_dec_scheme_msg ind = (* TODO: mutual inductive case *) str "Decidable equality on " ++ quote (Printer.pr_inductive (Global.env()) ind) let declare_eq_decidability_scheme_with l kn = try_declare_scheme (eq_dec_scheme_msg (kn,0)) declare_eq_decidability_gen UserIndividualRequest l kn let try_declare_eq_decidability kn = try_declare_scheme (eq_dec_scheme_msg (kn,0)) declare_eq_decidability_gen UserAutomaticRequest [] kn let declare_eq_decidability = declare_eq_decidability_scheme_with [] let ignore_error f x = try f x with e when CErrors.noncritical e -> () let declare_rewriting_schemes ind = if Hipattern.is_inductive_equality (Global.env ()) ind then begin define_individual_scheme rew_r2l_scheme_kind None ind; define_individual_scheme rew_r2l_dep_scheme_kind None ind; define_individual_scheme rew_r2l_forward_dep_scheme_kind None ind; (* These ones expect the equality to be symmetric; the first one also *) (* needs eq *) ignore_error (define_individual_scheme rew_l2r_scheme_kind None) ind; ignore_error (define_individual_scheme sym_involutive_scheme_kind None) ind; ignore_error (define_individual_scheme rew_l2r_dep_scheme_kind None) ind; ignore_error (define_individual_scheme rew_l2r_forward_dep_scheme_kind None) ind end let warn_cannot_build_congruence = CWarnings.create ~name:"cannot-build-congruence" ~category:"schemes" (fun () -> strbrk "Cannot build congruence scheme because eq is not found") let declare_congr_scheme ind = let env = Global.env () in let sigma = Evd.from_env env in if Hipattern.is_equality_type env sigma (EConstr.of_constr (mkInd ind)) (* FIXME *) then begin if try Coqlib.check_required_library Coqlib.logic_module_name; true with e when CErrors.noncritical e -> false then define_individual_scheme congr_scheme_kind None ind else warn_cannot_build_congruence () end let declare_sym_scheme ind = if Hipattern.is_inductive_equality (Global.env ()) ind then (* Expect the equality to be symmetric *) ignore_error (define_individual_scheme sym_scheme_kind None) ind (* Scheme command *) let smart_global_inductive y = smart_global_inductive y let rec split_scheme env l = match l with | [] -> [],[] | (Some id,t)::q -> let l1,l2 = split_scheme env q in ( match t with | InductionScheme (x,y,z) -> ((id,x,smart_global_inductive y,z)::l1),l2 | CaseScheme (x,y,z) -> ((id,x,smart_global_inductive y,z)::l1),l2 | EqualityScheme x -> l1,((Some id,smart_global_inductive x)::l2) ) (* if no name has been provided, we build one from the types of the ind requested *) | (None,t)::q -> let l1,l2 = split_scheme env q in let names inds recs isdep y z = let ind = smart_global_inductive y in let sort_of_ind = inductive_sort_family (snd (lookup_mind_specif env ind)) in let suffix = ( match sort_of_ind with | InProp -> if isdep then (match z with | InSProp -> inds ^ "s_dep" | InProp -> inds ^ "_dep" | InSet -> recs ^ "_dep" | InType -> recs ^ "t_dep") else ( match z with | InSProp -> 
inds ^ "s" | InProp -> inds | InSet -> recs | InType -> recs ^ "t" ) | _ -> if isdep then (match z with | InSProp -> inds ^ "s" | InProp -> inds | InSet -> recs | InType -> recs ^ "t" ) else (match z with | InSProp -> inds ^ "s_nodep" | InProp -> inds ^ "_nodep" | InSet -> recs ^ "_nodep" | InType -> recs ^ "t_nodep") ) in let newid = add_suffix (Nametab.basename_of_global (GlobRef.IndRef ind)) suffix in let newref = CAst.make newid in ((newref,isdep,ind,z)::l1),l2 in match t with | CaseScheme (x,y,z) -> names "_case" "_case" x y z | InductionScheme (x,y,z) -> names "_ind" "_rec" x y z | EqualityScheme x -> l1,((None,smart_global_inductive x)::l2) let do_mutual_induction_scheme ?(force_mutual=false) lnamedepindsort = let lrecnames = List.map (fun ({CAst.v},_,_,_) -> v) lnamedepindsort and env0 = Global.env() in let sigma, lrecspec, _ = List.fold_right (fun (_,dep,ind,sort) (evd, l, inst) -> let evd, indu, inst = match inst with | None -> let _, ctx = Typeops.type_of_global_in_context env0 (GlobRef.IndRef ind) in let u, ctx = UnivGen.fresh_instance_from ctx None in let evd = Evd.from_ctx (UState.of_context_set ctx) in evd, (ind,u), Some u | Some ui -> evd, (ind, ui), inst in (evd, (indu,dep,sort) :: l, inst)) lnamedepindsort (Evd.from_env env0,[],None) in let sigma, listdecl = Indrec.build_mutual_induction_scheme env0 sigma ~force_mutual lrecspec in let poly = (* NB: build_mutual_induction_scheme forces nonempty list of mutual inductives (force_mutual is about the generated schemes) *) let _,_,ind,_ = List.hd lnamedepindsort in Global.is_polymorphic (GlobRef.IndRef ind) in let declare decl fi lrecref = let decltype = Retyping.get_type_of env0 sigma (EConstr.of_constr decl) in let decltype = EConstr.to_constr sigma decltype in let cst = define ~poly fi sigma decl (Some decltype) in GlobRef.ConstRef cst :: lrecref in let _ = List.fold_right2 declare listdecl lrecnames [] in fixpoint_message None lrecnames let get_common_underlying_mutual_inductive env = function | [] -> assert false | (id,(mind,i as ind))::l as all -> match List.filter (fun (_,(mind',_)) -> not (Environ.QMutInd.equal env mind mind')) l with | (_,ind')::_ -> raise (RecursionSchemeError (env, NotMutualInScheme (ind,ind'))) | [] -> if not (List.distinct_f Int.compare (List.map snd (List.map snd all))) then user_err Pp.(str "A type occurs twice"); mind, List.map_filter (function (Some id,(_,i)) -> Some (i,id.CAst.v) | (None,_) -> None) all let do_scheme l = let env = Global.env() in let ischeme,escheme = split_scheme env l in (* we want 1 kind of scheme at a time so we check if the user tried to declare different schemes at once *) if not (List.is_empty ischeme) && not (List.is_empty escheme) then user_err Pp.(str "Do not declare equality and induction scheme at the same time.") else ( if not (List.is_empty ischeme) then do_mutual_induction_scheme ischeme else let mind,l = get_common_underlying_mutual_inductive env escheme in declare_beq_scheme_with l mind; declare_eq_decidability_scheme_with l mind ) (**********************************************************************) (* Combined scheme *) (* Matthieu Sozeau, Dec 2006 *) let list_split_rev_at index l = let rec aux i acc = function hd :: tl when Int.equal i index -> acc, tl | hd :: tl -> aux (succ i) (hd :: acc) tl | [] -> failwith "List.split_when: Invalid argument" in aux 0 [] l let fold_left' f = function [] -> invalid_arg "fold_left'" | hd :: tl -> List.fold_left f hd tl let mk_coq_and sigma = Evd.fresh_global (Global.env ()) sigma (Coqlib.lib_ref "core.and.type") let 
mk_coq_conj sigma = Evd.fresh_global (Global.env ()) sigma (Coqlib.lib_ref "core.and.conj") let mk_coq_prod sigma = Evd.fresh_global (Global.env ()) sigma (Coqlib.lib_ref "core.prod.type") let mk_coq_pair sigma = Evd.fresh_global (Global.env ()) sigma (Coqlib.lib_ref "core.prod.intro") let build_combined_scheme env schemes = let sigma = Evd.from_env env in let sigma, defs = List.fold_left_map (fun sigma cst -> let sigma, c = Evd.fresh_constant_instance env sigma cst in sigma, (c, Typeops.type_of_constant_in env c)) sigma schemes in let find_inductive ty = let (ctx, arity) = decompose_prod ty in let (_, last) = List.hd ctx in match Constr.kind last with | App (ind, args) -> let ind = destInd ind in let (_,spec) = Inductive.lookup_mind_specif env (fst ind) in ctx, ind, spec.mind_nrealargs | _ -> ctx, destInd last, 0 in let (c, t) = List.hd defs in let ctx, ind, nargs = find_inductive t in (* We check if ALL the predicates are in Prop, if so we use propositional conjunction '/\', otherwise we use the simple product '*'. *) let inprop = let inprop (_,t) = Retyping.get_sort_family_of env sigma (EConstr.of_constr t) == Sorts.InProp in List.for_all inprop defs in let mk_and, mk_conj = if inprop then (mk_coq_and, mk_coq_conj) else (mk_coq_prod, mk_coq_pair) in (* Number of clauses, including the predicates quantification *) let prods = nb_prod sigma (EConstr.of_constr t) - (nargs + 1) in let sigma, coqand = mk_and sigma in let sigma, coqconj = mk_conj sigma in let relargs = rel_vect 0 prods in let concls = List.rev_map (fun (cst, t) -> mkApp(mkConstU cst, relargs), snd (decompose_prod_n prods t)) defs in let concl_bod, concl_typ = fold_left' (fun (accb, acct) (cst, x) -> mkApp (EConstr.to_constr sigma coqconj, [| x; acct; cst; accb |]), mkApp (EConstr.to_constr sigma coqand, [| x; acct |])) concls in let ctx, _ = list_split_rev_at prods (List.rev_map (fun (x, y) -> LocalAssum (x, y)) ctx) in let typ = List.fold_left (fun d c -> Term.mkProd_wo_LetIn c d) concl_typ ctx in let body = it_mkLambda_or_LetIn concl_bod ctx in let sigma = Typing.check env sigma (EConstr.of_constr body) (EConstr.of_constr typ) in (sigma, body, typ) let do_combined_scheme name schemes = let open CAst in let csts = List.map (fun {CAst.loc;v} -> let qualid = qualid_of_ident v in try Nametab.locate_constant qualid with Not_found -> user_err ?loc Pp.(pr_qualid qualid ++ str " is not declared.")) schemes in let sigma,body,typ = build_combined_scheme (Global.env ()) csts in (* It is possible for the constants to have different universe polymorphism from each other, however that is only when the user manually defined at least one of them (as Scheme would pick the polymorphism of the inductive block). In that case if they want some other polymorphism they can also manually define the combined scheme. 
*) let poly = Global.is_polymorphic (GlobRef.ConstRef (List.hd csts)) in ignore (define ~poly name.v sigma body (Some typ)); fixpoint_message None [name.v] (**********************************************************************) let map_inductive_block f kn n = for i=0 to n-1 do f (kn,i) done let declare_default_schemes kn = let mib = Global.lookup_mind kn in let n = Array.length mib.mind_packets in if !elim_flag && (mib.mind_finite <> Declarations.BiFinite || !bifinite_elim_flag) && mib.mind_typing_flags.check_positive then declare_induction_schemes kn; if !case_flag then map_inductive_block declare_one_case_analysis_scheme kn n; if is_eq_flag() then try_declare_beq_scheme kn; if !eq_dec_flag then try_declare_eq_decidability kn; if !rewriting_flag then map_inductive_block declare_congr_scheme kn n; if !rewriting_flag then map_inductive_block declare_sym_scheme kn n; if !rewriting_flag then map_inductive_block declare_rewriting_schemes kn n coq-8.15.0/vernac/indschemes.mli000066400000000000000000000037721417001151100164720ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit val declare_eq_decidability : MutInd.t -> unit (** Build and register a congruence scheme for an equality-like inductive type *) val declare_congr_scheme : inductive -> unit (** Build and register rewriting schemes for an equality-like inductive type *) val declare_rewriting_schemes : inductive -> unit (** Mutual Minimality/Induction scheme. [force_mutual] forces the construction of eliminators having the same predicates and methods even if some of the inductives are not recursive. By default it is [false] and some of the eliminators are defined as simple case analysis. *) val do_mutual_induction_scheme : ?force_mutual:bool -> (lident * bool * inductive * Sorts.family) list -> unit (** Main calls to interpret the Scheme command *) val do_scheme : (lident option * scheme) list -> unit (** Combine a list of schemes into a conjunction of them *) val build_combined_scheme : env -> Constant.t list -> Evd.evar_map * constr * types val do_combined_scheme : lident -> lident list -> unit (** Hook called at each inductive type definition *) val declare_default_schemes : MutInd.t -> unit coq-8.15.0/vernac/library.ml000066400000000000000000000445011417001151100156360ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* ObjFile.open_in ~file) f (************************************************************************) (** Serialized objects loaded on-the-fly *) exception Faulty of string module Delayed : sig type 'a delayed val in_delayed : string -> ObjFile.in_handle -> segment:string -> 'a delayed * Digest.t val fetch_delayed : 'a delayed -> 'a end = struct type 'a delayed = { del_file : string; del_off : int64; del_digest : Digest.t; } let in_delayed f ch ~segment = let seg = ObjFile.get_segment ch ~segment in let digest = seg.ObjFile.hash in { del_file = f; del_digest = digest; del_off = seg.ObjFile.pos; }, digest (** Fetching a table of opaque terms at position [pos] in file [f], expecting to find first a copy of [digest]. 
*) let fetch_delayed del = let { del_digest = digest; del_file = f; del_off = pos; } = del in let ch = open_in_bin f in let obj, digest' = try let () = LargeFile.seek_in ch pos in let obj = System.marshal_in f ch in let digest' = Digest.input ch in obj, digest' with e -> close_in ch; raise e in close_in ch; if not (String.equal digest digest') then raise (Faulty f); obj end open Delayed (************************************************************************) (*s Modules on disk contain the following informations (after the magic number, and before the digest). *) type compilation_unit_name = DirPath.t type library_disk = { md_compiled : Safe_typing.compiled_library; md_objects : Declaremods.library_objects; } type summary_disk = { md_name : compilation_unit_name; md_deps : (compilation_unit_name * Safe_typing.vodigest) array; md_ocaml : string; } (*s Modules loaded in memory contain the following informations. They are kept in the global table [libraries_table]. *) type library_t = { library_name : compilation_unit_name; library_data : library_disk; library_deps : (compilation_unit_name * Safe_typing.vodigest) array; library_digests : Safe_typing.vodigest; library_extra_univs : Univ.ContextSet.t; } type library_summary = { libsum_name : compilation_unit_name; libsum_digests : Safe_typing.vodigest; } (* This is a map from names to loaded libraries *) let libraries_table : library_summary DPmap.t ref = Summary.ref DPmap.empty ~name:"LIBRARY" (* This is the map of loaded libraries filename *) (* (not synchronized so as not to be caught in the states on disk) *) let libraries_filename_table = ref DPmap.empty (* These are the _ordered_ sets of loaded, imported and exported libraries *) let libraries_loaded_list = Summary.ref [] ~name:"LIBRARY-LOAD" (* Opaque proof tables *) (* various requests to the tables *) let find_library dir = DPmap.find dir !libraries_table let try_find_library dir = try find_library dir with Not_found -> user_err (str "Unknown library " ++ DirPath.print dir ++ str ".") let register_library_filename dir f = (* Not synchronized: overwrite the previous binding if one existed *) (* from a previous play of the session *) libraries_filename_table := DPmap.add dir f !libraries_filename_table let library_full_filename dir = try DPmap.find dir !libraries_filename_table with Not_found -> "" let overwrite_library_filenames f = let f = if Filename.is_relative f then Filename.concat (Sys.getcwd ()) f else f in DPmap.iter (fun dir _ -> register_library_filename dir f) !libraries_table let library_is_loaded dir = try let _ = find_library dir in true with Not_found -> false (* If a library is loaded several time, then the first occurrence must be performed first, thus the libraries_loaded_list ... 
*) let register_loaded_library m = let libname = m.libsum_name in let rec aux = function | [] -> if Flags.get_native_compiler () then begin let dirname = Filename.dirname (library_full_filename libname) in Nativelib.enable_library dirname libname end; [libname] | m'::_ as l when DirPath.equal m' libname -> l | m'::l' -> m' :: aux l' in libraries_loaded_list := aux !libraries_loaded_list; libraries_table := DPmap.add libname m !libraries_table let loaded_libraries () = !libraries_loaded_list (** Delayed / available tables of opaque terms *) type table_status = | ToFetch of Opaques.opaque_disk delayed | Fetched of Opaques.opaque_disk let opaque_tables = ref (DPmap.empty : table_status DPmap.t) let add_opaque_table dp st = opaque_tables := DPmap.add dp st !opaque_tables let access_table what tables dp i = let t = match DPmap.find dp !tables with | Fetched t -> t | ToFetch f -> let dir_path = Names.DirPath.to_string dp in Flags.if_verbose Feedback.msg_info (str"Fetching " ++ str what++str" from disk for " ++ str dir_path); let t = try fetch_delayed f with Faulty f -> user_err (str "The file " ++ str f ++ str " (bound to " ++ str dir_path ++ str ") is corrupted,\ncannot load some " ++ str what ++ str " in it.\n") in tables := DPmap.add dp (Fetched t) !tables; t in Opaques.get_opaque_disk i t let access_opaque_table o = let (sub, ci, dp, i) = Opaqueproof.repr o in let ans = if DirPath.equal dp (Global.current_dirpath ()) then Opaques.get_current_opaque i else let what = "opaque proofs" in access_table what opaque_tables dp i in match ans with | None -> None | Some (c, ctx) -> let (c, ctx) = Cooking.cook_constr ci (c, ctx) in let c = Mod_subst.(List.fold_right subst_mps sub c) in Some (c, ctx) let indirect_accessor = { Global.access_proof = access_opaque_table; } (************************************************************************) (* Internalise libraries *) type seg_sum = summary_disk type seg_lib = library_disk type seg_univ = (* true = vivo, false = vi *) Univ.ContextSet.t * bool type seg_proofs = Opaques.opaque_disk let mk_library sd md digests univs = { library_name = sd.md_name; library_data = md; library_deps = sd.md_deps; library_digests = digests; library_extra_univs = univs; } let mk_summary m = { libsum_name = m.library_name; libsum_digests = m.library_digests; } let intern_from_file f = let ch = raw_intern_library f in let (lsd : seg_sum), digest_lsd = ObjFile.marshal_in_segment ch ~segment:"summary" in let ((lmd : seg_lib), digest_lmd) = ObjFile.marshal_in_segment ch ~segment:"library" in let (univs : seg_univ option), digest_u = ObjFile.marshal_in_segment ch ~segment:"universes" in let ((del_opaque : seg_proofs delayed),_) = in_delayed f ch ~segment:"opaques" in ObjFile.close_in ch; System.check_caml_version ~caml:lsd.md_ocaml ~file:f; register_library_filename lsd.md_name f; add_opaque_table lsd.md_name (ToFetch del_opaque); let open Safe_typing in match univs with | None -> mk_library lsd lmd (Dvo_or_vi digest_lmd) Univ.ContextSet.empty | Some (uall,true) -> mk_library lsd lmd (Dvivo (digest_lmd,digest_u)) uall | Some (_,false) -> mk_library lsd lmd (Dvo_or_vi digest_lmd) Univ.ContextSet.empty let rec intern_library ~lib_resolver (needed, contents) (dir, f) from = (* Look if in the current logical environment *) try (find_library dir).libsum_digests, (needed, contents) with Not_found -> (* Look if already listed and consequently its dependencies too *) try (DPmap.find dir contents).library_digests, (needed, contents) with Not_found -> 
Feedback.feedback(Feedback.FileDependency (from, DirPath.to_string dir)); (* [dir] is an absolute name which matches [f] which must be in loadpath *) let f = match f with Some f -> f | None -> lib_resolver dir in let m = intern_from_file f in if not (DirPath.equal dir m.library_name) then user_err (str "The file " ++ str f ++ str " contains library" ++ spc () ++ DirPath.print m.library_name ++ spc () ++ str "and not library" ++ spc() ++ DirPath.print dir ++ str "."); Feedback.feedback (Feedback.FileLoaded(DirPath.to_string dir, f)); m.library_digests, intern_library_deps ~lib_resolver (needed, contents) dir m f and intern_library_deps ~lib_resolver libs dir m from = let needed, contents = Array.fold_left (intern_mandatory_library ~lib_resolver dir from) libs m.library_deps in (dir :: needed, DPmap.add dir m contents ) and intern_mandatory_library ~lib_resolver caller from libs (dir,d) = let digest, libs = intern_library ~lib_resolver libs (dir, None) (Some from) in if not (Safe_typing.digest_match ~actual:digest ~required:d) then user_err (str "Compiled library " ++ DirPath.print caller ++ str " (in file " ++ str from ++ str ") makes inconsistent assumptions \ over library " ++ DirPath.print dir); libs let rec_intern_library ~lib_resolver libs (dir, f) = let _, libs = intern_library ~lib_resolver libs (dir, Some f) None in libs let native_name_from_filename f = let ch = raw_intern_library f in let (lmd : seg_sum), digest_lmd = ObjFile.marshal_in_segment ch ~segment:"summary" in Nativecode.mod_uid_of_dirpath lmd.md_name (**********************************************************************) (*s [require_library] loads and possibly opens a library. This is a synchronized operation. It is performed as follows: preparation phase: (functions require_library* ) the library and its dependencies are read from to disk (using intern_* ) [they are read from disk to ensure that at section/module discharging time, the physical library referred to outside the section/module is the one that was used at type-checking time in the section/module] execution phase: (through add_leaf and cache_require) the library is loaded in the environment and Nametab, the objects are registered etc, using functions from Declaremods (via load_library, which recursively loads its dependencies) *) let register_library m = let l = m.library_data in Declaremods.register_library m.library_name l.md_compiled l.md_objects m.library_digests m.library_extra_univs; register_loaded_library (mk_summary m) (* Follow the semantics of Anticipate object: - called at module or module type closing when a Require occurs in the module or module type - not called from a library (i.e. 
a module identified with a file) *) let load_require _ (_,(needed,modl,_)) = List.iter register_library needed let open_require i (_,(_,modl,export)) = Option.iter (fun export -> let mpl = List.map (fun m -> unfiltered, MPfile m) modl in (* TODO support filters in Require *) Declaremods.import_modules ~export mpl) export (* [needed] is the ordered list of libraries not already loaded *) let cache_require o = load_require 1 o; open_require 1 o let discharge_require (_,o) = Some o (* open_function is never called from here because an Anticipate object *) type require_obj = library_t list * DirPath.t list * bool option let in_require : require_obj -> obj = declare_object {(default_object "REQUIRE") with cache_function = cache_require; load_function = load_require; open_function = (fun _ _ -> assert false); discharge_function = discharge_require; classify_function = (fun o -> Anticipate o) } (* Require libraries, import them if [export <> None], mark them for export if [export = Some true] *) let warn_require_in_module = CWarnings.create ~name:"require-in-module" ~category:"fragile" (fun () -> strbrk "Use of “Require” inside a module is fragile." ++ spc() ++ strbrk "It is not recommended to use this functionality in finished proof scripts.") let require_library_from_dirpath ~lib_resolver modrefl export = let needed, contents = List.fold_left (rec_intern_library ~lib_resolver) ([], DPmap.empty) modrefl in let needed = List.rev_map (fun dir -> DPmap.find dir contents) needed in let modrefl = List.map fst modrefl in if Lib.is_module_or_modtype () then begin warn_require_in_module (); add_anonymous_leaf (in_require (needed,modrefl,None)); Option.iter (fun export -> (* TODO import filters *) List.iter (fun m -> Declaremods.import_module unfiltered ~export (MPfile m)) modrefl) export end else add_anonymous_leaf (in_require (needed,modrefl,export)); () (************************************************************************) (*s Initializing the compilation of a library. *) type ('uid, 'doc) tasks = (('uid, 'doc) Stateid.request * bool) list let load_library_todo f = let ch = raw_intern_library f in let (s0 : seg_sum), _ = ObjFile.marshal_in_segment ch ~segment:"summary" in let (s1 : seg_lib), _ = ObjFile.marshal_in_segment ch ~segment:"library" in let (s2 : seg_univ option), _ = ObjFile.marshal_in_segment ch ~segment:"universes" in let (tasks : (Opaqueproof.opaque_handle option, 'doc) tasks option), _ = ObjFile.marshal_in_segment ch ~segment:"tasks" in let (s4 : seg_proofs), _ = ObjFile.marshal_in_segment ch ~segment:"opaques" in ObjFile.close_in ch; System.check_caml_version ~caml:s0.md_ocaml ~file:f; if tasks = None then user_err (str "Not a .vio file."); if s2 = None then user_err (str "Not a .vio file."); if snd (Option.get s2) then user_err (str "Not a .vio file."); s0, s1, Option.get s2, Option.get tasks, s4 (************************************************************************) (*s [save_library dir] ends library [dir] and save it to the disk. 
*) let current_deps () = let map name = let m = try_find_library name in (name, m.libsum_digests) in List.map map !libraries_loaded_list let error_recursively_dependent_library dir = user_err (strbrk "Unable to use logical name " ++ DirPath.print dir ++ strbrk " to save current library because" ++ strbrk " it already depends on a library of this name.") type 'doc todo_proofs = | ProofsTodoNone (* for .vo *) | ProofsTodoSomeEmpty of Future.UUIDSet.t (* for .vos *) | ProofsTodoSome of Future.UUIDSet.t * (Future.UUID.t, 'doc) tasks (* for .vio *) (* We now use two different digests in a .vo file. The first one only covers half of the file, without the opaque table. It is used for identifying this version of this library : this digest is the one leading to "inconsistent assumptions" messages. The other digest comes at the very end, and covers everything before it. This one is used for integrity check of the whole file when loading the opaque table. *) (* Security weakness: file might have been changed on disk between writing the content and computing the checksum... *) let save_library_base f sum lib univs tasks proofs = let ch = raw_extern_library f in try ObjFile.marshal_out_segment ch ~segment:"summary" (sum : seg_sum); ObjFile.marshal_out_segment ch ~segment:"library" (lib : seg_lib); ObjFile.marshal_out_segment ch ~segment:"universes" (univs : seg_univ option); ObjFile.marshal_out_segment ch ~segment:"tasks" (tasks : (Opaqueproof.opaque_handle option, 'doc) tasks option); ObjFile.marshal_out_segment ch ~segment:"opaques" (proofs : seg_proofs); ObjFile.close_out ch with reraise -> let reraise = Exninfo.capture reraise in ObjFile.close_out ch; Feedback.msg_warning (str "Removed file " ++ str f); Sys.remove f; Exninfo.iraise reraise let save_library_to todo_proofs ~output_native_objects dir f = assert( let expected_extension = match todo_proofs with | ProofsTodoNone -> ".vo" | ProofsTodoSomeEmpty _ -> ".vos" | ProofsTodoSome _ -> ".vio" in Filename.check_suffix f expected_extension); let except = match todo_proofs with | ProofsTodoNone -> Future.UUIDSet.empty | ProofsTodoSomeEmpty except -> except | ProofsTodoSome (except,l) -> except in (* Ensure that the call below is performed with all opaques joined *) let () = Opaques.Summary.join ~except () in let opaque_table, f2t_map = Opaques.dump ~except () in let () = assert (not (Future.UUIDSet.is_empty except) || Safe_typing.is_joined_environment (Global.safe_env ())) in let cenv, seg, ast = Declaremods.end_library ~output_native_objects dir in let tasks, utab = match todo_proofs with | ProofsTodoNone -> None, None | ProofsTodoSomeEmpty _except -> None, Some (Univ.ContextSet.empty,false) | ProofsTodoSome (_except, tasks) -> let tasks = List.map Stateid.(fun (r,b) -> try { r with uuid = Some (Future.UUIDMap.find r.uuid f2t_map) }, b with Not_found -> assert b; { r with uuid = None }, b) tasks in Some tasks, Some (Univ.ContextSet.empty,false) in let sd = { md_name = dir; md_deps = Array.of_list (current_deps ()); md_ocaml = Coq_config.caml_version; } in let md = { md_compiled = cenv; md_objects = seg; } in if Array.exists (fun (d,_) -> DirPath.equal d dir) sd.md_deps then error_recursively_dependent_library dir; (* Writing vo payload *) save_library_base f sd md utab tasks opaque_table; (* Writing native code files *) if output_native_objects then let fn = Filename.dirname f ^"/"^ Nativecode.mod_uid_of_dirpath dir in Nativelib.compile_library ast fn let save_library_raw f sum lib univs proofs = save_library_base f sum lib (Some univs) None proofs 
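(* Editor's note: illustrative sketch only, not part of Coq's API. The code
   above writes a library as a sequence of marshalled segments
   ([save_library_base] via [ObjFile.marshal_out_segment]) and later re-reads
   a single segment at a recorded offset while verifying its digest
   ([fetch_delayed]). The hypothetical [write_seg]/[read_seg] below show that
   per-segment pattern using only the OCaml standard library; the real .vo
   layout handled by [ObjFile] is richer and, as explained in the comment
   above, also involves a digest covering the whole file.

   let write_seg oc v =
     (* marshal [v], remember where it starts, and append the digest of the
        marshalled bytes *)
     let pos = pos_out oc in
     let s = Marshal.to_string v [] in
     output_string oc s;
     Digest.output oc (Digest.string s);
     (pos, String.length s)

   let read_seg (f : string) (pos, len) =
     (* re-open the file, jump to the recorded offset, and refuse to
        unmarshal if the stored digest does not match the bytes read,
        mirroring what [fetch_delayed] does for the opaque segment *)
     let ch = open_in_bin f in
     let s, stored =
       try
         seek_in ch pos;
         let s = really_input_string ch len in
         s, Digest.input ch
       with e -> close_in ch; raise e in
     close_in ch;
     if not (String.equal (Digest.string s) stored) then
       failwith ("corrupted segment in " ^ f);
     Marshal.from_string s 0
 *)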
let get_used_load_paths () = String.Set.elements (List.fold_left (fun acc m -> String.Set.add (Filename.dirname (library_full_filename m)) acc) String.Set.empty !libraries_loaded_list) let _ = Nativelib.get_load_paths := get_used_load_paths coq-8.15.0/vernac/library.mli000066400000000000000000000057321417001151100160120ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* CUnix.physical_path) -> (DirPath.t * string) list -> bool option -> unit (** {6 Start the compilation of a library } *) (** Segments of a library *) type seg_sum type seg_lib type seg_univ = (* all_cst, finished? *) Univ.ContextSet.t * bool type seg_proofs = Opaques.opaque_disk (** End the compilation of a library and save it to a ".vo" file, a ".vio" file, or a ".vos" file, depending on the todo_proofs argument. [output_native_objects]: when producing vo objects, also compile the native-code version. *) type ('uid, 'doc) tasks = (('uid, 'doc) Stateid.request * bool) list type 'doc todo_proofs = | ProofsTodoNone (* for .vo *) | ProofsTodoSomeEmpty of Future.UUIDSet.t (* for .vos *) | ProofsTodoSome of Future.UUIDSet.t * (Future.UUID.t, 'doc) tasks (* for .vio *) val save_library_to : 'document todo_proofs -> output_native_objects:bool -> DirPath.t -> string -> unit val load_library_todo : CUnix.physical_path -> seg_sum * seg_lib * seg_univ * (Opaqueproof.opaque_handle option, 'doc) tasks * seg_proofs val save_library_raw : string -> seg_sum -> seg_lib -> seg_univ -> seg_proofs -> unit (** {6 Interrogate the status of libraries } *) (** - Tell if a library is loaded *) val library_is_loaded : DirPath.t -> bool (** - Tell which libraries are loaded *) val loaded_libraries : unit -> DirPath.t list (** - Return the full filename of a loaded library. *) val library_full_filename : DirPath.t -> string (** - Overwrite the filename of all libraries (used when restoring a state) *) val overwrite_library_filenames : string -> unit (** {6 Native compiler. 
} *) val native_name_from_filename : string -> string (** {6 Opaque accessors} *) val indirect_accessor : Global.indirect_accessor coq-8.15.0/vernac/loadpath.ml000066400000000000000000000265721417001151100157760ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* raise Not_found | [p] -> p | _ -> anomaly_too_many_paths phys_dir (* get the list of load paths that correspond to a given logical path *) let find_with_logical_path dirpath = List.filter (fun p -> Names.DirPath.equal p.path_logical dirpath) !load_paths let remove_load_path dir = let filter p = not (String.equal p.path_physical dir) in load_paths := List.filter filter !load_paths let warn_overriding_logical_loadpath = CWarnings.create ~name:"overriding-logical-loadpath" ~category:"loadpath" (fun (phys_path, old_path, coq_path) -> Pp.(seq [str phys_path; strbrk " was previously bound to " ; DP.print old_path; strbrk "; it is remapped to " ; DP.print coq_path])) let add_load_path root phys_path coq_path ~implicit = let phys_path = CUnix.canonical_path_name phys_path in let filter p = String.equal p.path_physical phys_path in let binding = { path_logical = coq_path; path_physical = phys_path; path_implicit = implicit; path_root = root; } in match List.filter filter !load_paths with | [] -> load_paths := binding :: !load_paths | [{ path_logical = old_path; path_implicit = old_implicit }] -> let replace = if DP.equal coq_path old_path then implicit <> old_implicit else let () = (* Do not warn when overriding the default "-I ." path *) if not (DP.equal old_path Libnames.default_root_prefix) then warn_overriding_logical_loadpath (phys_path, old_path, coq_path) in true in if replace then begin remove_load_path phys_path; load_paths := binding :: !load_paths; end | _ -> anomaly_too_many_paths phys_path let filter_path f = let rec aux = function | [] -> [] | p :: l -> if f p.path_logical then (p.path_physical, p.path_logical) :: aux l else aux l in aux !load_paths let eq_root (phys,log_path) (phys',log_path') = String.equal phys phys' && Names.DirPath.equal log_path log_path' let add_path root file = function | [] -> [root,[file]] | (root',l) :: l' as l'' -> if eq_root root root' then (root', file::l) :: l' else (root,[file]) :: l'' let expand_path ?root dir = let exact_path = match root with None -> dir | Some root -> Libnames.append_dirpath root dir in let rec aux = function | [] -> [], [] | { path_physical = ph; path_logical = lg; path_implicit = implicit; path_root } :: l -> let full, others = aux l in if DP.equal exact_path lg then (* Most recent full match comes first *) (ph, lg) :: full, others else let success = match root with | None -> implicit && Libnames.is_dirpath_suffix_of dir lg | Some root -> Libnames.(is_dirpath_prefix_of root lg && is_dirpath_suffix_of dir (drop_dirpath_prefix root lg)) in if success then (* Only keep partial path in the same "-R" block *) full, add_path path_root (ph, lg) others else full, others in let full, others = aux !load_paths in (* Returns the dirpath matching exactly and the ordered list of -R/-Q blocks with subdirectories that matches *) full, List.map snd others let locate_file fname = let paths = List.map physical !load_paths in let _,longfname = System.find_file_in_path ~warn:(not !Flags.quiet) paths fname in longfname (************************************************************************) (*s Locate absolute or partially 
qualified library names in the path *) type library_location = LibLoaded | LibInPath type locate_error = LibUnmappedDir | LibNotFound type 'a locate_result = ('a, locate_error) result let warn_several_object_files = CWarnings.create ~name:"several-object-files" ~category:"require" Pp.(fun (vi, vo) -> seq [ str "Loading"; spc (); str vi ; strbrk " instead of "; str vo ; strbrk " because it is more recent." ]) let select_vo_file ~find base = let find ext = try let name = Names.Id.to_string base ^ ext in let lpath, file = find name in Some (lpath, file) with Not_found -> None in (* If [!Flags.load_vos_libraries] and the .vos file exists and this file is not empty Then load this library Else load the most recent between the .vo file and the .vio file, or if there is only of the two files, take this one, or raise an error if both are missing. *) let load_most_recent_of_vo_and_vio () = match find ".vo", find ".vio" with | None, None -> Error LibNotFound | Some res, None | None, Some res -> Ok res | Some (_, vo), Some (_, vi as resvi) when Unix.((stat vo).st_mtime < (stat vi).st_mtime) -> warn_several_object_files (vi, vo); Ok resvi | Some resvo, Some _ -> Ok resvo in if !Flags.load_vos_libraries then begin match find ".vos" with | Some (_, vos as resvos) when (Unix.stat vos).Unix.st_size > 0 -> Ok resvos | _ -> load_most_recent_of_vo_and_vio() end else load_most_recent_of_vo_and_vio() let find_first loadpath base = match System.all_in_path loadpath base with | [] -> raise Not_found | f :: _ -> f let find_unique fullqid loadpath base = match System.all_in_path loadpath base with | [] -> raise Not_found | [f] -> f | _::_ as l -> CErrors.user_err Pp.(str "Required library " ++ Libnames.pr_qualid fullqid ++ strbrk " matches several files in path (found " ++ pr_enum str (List.map snd l) ++ str ").") let locate_absolute_library dir : CUnix.physical_path locate_result = (* Search in loadpath *) let pref, base = Libnames.split_dirpath dir in let loadpath = filter_path (fun dir -> DP.equal dir pref) in match loadpath with | [] -> Error LibUnmappedDir | _ -> match select_vo_file ~find:(find_first loadpath) base with | Ok (_, file) -> Ok file | Error fail -> Error fail let locate_qualified_library ?root qid : (library_location * DP.t * CUnix.physical_path) locate_result = (* Search library in loadpath *) let dir, base = Libnames.repr_qualid qid in match expand_path ?root dir with | [], [] -> Error LibUnmappedDir | full_matches, others -> let result = (* Priority to exact matches *) match select_vo_file ~find:(find_first full_matches) base with | Ok _ as x -> x | Error _ -> (* Looking otherwise in -R/-Q blocks of partial matches *) let rec aux = function | [] -> Error LibUnmappedDir | block :: rest -> match select_vo_file ~find:(find_unique qid block) base with | Ok _ as x -> x | Error _ -> aux rest in aux others in match result with | Ok (dir,file) -> let library = Libnames.add_dirpath_suffix dir base in (* Look if loaded *) if Library.library_is_loaded library then Ok (LibLoaded, library, Library.library_full_filename library) (* Otherwise, look for it in the file system *) else Ok (LibInPath, library, file) | Error _ as e -> e let error_unmapped_dir qid = let prefix, _ = Libnames.repr_qualid qid in CErrors.user_err Pp.(seq [ str "Cannot load "; Libnames.pr_qualid qid; str ":"; spc () ; str "no physical path bound to"; spc () ; DP.print prefix; fnl () ]) let error_lib_not_found qid = let vos = !Flags.load_vos_libraries in let vos_msg = if vos then [Pp.str " (while searching for a .vos file)"] else [] in 
CErrors.user_err Pp.(seq ([ str "Cannot find library "; Libnames.pr_qualid qid; str" in loadpath"]@vos_msg)) let try_locate_absolute_library dir = match locate_absolute_library dir with | Ok res -> res | Error LibUnmappedDir -> error_unmapped_dir (Libnames.qualid_of_dirpath dir) | Error LibNotFound -> error_lib_not_found (Libnames.qualid_of_dirpath dir) (** { 5 Extending the load path } *) type vo_path = { unix_path : string (** Filesystem path containing vo/ml files *) ; coq_path : DP.t (** Coq prefix for the path *) ; implicit : bool (** [implicit = true] avoids having to qualify with [coq_path] *) ; has_ml : bool (** If [has_ml] is true, the directory will also be added to the ml include path *) ; recursive : bool (** [recursive] will determine whether we explore sub-directories *) } let warn_cannot_open_path = CWarnings.create ~name:"cannot-open-path" ~category:"filesystem" (fun unix_path -> Pp.(str "Cannot open " ++ str unix_path)) let warn_cannot_use_directory = CWarnings.create ~name:"cannot-use-directory" ~category:"filesystem" (fun d -> Pp.(str "Directory " ++ str d ++ strbrk " cannot be used as a Coq identifier (skipped)")) let convert_string d = try Names.Id.of_string d with | CErrors.UserError _ -> let d = Unicode.escaped_if_non_utf8 d in warn_cannot_use_directory d; raise Exit let add_vo_path lp = let unix_path = lp.unix_path in let implicit = lp.implicit in let recursive = lp.recursive in if System.exists_dir unix_path then let dirs = if recursive then System.all_subdirs ~unix_path else [] in let prefix = DP.repr lp.coq_path in let convert_dirs (lp, cp) = try let path = List.rev_map convert_string cp @ prefix in Some (lp, DP.make path) with Exit -> None in let dirs = List.map_filter convert_dirs dirs in let () = if lp.has_ml && not lp.recursive then Mltop.add_ml_dir unix_path else if lp.has_ml && lp.recursive then (List.iter (fun (lp,_) -> Mltop.add_ml_dir lp) dirs; Mltop.add_ml_dir unix_path) else () in let root = (unix_path,lp.coq_path) in let add (path, dir) = add_load_path root path ~implicit dir in (* deeper dirs registered first and thus be found last *) let dirs = List.rev dirs in let () = List.iter add dirs in add_load_path root unix_path ~implicit lp.coq_path else warn_cannot_open_path unix_path coq-8.15.0/vernac/loadpath.mli000066400000000000000000000055351417001151100161430ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* DirPath.t (** Get the logical path (Coq module hierarchy) of a loadpath. *) val physical : t -> CUnix.physical_path (** Get the physical path of a loadpath *) val pp : t -> Pp.t (** Print a load path *) val get_load_paths : unit -> t list (** Get the current loadpath association. *) val remove_load_path : CUnix.physical_path -> unit (** Remove the current logical path binding associated to a given physical path, if any. *) val find_load_path : CUnix.physical_path -> t (** Get the binding associated with a physical path. Raises [Not_found] if there is none. *) val find_with_logical_path : Names.DirPath.t -> t list (** get the list of load paths that correspond to a given logical path *) val locate_file : string -> string (** Locate a file among the registered paths. Do not use this function, as it does not respect the visibility of paths. 
*) (** {6 Locate a library in the load path } *) type library_location = LibLoaded | LibInPath type locate_error = LibUnmappedDir | LibNotFound type 'a locate_result = ('a, locate_error) result val locate_qualified_library : ?root:DirPath.t -> Libnames.qualid -> (library_location * DirPath.t * CUnix.physical_path) locate_result (** Locates a library by implicit name. @raise LibUnmappedDir if the library is not in the path @raise LibNotFound if there is no corresponding file in the path *) val try_locate_absolute_library : DirPath.t -> string (** {6 Extending the Load Path } *) (** Adds a path to the Coq and ML paths *) type vo_path = { unix_path : string (** Filesystem path containing vo/ml files *) ; coq_path : DirPath.t (** Coq prefix for the path *) ; implicit : bool (** [implicit = true] avoids having to qualify with [coq_path] *) ; has_ml : bool (** If [has_ml] is true, the directory will also be added to the ml include path *) ; recursive : bool (** [recursive] will determine whether we explore sub-directories *) } val add_vo_path : vo_path -> unit coq-8.15.0/vernac/locality.ml000066400000000000000000000066431417001151100160170ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* ImportNeedQualified | false -> ImportDefaultBehavior (** Positioning locality for commands supporting discharging and export outside of modules *) (* For commands whose default is to discharge and export: Global is the default and is neutral; Local in a section deactivates discharge, Local not in a section deactivates export *) let make_non_locality = function Some false -> false | _ -> true let make_locality = function Some true -> true | _ -> false let warn_local_declaration = CWarnings.create ~name:"local-declaration" ~category:"scope" Pp.(fun () -> Pp.strbrk "Interpreting this declaration as if " ++ strbrk "a global declaration prefixed by \"Local\", " ++ strbrk "i.e. 
as a global declaration which shall not be " ++ strbrk "available without qualification when imported.") let enforce_locality_exp locality_flag discharge = let open Vernacexpr in match locality_flag, discharge with | Some b, NoDischarge -> Global (importability_of_bool b) | None, NoDischarge -> Global ImportDefaultBehavior | None, DoDischarge when not (Global.sections_are_opened ()) -> (* If a Let/Variable is defined outside a section, then we consider it as a local definition *) warn_local_declaration (); Global ImportNeedQualified | None, DoDischarge -> Discharge | Some true, DoDischarge -> CErrors.user_err Pp.(str "Local not allowed in this case") | Some false, DoDischarge -> CErrors.user_err Pp.(str "Global not allowed in this case") let enforce_locality locality_flag = make_locality locality_flag (* For commands whose default is to not discharge but to export: Global in sections forces discharge, Global not in section is the default; Local in sections is the default, Local not in section forces non-export *) let make_section_locality = function Some b -> b | None -> Global.sections_are_opened () let enforce_section_locality locality_flag = make_section_locality locality_flag (** Positioning locality for commands supporting export but not discharge *) (* For commands whose default is to export (if not in section): Global in sections is forbidden, Global not in section is neutral; Local in sections is the default, Local not in section forces non-export *) let make_module_locality = function | Some false -> if Global.sections_are_opened () then CErrors.user_err Pp.(str "This command does not support the Global option in sections."); false | Some true -> true | None -> false let enforce_module_locality locality_flag = make_module_locality locality_flag coq-8.15.0/vernac/locality.mli000066400000000000000000000037731417001151100161710ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* bool val make_non_locality : bool option -> bool val enforce_locality_exp : bool option -> Vernacexpr.discharge -> locality val enforce_locality : bool option -> bool (** For commands whose default is to not discharge but to export: Global in sections forces discharge, Global not in section is the default; Local in sections is the default, Local not in section forces non-export *) val make_section_locality : bool option -> bool val enforce_section_locality : bool option -> bool (** * Positioning locality for commands supporting export but not discharge *) (** For commands whose default is to export (if not in section): Global in sections is forbidden, Global not in section is neutral; Local in sections is the default, Local not in section forces non-export *) val make_module_locality : bool option -> bool val enforce_module_locality : bool option -> bool coq-8.15.0/vernac/metasyntax.ml000066400000000000000000002321431417001151100163700ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* user_err Pp.(str "Unknown or unprintable grammar entry.") | entries -> let pr_one (Pcoq.AnyEntry e) = str "Entry " ++ str (Pcoq.Entry.name e) ++ str " is" ++ fnl () ++ pr_entry e in prlist pr_one entries let pr_grammar = function | "constr" | "term" | "binder_constr" -> str "Entry constr is" ++ fnl () ++ 
pr_entry Pcoq.Constr.constr ++ str "and lconstr is" ++ fnl () ++ pr_entry Pcoq.Constr.lconstr ++ str "where binder_constr is" ++ fnl () ++ pr_entry Pcoq.Constr.binder_constr ++ str "and term is" ++ fnl () ++ pr_entry Pcoq.Constr.term | "pattern" -> pr_entry Pcoq.Constr.pattern | "vernac" -> str "Entry vernac_control is" ++ fnl () ++ pr_entry Pvernac.Vernac_.vernac_control ++ str "Entry command is" ++ fnl () ++ pr_entry Pvernac.Vernac_.command ++ str "Entry syntax is" ++ fnl () ++ pr_entry Pvernac.Vernac_.syntax ++ str "Entry gallina is" ++ fnl () ++ pr_entry Pvernac.Vernac_.gallina ++ str "Entry gallina_ext is" ++ fnl () ++ pr_entry Pvernac.Vernac_.gallina_ext | name -> pr_registered_grammar name let pr_custom_grammar name = pr_registered_grammar ("custom:"^name) (**********************************************************************) (* Parse a format (every terminal starting with a letter or a single quote (except a single quote alone) must be quoted) *) let parse_format ({CAst.loc;v=str} : lstring) = let len = String.length str in (* TODO: update the line of the location when the string contains newlines *) let make_loc i j = Option.map (Loc.shift_loc (i+1) (j-len)) loc in let push_token loc a = function | (i,cur)::l -> (i,(loc,a)::cur)::l | [] -> assert false in let push_white i n l = if Int.equal n 0 then l else push_token (make_loc i (i+n)) (UnpTerminal (String.make n ' ')) l in let close_box start stop b = function | (_,a)::(_::_ as l) -> push_token (make_loc start stop) (UnpBox (b,a)) l | [a] -> user_err ?loc:(make_loc start stop) Pp.(str "Non terminated box in format.") | [] -> assert false in let close_quotation start i = if i < len && str.[i] == '\'' then if (Int.equal (i+1) len || str.[i+1] == ' ') then i+1 else user_err ?loc:(make_loc (i+1) (i+1)) Pp.(str "Space expected after quoted expression.") else user_err ?loc:(make_loc start (i-1)) Pp.(str "Beginning of quoted expression expected to be ended by a quote.") in let rec spaces n i = if i < len && str.[i] == ' ' then spaces (n+1) (i+1) else n in let rec nonspaces quoted n i = if i < len && str.[i] != ' ' then if str.[i] == '\'' && quoted && (i+1 >= len || str.[i+1] == ' ') then if Int.equal n 0 then user_err ?loc:(make_loc (i-1) i) Pp.(str "Empty quoted token.") else n else nonspaces quoted (n+1) (i+1) else if quoted then user_err ?loc:(make_loc i i) Pp.(str "Spaces are not allowed in (quoted) symbols.") else n in let rec parse_non_format i = let n = nonspaces false 0 i in push_token (make_loc i (i+n-1)) (UnpTerminal (String.sub str i n)) (parse_token 1 (i+n)) and parse_quoted n k i = if i < len then match str.[i] with (* Parse " // " *) | '/' when i+1 < len && str.[i+1] == '/' -> (* We discard the useless n spaces... *) push_token (make_loc (i-n) (i+1)) (UnpCut PpFnl) (parse_token 1 (close_quotation i (i+2))) (* Parse " .. / .. " *) | '/' when i+1 < len -> let p = spaces 0 (i+1) in push_token (make_loc (i-n) (i+p)) (UnpCut (PpBrk (n,p))) (parse_token 1 (close_quotation i (i+p+1))) | c -> (* The spaces are real spaces *) push_white (i-n-1-k) n (match c with | '[' -> if i+1 < len then match str.[i+1] with (* Parse " [h .. ", *) | 'h' when i+1 <= len && str.[i+2] == 'v' -> (parse_box i (fun n -> PpHVB n) (i+3)) (* Parse " [v .. ", *) | 'v' -> parse_box i (fun n -> PpVB n) (i+2) (* Parse " [ .. 
", *) | ' ' | '\'' -> parse_box i (fun n -> PpHOVB n) (i+1) | _ -> user_err ?loc:(make_loc i i) Pp.(str "\"v\", \"hv\", \" \" expected after \"[\" in format.") else user_err ?loc:(make_loc i i) Pp.(str "\"v\", \"hv\" or \" \" expected after \"[\" in format.") (* Parse "]" *) | ']' -> ((i,[]) :: parse_token 1 (close_quotation i (i+1))) (* Parse a non formatting token *) | c -> let n = nonspaces true 0 i in push_token (make_loc i (i+n-1)) (UnpTerminal (String.sub str (i-1) (n+2))) (parse_token 1 (close_quotation i (i+n)))) else if Int.equal n 0 then [] else user_err ?loc:(make_loc (len-n) len) Pp.(str "Ending spaces non part of a format annotation.") and parse_box start box i = let n = spaces 0 i in close_box start (i+n-1) (box n) (parse_token 1 (close_quotation i (i+n))) and parse_token k i = let n = spaces 0 i in let i = i+n in if i < len then match str.[i] with (* Parse a ' *) | '\'' when i+1 >= len || str.[i+1] == ' ' -> push_white (i-n) (n-k) (push_token (make_loc i (i+1)) (UnpTerminal "'") (parse_token 1 (i+1))) (* Parse the beginning of a quoted expression *) | '\'' -> parse_quoted (n-k) k (i+1) (* Otherwise *) | _ -> push_white (i-n) (n-k) (parse_non_format i) else push_white (i-n) n [(len,[])] in if not (String.is_empty str) then match parse_token 0 0 with | [_,l] -> l | (i,_)::_ -> user_err ?loc:(make_loc i i) Pp.(str "Box closed without being opened.") | [] -> assert false else [] (***********************) (* Analyzing notations *) (* Find non-terminal tokens of notation *) (* To protect alphabetic tokens and quotes from being seen as variables *) let quote_notation_token x = let n = String.length x in let norm = CLexer.is_ident x in if (n > 0 && norm) || (n > 2 && x.[0] == '\'') then "'"^x^"'" else x let is_numeral_in_constr entry symbs = match entry, List.filter (function Break _ -> false | _ -> true) symbs with | InConstrEntry, ([Terminal "-"; Terminal x] | [Terminal x]) -> NumTok.Unsigned.parse_string x <> None | _ -> false let analyze_notation_tokens ~onlyprinting ~infix entry df = let df = if infix then quote_notation_token df else df in let { recvars; mainvars; symbols } as res = decompose_raw_notation df in (* don't check for nonlinearity if printing only, see Bug 5526 *) (if not onlyprinting then match List.duplicates Id.equal (mainvars @ List.map snd recvars) with | id :: _ -> user_err (str "Variable " ++ Id.print id ++ str " occurs more than once.") | _ -> ()); let isnumeral = is_numeral_in_constr entry symbols in res, isnumeral let adjust_symbols vars notation_symbols = let x = Namegen.next_ident_away (Id.of_string "x") vars in let y = Namegen.next_ident_away (Id.of_string "y") (Id.Set.add x vars) in let notation_symbols = { recvars = notation_symbols.recvars; mainvars = x::notation_symbols.mainvars@[y]; symbols = NonTerminal x :: notation_symbols.symbols @ [NonTerminal y]; } in x, y, notation_symbols let adjust_reserved_infix_notation notation_symbols = let vars = Id.Set.of_list (List.map_filter (function NonTerminal x -> Some x | _ -> None) notation_symbols.symbols) in let _, _, notation_symbols = adjust_symbols vars notation_symbols in notation_symbols let adjust_infix_notation df notation_symbols c = let vars = names_of_constr_expr c in let x, y, notation_symbols = adjust_symbols vars notation_symbols in let df = Id.to_string x ^ " " ^ df ^ " " ^ Id.to_string y in let inject_var x = CAst.make @@ CRef (qualid_of_ident x,None) in let metas = [inject_var x; inject_var y] in let c = mkAppC (c,metas) in df, notation_symbols, c let 
warn_unexpected_primitive_token_modifier = CWarnings.create ~name:"primitive-token-modifier" ~category:"parsing" (fun () -> str "Notations for numbers are primitive; skipping this modifier.") let check_no_syntax_modifiers_for_numeral = function | [] -> () | l -> List.iter (function {CAst.loc} -> warn_unexpected_primitive_token_modifier ?loc ()) l let error_not_same_scope x y = user_err (str "Variables " ++ Id.print x ++ str " and " ++ Id.print y ++ str " must be in the same scope.") (**********************************************************************) (* Build pretty-printing rules *) let pr_notation_entry = function | InConstrEntry -> str "constr" | InCustomEntry s -> str "custom " ++ str s let precedence_of_position_and_level from_level = function | NumLevel n, BorderProd (b,Some a) -> (let open Gramlib.Gramext in match a, b with | RightA, Left -> LevelLt n | RightA, Right -> LevelLe n | LeftA, Left -> LevelLe n | LeftA, Right -> LevelLt n | NonA, _ -> LevelLt n), Some b | NumLevel n, _ -> LevelLe n, None | NextLevel, _ -> LevelLt from_level, None | DefaultLevel, _ -> LevelSome, None (** Computing precedences of subentries for parsing *) let precedence_of_entry_type (from_custom,from_level) = function | ETConstr (custom,_,x) when notation_entry_eq custom from_custom -> fst (precedence_of_position_and_level from_level x) | ETConstr (custom,_,(NumLevel n,_)) -> LevelLe n | ETConstr (custom,_,(NextLevel,_)) -> user_err (strbrk "\"next level\" is only for sub-expressions in the same entry as where the notation is (" ++ quote (pr_notation_entry custom) ++ strbrk " is different from " ++ quote (pr_notation_entry from_custom) ++ str ").") | ETPattern (_,n) -> let n = match n with None -> 0 | Some n -> n in LevelLe n | _ -> LevelSome (* should not matter *) (** Computing precedences for future insertion of parentheses at the time of printing using hard-wired constr levels *) let unparsing_precedence_of_entry_type from_level = function | ETConstr (InConstrEntry,_,x) -> (* Possible insertion of parentheses at printing time to deal with precedence in a constr entry is managed using [prec_less] in [ppconstr.ml] *) precedence_of_position_and_level from_level x | ETConstr (custom,_,_) -> (* Precedence of printing for a custom entry is managed using explicit insertion of entry coercions at the time of building a [constr_expr] *) LevelSome, None | ETPattern (_,n) -> (* in constr *) LevelLe (match n with Some n -> n | None -> 0), None | _ -> LevelSome, None (* should not matter *) (* Some breaking examples *) (* "x = y" : "x /1 = y" (breaks before any symbol) *) (* "x =S y" : "x /1 =S /1 y" (protect from confusion; each side for symmetry)*) (* "+ {" : "+ {" may breaks reversibility without space but oth. 
not elegant *) (* "x y" : "x spc y" *) (* "{ x } + { y }" : "{ x } / + { y }" *) (* "< x , y > { z , t }" : "< x , / y > / { z , / t }" *) let starts_with_left_bracket s = let l = String.length s in not (Int.equal l 0) && (s.[0] == '{' || s.[0] == '[' || s.[0] == '(') let ends_with_right_bracket s = let l = String.length s in not (Int.equal l 0) && (s.[l-1] == '}' || s.[l-1] == ']' || s.[l-1] == ')') let is_left_bracket s = starts_with_left_bracket s && not (ends_with_right_bracket s) let is_right_bracket s = not (starts_with_left_bracket s) && ends_with_right_bracket s let is_comma s = let l = String.length s in not (Int.equal l 0) && (s.[0] == ',' || s.[0] == ';') let is_operator s = let l = String.length s in not (Int.equal l 0) && (s.[0] == '+' || s.[0] == '*' || s.[0] == '=' || s.[0] == '-' || s.[0] == '/' || s.[0] == '<' || s.[0] == '>' || s.[0] == '@' || s.[0] == '\\' || s.[0] == '&' || s.[0] == '~' || s.[0] == '$') let is_non_terminal = function | NonTerminal _ | SProdList _ -> true | _ -> false let is_next_non_terminal b = function | [] -> b | pr :: _ -> is_non_terminal pr let is_next_terminal = function Terminal _ :: _ -> true | _ -> false let is_next_break = function Break _ :: _ -> true | _ -> false let add_break n l = (None,UnpCut (PpBrk(n,0))) :: l let add_break_if_none n b = function | (_,UnpCut (PpBrk _)) :: _ as l -> l | [] when not b -> [] | l -> (None,UnpCut (PpBrk(n,0))) :: l let check_open_binder isopen sl m = let pr_token = function | Terminal s -> str s | Break n -> str "␣" | _ -> assert false in if isopen && not (List.is_empty sl) then user_err (str "as " ++ Id.print m ++ str " is a non-closed binder, no such \"" ++ prlist_with_sep spc pr_token sl ++ strbrk "\" is allowed to occur.") let unparsing_metavar i from typs = let x = List.nth typs (i-1) in let prec,side = unparsing_precedence_of_entry_type from x in match x with | ETConstr _ | ETGlobal | ETBigint -> UnpMetaVar (prec,side) | ETPattern _ | ETName _ | ETIdent -> UnpBinderMetaVar (prec,NotQuotedPattern) | ETBinder isopen -> UnpBinderMetaVar (prec,QuotedPattern) (* Heuristics for building default printing rules *) let index_id id l = List.index Id.equal id l let make_hunks etyps symbols from_level = let vars,typs = List.split etyps in let rec make b = function | NonTerminal m :: prods -> let i = index_id m vars in let u = unparsing_metavar i from_level typs in if is_next_non_terminal b prods then (None, u) :: add_break_if_none 1 b (make b prods) else (None, u) :: make_with_space b prods | Terminal s :: prods when (* true to simulate presence of non-terminal *) b || List.exists is_non_terminal prods -> if (is_comma s || is_operator s) then (* Always a breakable space after comma or separator *) (None, UnpTerminal s) :: add_break_if_none 1 b (make b prods) else if is_right_bracket s && is_next_terminal prods then (* Always no space after right bracked, but possibly a break *) (None, UnpTerminal s) :: add_break_if_none 0 b (make b prods) else if is_left_bracket s && is_next_non_terminal b prods then (None, UnpTerminal s) :: make b prods else if not (is_next_break prods) then (* Add rigid space, no break, unless user asked for something *) (None, UnpTerminal (s^" ")) :: make b prods else (* Rely on user spaces *) (None, UnpTerminal s) :: make b prods | Terminal s :: prods -> (* Separate but do not cut a trailing sequence of terminal *) (match prods with | Terminal _ :: _ -> (None,UnpTerminal (s^" ")) :: make b prods | _ -> (None,UnpTerminal s) :: make b prods) | Break n :: prods -> add_break n (make b prods) | 
SProdList (m,sl) :: prods -> let i = index_id m vars in let typ = List.nth typs (i-1) in let prec,side = unparsing_precedence_of_entry_type from_level typ in let sl' = (* If no separator: add a break *) if List.is_empty sl then add_break 1 [] (* We add NonTerminal for simulation but remove it afterwards *) else make true sl in let hunk = match typ with | ETConstr _ -> UnpListMetaVar (prec,List.map snd sl',side) | ETBinder isopen -> check_open_binder isopen sl m; UnpBinderListMetaVar (isopen,List.map snd sl') | _ -> assert false in (None, hunk) :: make_with_space b prods | [] -> [] and make_with_space b prods = match prods with | Terminal s' :: prods'-> if is_operator s' then (* A rigid space before operator and a breakable after *) (None,UnpTerminal (" "^s')) :: add_break_if_none 1 b (make b prods') else if is_comma s' then (* No space whatsoever before comma *) make b prods else if is_right_bracket s' then make b prods else (* A breakable space between any other two terminals *) add_break_if_none 1 b (make b prods) | (NonTerminal _ | SProdList _) :: _ -> (* A breakable space before a non-terminal *) add_break_if_none 1 b (make b prods) | Break _ :: _ -> (* Rely on user wish *) make b prods | [] -> [] in make false symbols (* Build default printing rules from explicit format *) let error_format ?loc () = user_err ?loc Pp.(str "The format does not match the notation.") let warn_format_break = CWarnings.create ~name:"notation-both-format-and-spaces" ~category:"parsing" (fun () -> strbrk "Discarding format implicitly indicated by multiple spaces in notation because an explicit format modifier is given.") let has_ldots l = List.exists (function (_,UnpTerminal s) -> String.equal s (Id.to_string Notation_ops.ldots_var) | _ -> false) l let rec split_format_at_ldots hd = function | (loc,UnpTerminal s) :: fmt when String.equal s (Id.to_string Notation_ops.ldots_var) -> loc, List.rev hd, fmt | u :: fmt -> check_no_ldots_in_box u; split_format_at_ldots (u::hd) fmt | [] -> raise Exit and check_no_ldots_in_box = function | (_,UnpBox (_,fmt)) -> (try let loc,_,_ = split_format_at_ldots [] fmt in user_err ?loc Pp.(str ("The special symbol \"..\" must occur at the same formatting depth than the variables of which it is the ellipse.")) with Exit -> ()) | _ -> () let error_not_same ?loc () = user_err ?loc Pp.(str "The format is not the same on the right- and left-hand sides of the special token \"..\".") let find_prod_list_loc sfmt fmt = (* [fmt] is some [UnpTerminal x :: sfmt @ UnpTerminal ".." :: sfmt @ UnpTerminal y :: rest] *) if List.is_empty sfmt then (* No separators; we highlight the sequence "x .." 
*) Loc.merge_opt (fst (List.hd fmt)) (fst (List.hd (List.tl fmt))) else (* A separator; we highlight the separating sequence *) Loc.merge_opt (fst (List.hd sfmt)) (fst (List.last sfmt)) let is_blank s = let n = String.length s in let rec aux i s = i >= n || s.[i] = ' ' && aux (i+1) s in aux 0 s let is_formatting = function | (_,UnpCut _) -> true | (_,UnpTerminal s) -> is_blank s | _ -> false let rec is_var_in_recursive_format = function | (_,UnpTerminal s) when not (is_blank s) -> true | (loc,UnpBox (b,l)) -> (match List.filter (fun a -> not (is_formatting a)) l with | [a] -> is_var_in_recursive_format a | _ -> error_not_same ?loc ()) | _ -> false let rec check_eq_var_upto_name = function | (_,UnpTerminal s1), (_,UnpTerminal s2) when not (is_blank s1 && is_blank s2) || s1 = s2 -> () | (_,UnpBox (b1,l1)), (_,UnpBox (b2,l2)) when b1 = b2 -> List.iter check_eq_var_upto_name (List.combine l1 l2) | (_,UnpCut b1), (_,UnpCut b2) when b1 = b2 -> () | _, (loc,_) -> error_not_same ?loc () let skip_var_in_recursive_format = function | a :: sl when is_var_in_recursive_format a -> a, sl | (loc,_) :: _ -> error_not_same ?loc () | [] -> assert false let read_recursive_format sl fmt = (* Turn [[UnpTerminal s :: some-list @ UnpTerminal ".." :: same-some-list @ UnpTerminal s' :: rest] *) (* into [(some-list,rest)] *) let get_head fmt = let var,sl = skip_var_in_recursive_format fmt in try var, split_format_at_ldots [] sl with Exit -> error_not_same ?loc:(fst (List.last (if sl = [] then fmt else sl))) () in let rec get_tail = function | (loc,a) :: sepfmt, (_,b) :: fmt when (=) a b -> get_tail (sepfmt, fmt) (* FIXME *) | [], tail -> skip_var_in_recursive_format tail | (loc,_) :: _, ([] | (_,UnpTerminal _) :: _)-> error_not_same ?loc () | _, (loc,_)::_ -> error_not_same ?loc () in let var1, (loc, slfmt, fmt) = get_head fmt in let var2, res = get_tail (slfmt, fmt) in check_eq_var_upto_name (var1,var2); (* To do, though not so important: check that the names match the names in the notation *) slfmt, res let hunks_of_format (from_level,(vars,typs)) symfmt = let rec aux = function | symbs, (_,(UnpTerminal s' as u)) :: fmt when String.equal s' (String.make (String.length s') ' ') -> let symbs, l = aux (symbs,fmt) in symbs, u :: l | Terminal s :: symbs, (_,UnpTerminal s') :: fmt when String.equal s (String.drop_simple_quotes s') -> let symbs, l = aux (symbs,fmt) in symbs, UnpTerminal s :: l | NonTerminal s :: symbs, (_,UnpTerminal s') :: fmt when Id.equal s (Id.of_string s') -> let i = index_id s vars in let symbs, l = aux (symbs,fmt) in symbs, unparsing_metavar i from_level typs :: l | symbs, (_,(UnpCut _ as u)) :: fmt -> let symbs, l = aux (symbs,fmt) in symbs, u :: l | SProdList (m,sl) :: symbs, fmt when has_ldots fmt -> let i = index_id m vars in let typ = List.nth typs (i-1) in let prec,side = unparsing_precedence_of_entry_type from_level typ in let loc_slfmt,rfmt = read_recursive_format sl fmt in let sl, slfmt = aux (sl,loc_slfmt) in if not (List.is_empty sl) then error_format ?loc:(find_prod_list_loc loc_slfmt fmt) (); let symbs, l = aux (symbs,rfmt) in let hunk = match typ with | ETConstr _ -> UnpListMetaVar (prec,slfmt,side) | ETBinder isopen -> check_open_binder isopen sl m; UnpBinderListMetaVar (isopen,slfmt) | _ -> assert false in symbs, hunk :: l | symbs, (_,UnpBox (a,b)) :: fmt -> let symbs', b' = aux (symbs,b) in let symbs', l = aux (symbs',fmt) in symbs', UnpBox (a,List.map (fun x -> (None,x)) b') :: l | symbs, [] -> symbs, [] | Break _ :: symbs, fmt -> warn_format_break (); aux (symbs,fmt) | _, fmt 
-> error_format ?loc:(fst (List.hd fmt)) () in match aux symfmt with | [], l -> l | _ -> error_format () (**********************************************************************) (* Build parsing rules *) let assoc_of_type from n (_,typ) = precedence_of_entry_type (from,n) typ let is_not_small_constr = function ETProdConstr _ -> true | _ -> false let distribute a ll = List.map (fun l -> a @ l) ll (* Expand LIST1(t,sep);sep;t;...;t (with the trailing pattern occurring p times, possibly p=0) into the combination of t;sep;t;...;t;sep;t (p+1 times) t;sep;t;...;t;sep;t;sep;t (p+2 times) ... t;sep;t;...;t;sep;t;...;t;sep;t (p+n times) t;sep;t;...;t;sep;t;...;t;sep;t;LIST1(t,sep) *) let expand_list_rule s typ tkl x n p ll = let camlp5_message_name = Some (add_suffix x ("_"^string_of_int n)) in let main = GramConstrNonTerminal (ETProdConstr (s,typ), camlp5_message_name) in let tks = List.map (fun (kw,s) -> GramConstrTerminal (kw, s)) tkl in let rec aux i hds ll = if i < p then aux (i+1) (main :: tks @ hds) ll else if Int.equal i (p+n) then let hds = GramConstrListMark (p+n,true,p) :: hds @ [GramConstrNonTerminal (ETProdConstrList (s, typ,tkl), Some x)] in distribute hds ll else distribute (GramConstrListMark (i+1,false,p) :: hds @ [main]) ll @ aux (i+1) (main :: tks @ hds) ll in aux 0 [] ll let is_constr_typ (s,lev) x etyps = match List.assoc x etyps with (* TODO: factorize these rules with the ones computing the effective sublevel sent to camlp5, so as to include the case of DefaultLevel which are valid *) | ETConstr (s',_,(lev',InternalProd | (NumLevel _ | NextLevel as lev'), _)) -> Notation.notation_entry_eq s s' && production_level_eq lev lev' | _ -> false let include_possible_similar_trailing_pattern typ etyps sl l = let rec aux n = function | Terminal s :: sl, Terminal s'::l' when s = s' -> aux n (sl,l') | [], NonTerminal x ::l' when is_constr_typ typ x etyps -> try_aux n l' | Break _ :: sl, l -> aux n (sl,l) | sl, Break _ :: l -> aux n (sl,l) | _ -> raise Exit and try_aux n l = try aux (n+1) (sl,l) with Exit -> n,l in try_aux 0 l let prod_entry_type = function | ETIdent -> ETProdIdent | ETName _ -> ETProdName | ETGlobal -> ETProdReference | ETBigint -> ETProdBigint | ETBinder o -> ETProdOneBinder o | ETConstr (s,_,p) -> ETProdConstr (s,p) | ETPattern (_,n) -> ETProdPattern (match n with None -> 0 | Some n -> n) let keyword_needed need s = (* Ensure that IDENT articulation terminal symbols are keywords *) match CLexer.terminal s with | Tok.PIDENT (Some k) -> if need then Flags.if_verbose Feedback.msg_info (str "Identifier '" ++ str k ++ str "' now a keyword"); need | _ -> match NumTok.Unsigned.parse_string s with | Some n -> if need then Flags.if_verbose Feedback.msg_info (str "Number '" ++ NumTok.Unsigned.print n ++ str "' now a keyword"); need | _ -> true let make_production (_,lev,_) etyps symbols = let rec aux need = function | [] -> [[]] | NonTerminal m :: l -> let typ = prod_entry_type (List.assoc m etyps) in distribute [GramConstrNonTerminal (typ, Some m)] (aux (is_not_small_constr typ) l) | Terminal s :: l -> let keyword = keyword_needed need s in distribute [GramConstrTerminal (keyword,s)] (aux false l) | Break _ :: l -> aux need l | SProdList (x,sl) :: l -> let tkl = List.flatten (List.map (function Terminal s -> [s] | Break _ -> [] | _ -> anomaly (Pp.str "Found a non terminal token in recursive notation separator.")) sl) in let tkl = List.map_i (fun i x -> let need = (i=0) in (keyword_needed need x, x)) 0 tkl in match List.assoc x etyps with | ETConstr (s,_,(lev,_ as typ)) -> let 
p,l' = include_possible_similar_trailing_pattern (s,lev) etyps sl l in expand_list_rule s typ tkl x 1 p (aux true l') | ETBinder o -> check_open_binder o sl x; let typ = if o then (assert (tkl = []); ETBinderOpen) else ETBinderClosed tkl in distribute [GramConstrNonTerminal (ETProdBinderList typ, Some x)] (aux false l) | _ -> user_err Pp.(str "Components of recursive patterns in notation must be terms or binders.") in let need = (* a leading ident/number factorizes iff at level 0 *) lev <> 0 in aux need symbols let rec find_symbols c_current c_next c_last = function | [] -> [] | NonTerminal id :: sl -> let prec = if not (List.is_empty sl) then c_current else c_last in (id, prec) :: (find_symbols c_next c_next c_last sl) | Terminal s :: sl -> find_symbols c_next c_next c_last sl | Break n :: sl -> find_symbols c_current c_next c_last sl | SProdList (x,_) :: sl' -> (x,c_next)::(find_symbols c_next c_next c_last sl') let border = function | (_,(ETConstr(_,_,(_,BorderProd (_,a))))) :: _ -> a | _ -> None let recompute_assoc typs = let open Gramlib.Gramext in match border typs, border (List.rev typs) with | Some LeftA, Some RightA -> assert false | Some LeftA, _ -> Some LeftA | _, Some RightA -> Some RightA | _ -> None (**************************************************************************) (* Registration of syntax extensions (parsing/printing, no interpretation)*) let pr_arg_level from (lev,typ) = let pplev = function | LevelLt n when Int.equal n from -> spc () ++ str "at next level" | LevelLe n -> spc () ++ str "at level " ++ int n | LevelLt n -> spc () ++ str "at level below " ++ int n | LevelSome -> mt () in Ppvernac.pr_set_entry_type (fun _ -> (*TO CHECK*) mt()) typ ++ pplev lev let pr_level ntn (from,fromlevel,args) typs = (match from with InConstrEntry -> mt () | InCustomEntry s -> str "in " ++ str s ++ spc()) ++ str "at level " ++ int fromlevel ++ spc () ++ str "with arguments" ++ spc() ++ prlist_with_sep pr_comma (pr_arg_level fromlevel) (List.combine args typs) let error_incompatible_level ntn oldprec oldtyps prec typs = user_err (str "Notation " ++ pr_notation ntn ++ str " is already defined" ++ spc() ++ pr_level ntn oldprec oldtyps ++ spc() ++ str "while it is now required to be" ++ spc() ++ pr_level ntn prec typs ++ str ".") let error_parsing_incompatible_level ntn ntn' oldprec oldtyps prec typs = user_err (str "Notation " ++ pr_notation ntn ++ str " relies on a parsing rule for " ++ pr_notation ntn' ++ spc() ++ str " which is already defined" ++ spc() ++ pr_level ntn oldprec oldtyps ++ spc() ++ str "while it is now required to be" ++ spc() ++ pr_level ntn prec typs ++ str ".") let warn_incompatible_format = CWarnings.create ~name:"notation-incompatible-format" ~category:"parsing" (fun (specific,ntn) -> let head,scope = match specific with | None -> str "Notation", mt () | Some LastLonelyNotation -> str "Lonely notation", mt () | Some (NotationInScope sc) -> str "Notation", strbrk (" in scope " ^ sc) in head ++ spc () ++ pr_notation ntn ++ strbrk " was already defined with a different format" ++ scope ++ str ".") type syntax_extension = { synext_level : level; synext_nottyps : constr_entry_key list; synext_notgram : notation_grammar option; synext_notprint : generic_notation_printing_rules option; } type syntax_rules = | PrimTokenSyntax | SpecificSyntax of syntax_extension let syntax_rules_iter f = function | PrimTokenSyntax -> () | SpecificSyntax synext -> f synext let check_reserved_format ntn rules rules' = try let { notation_printing_reserved = reserved; 
notation_printing_rules = generic_rules } = rules in if reserved && (not (List.for_all2eq unparsing_eq rules'.notation_printing_unparsing generic_rules.notation_printing_unparsing) || rules'.notation_printing_extra <> generic_rules.notation_printing_extra) then warn_incompatible_format (None,ntn) with Not_found -> () let specific_format_to_declare (specific,ntn as specific_ntn) rules = try let specific_rules = Ppextend.find_specific_notation_printing_rule specific_ntn in if not (List.for_all2eq unparsing_eq rules.notation_printing_unparsing specific_rules.notation_printing_unparsing) || rules.notation_printing_extra <> specific_rules.notation_printing_extra then (warn_incompatible_format (Some specific,ntn); true) else false with Not_found -> true type syntax_extension_obj = locality_flag * (notation * syntax_extension) let check_and_extend_constr_grammar ntn rule = try let ntn_for_grammar = rule.notgram_notation in if notation_eq ntn ntn_for_grammar then raise Not_found; let prec = rule.notgram_level in let typs = rule.notgram_typs in let oldprec = Notation.level_of_notation ntn_for_grammar in let oldparsing = try Some (Notgram_ops.grammar_of_notation ntn_for_grammar) with Not_found -> None in let oldtyps = Notgram_ops.subentries_of_notation ntn_for_grammar in if not (Notation.level_eq prec oldprec) && oldparsing <> None then error_parsing_incompatible_level ntn ntn_for_grammar oldprec oldtyps prec typs; if oldparsing = None then raise Not_found with Not_found -> Egramcoq.extend_constr_grammar rule let cache_one_syntax_extension (ntn,synext) = let prec = synext.synext_level in (* Check and ensure that the level and the precomputed parsing rule is declared *) let oldparsing = try let oldprec = Notation.level_of_notation ntn in let oldparsing = try Some (Notgram_ops.grammar_of_notation ntn) with Not_found -> None in let oldtyps = Notgram_ops.subentries_of_notation ntn in if not (Notation.level_eq prec oldprec && List.for_all2 Extend.constr_entry_key_eq synext.synext_nottyps oldtyps) && (oldparsing <> None || synext.synext_notgram = None) then error_incompatible_level ntn oldprec oldtyps prec synext.synext_nottyps; oldparsing with Not_found -> (* Declare the level and the precomputed parsing rule *) let () = Notation.declare_notation_level ntn prec in let () = Notgram_ops.declare_notation_subentries ntn synext.synext_nottyps in let () = Option.iter (Notgram_ops.declare_notation_grammar ntn) synext.synext_notgram in None in (* Declare the parsing rule *) begin match oldparsing, synext.synext_notgram with | None, Some grams -> List.iter (check_and_extend_constr_grammar ntn) grams | _ -> (* The grammars rules are canonically derived from the string and the precedence*) () end; (* Printing *) Option.iter (declare_generic_notation_printing_rules ntn) synext.synext_notprint let cache_syntax_extension (_, (_, sy)) = cache_one_syntax_extension sy let subst_syntax_extension (subst, (local, (ntn, synext))) = (local, (ntn, synext)) let classify_syntax_definition (local, _ as o) = if local then Dispose else Substitute o let open_syntax_extension i o = if Int.equal i 1 then cache_syntax_extension o let inSyntaxExtension : syntax_extension_obj -> obj = declare_object {(default_object "SYNTAX-EXTENSION") with open_function = simple_open ~cat:notation_cat open_syntax_extension; cache_function = cache_syntax_extension; subst_function = subst_syntax_extension; classify_function = classify_syntax_definition} (**************************************************************************) (* Precedences *) (* 
Interpreting user-provided modifiers *) (* XXX: We could move this to the parser itself *) module NotationMods = struct type notation_modifier = { assoc : Gramlib.Gramext.g_assoc option; level : int option; etyps : (Id.t * simple_constr_prod_entry_key) list; (* common to syn_data below *) format : lstring option; extra : (string * string) list; } let default = { assoc = None; level = None; etyps = []; format = None; extra = []; } end (* To be turned into a fatal warning in 8.14 *) let warn_deprecated_ident_entry = CWarnings.create ~name:"deprecated-ident-entry" ~category:"deprecated" (fun () -> strbrk "grammar entry \"ident\" permitted \"_\" in addition to proper identifiers; this use is deprecated and its meaning will change in the future; use \"name\" instead.") let interp_modifiers entry modl = let open NotationMods in let rec interp subtyps acc = function | [] -> subtyps, acc | CAst.{loc;v} :: l -> match v with | SetEntryType (s,typ) -> let id = Id.of_string s in if Id.List.mem_assoc id acc.etyps then user_err ?loc (str s ++ str " is already assigned to an entry or constr level."); interp subtyps { acc with etyps = (id,typ) :: acc.etyps; } l | SetItemLevel ([],bko,n) -> interp subtyps acc l | SetItemLevel (s::idl,bko,n) -> let id = Id.of_string s in if Id.List.mem_assoc id acc.etyps then user_err ?loc (str s ++ str " is already assigned to an entry or constr level."); interp ((id,bko,n)::subtyps) acc ((CAst.make ?loc @@ SetItemLevel (idl,bko,n))::l) | SetLevel n -> (match entry with | InCustomEntry s -> if acc.level <> None then user_err ?loc (str ("isolated \"at level " ^ string_of_int n ^ "\" unexpected.")) else user_err ?loc (str ("use \"in custom " ^ s ^ " at level " ^ string_of_int n ^ "\"") ++ spc () ++ str "rather than" ++ spc () ++ str ("\"at level " ^ string_of_int n ^ "\"") ++ spc () ++ str "isolated.") | InConstrEntry -> if acc.level <> None then user_err ?loc (str "A level is already assigned."); interp subtyps { acc with level = Some n; } l) | SetCustomEntry (s,Some n) -> (* Note: name of entry already registered in interp_non_syntax_modifiers *) if acc.level <> None then user_err ?loc (str ("isolated \"at level " ^ string_of_int (Option.get acc.level) ^ "\" unexpected.")); interp subtyps { acc with level = Some n } l | SetAssoc a -> if not (Option.is_empty acc.assoc) then user_err ?loc Pp.(str "An associativity is given more than once."); interp subtyps { acc with assoc = Some a; } l | SetOnlyParsing | SetOnlyPrinting | SetCustomEntry (_,None) | SetFormat _ | SetItemScope _ -> (* interpreted in interp_non_syntax_modifiers *) assert false in let subtyps, mods = interp [] default modl in (* interpret item levels wrt to main entry *) let extra_etyps = List.map (fun (id,bko,n) -> (id,ETConstr (entry,bko,n))) subtyps in (* Temporary hack: "ETName false" (i.e. 
"ident" in deprecation phase) means "ETIdent" for custom entries *) let mods = { mods with etyps = List.map (function | (id,ETName false) -> if entry = InConstrEntry then (warn_deprecated_ident_entry (); (id,ETName true)) else (id,ETIdent) | x -> x) mods.etyps } in { mods with etyps = extra_etyps@mods.etyps } let check_useless_entry_types recvars mainvars etyps = let vars = let (l1,l2) = List.split recvars in l1@l2@mainvars in match List.filter (fun (x,etyp) -> not (List.mem x vars)) etyps with | (x,_)::_ -> user_err (Id.print x ++ str " is unbound in the notation.") | _ -> () type notation_main_data = { onlyparsing : bool; onlyprinting : bool; deprecation : Deprecation.t option; entry : notation_entry; format : unparsing Loc.located list option; extra : (string * string) list; itemscopes : (Id.t * scope_name) list; } let warn_only_parsing_reserved_notation = CWarnings.create ~name:"irrelevant-reserved-notation-only-parsing" ~category:"parsing" (fun () -> strbrk "The only parsing modifier has no effect in Reserved Notation.") let warn_only_parsing_discarded_format = CWarnings.create ~name:"discarded-format-only-parsing" ~category:"parsing" (fun () -> strbrk "The format modifier has no effect for only-parsing notations.") let error_onlyparsing_onlyprinting ?loc = user_err ?loc (str "A notation cannot be both \"only printing\" and \"only parsing\".") let set_onlyparsing ?loc ~reserved main_data = if reserved then (warn_only_parsing_reserved_notation ?loc (); main_data) else (if main_data.onlyparsing then user_err ?loc (str "\"only parsing\" is given more than once."); if main_data.onlyprinting then error_onlyparsing_onlyprinting ?loc; { main_data with onlyparsing = true }) let set_onlyprinting ?loc main_data = if main_data.onlyprinting then user_err ?loc (str "\"only printing\" is given more than once."); if main_data.onlyparsing then error_onlyparsing_onlyprinting ?loc; { main_data with onlyprinting = true } let set_custom_entry ?loc main_data entry' = match main_data.entry with | InConstrEntry -> { main_data with entry = InCustomEntry entry' } | _ -> user_err ?loc (str "\"in custom\" is given more than once.") let warn_irrelevant_format = CWarnings.create ~name:"irrelevant-format-only-parsing" ~category:"parsing" (fun () -> str "The format modifier is irrelevant for only-parsing rules.") let set_format ?loc main_data format = if not (Option.is_empty main_data.format) then user_err ?loc Pp.(str "A format is given more than once."); let format = if main_data.onlyparsing then (warn_irrelevant_format ?loc (); None) else Some (parse_format format) in { main_data with format } let set_extra_format ?loc main_data (k,s) = if List.mem_assoc k main_data.extra then user_err ?loc Pp.(str "A format for " ++ str k ++ str " is given more than once."); let extra = if main_data.onlyparsing then (warn_irrelevant_format ?loc (); main_data.extra) else (k,s.CAst.v)::main_data.extra in { main_data with extra } let set_item_scope ?loc main_data ids sc = let itemscopes = List.map (fun id -> (Id.of_string id,sc)) ids @ main_data.itemscopes in match List.duplicates (fun (id1,_) (id2,_) -> Id.equal id1 id2) itemscopes with | (id,_)::_ -> user_err ?loc (str "Notation scope for argument " ++ Id.print id ++ str " can be specified only once.") | [] -> { main_data with itemscopes } let interp_non_syntax_modifiers ~reserved ~infix ~syndef deprecation mods = let set (main_data,rest) = CAst.with_loc_val (fun ?loc -> function | SetOnlyParsing -> if not (Option.is_empty main_data.format && List.is_empty main_data.extra) then 
(warn_only_parsing_discarded_format ?loc (); (main_data, rest)) else (set_onlyparsing ?loc ~reserved main_data,rest) | SetOnlyPrinting when not syndef -> (set_onlyprinting ?loc main_data,rest) | SetCustomEntry (entry,None) when not syndef -> (set_custom_entry ?loc main_data entry,rest) | SetCustomEntry (entry,Some _) as x when not syndef -> (set_custom_entry main_data entry,CAst.make ?loc x :: rest) | SetEntryType _ when infix -> user_err ?loc Pp.(str "Unexpected entry type in infix notation.") | SetItemLevel _ when infix -> user_err ?loc Pp.(str "Unexpected entry level in infix notation.") | SetFormat (TextFormat s) when not syndef -> (set_format ?loc main_data s, rest) | SetFormat (ExtraFormat (k,s)) when not syndef -> (set_extra_format ?loc main_data (k,s), rest) | SetItemScope (ids,sc) -> (set_item_scope ?loc main_data ids sc, rest) | modif -> (main_data,(CAst.make ?loc modif)::rest)) in let main_data = { onlyparsing = false; onlyprinting = false; deprecation; entry = InConstrEntry; format = None; extra = []; itemscopes = [] } in List.fold_left set (main_data,[]) mods (* Check if an interpretation can be used for printing a cases printing *) let has_no_binders_type = List.for_all (fun (_,(_,typ)) -> match typ with | NtnTypeBinder _ | NtnTypeBinderList -> false | NtnTypeConstr | NtnTypeConstrList -> true) (* Compute precedences from modifiers (or find default ones) *) let set_entry_type from n etyps (x,typ) = let make_lev n s = match typ with | BorderProd _ -> NumLevel n | InternalProd -> DefaultLevel in let typ = try match List.assoc x etyps, typ with | ETConstr (s,bko,DefaultLevel), _ -> if notation_entry_eq from s then ETConstr (s,bko,(make_lev n s,typ)) else ETConstr (s,bko,(DefaultLevel,typ)) | ETConstr (s,bko,n), BorderProd (left,_) -> ETConstr (s,bko,(n,BorderProd (left,None))) | ETConstr (s,bko,n), InternalProd -> ETConstr (s,bko,(n,InternalProd)) | ETPattern (b,n), _ -> ETPattern (b,n) | (ETIdent | ETName _ | ETBigint | ETGlobal | ETBinder _ as x), _ -> x with Not_found -> ETConstr (from,None,(make_lev n from,typ)) in (x,typ) let join_auxiliary_recursive_types recvars etyps = List.fold_right (fun (x,y) typs -> let xtyp = try Some (List.assoc x etyps) with Not_found -> None in let ytyp = try Some (List.assoc y etyps) with Not_found -> None in match xtyp,ytyp with | None, None -> typs | Some _, None -> typs | None, Some ytyp -> (x,ytyp)::typs | Some xtyp, Some ytyp when (=) xtyp ytyp -> typs (* FIXME *) | Some xtyp, Some ytyp -> user_err (strbrk "In " ++ Id.print x ++ str " .. 
" ++ Id.print y ++ strbrk ", both ends have incompatible types.")) recvars etyps let internalization_type_of_entry_type = function | ETBinder _ -> NtnInternTypeOnlyBinder | ETConstr _ | ETBigint | ETGlobal | ETIdent | ETName _ | ETPattern _ -> NtnInternTypeAny None let make_internalization_vars recvars maintyps = let maintyps = List.map (on_snd internalization_type_of_entry_type) maintyps in let extratyps = List.map (fun (x,y) -> (y,List.assoc x maintyps)) recvars in maintyps @ extratyps let make_interpretation_type isrec isonlybinding default_if_binding = function (* Parsed as constr list *) | ETConstr (_,None,_) when isrec -> NtnTypeConstrList (* Parsed as constr, but interpreted as a binder *) | ETConstr (_,Some bk,_) -> NtnTypeBinder (NtnBinderParsedAsConstr bk) | ETConstr (_,None,_) when isonlybinding -> NtnTypeBinder (NtnBinderParsedAsConstr default_if_binding) (* Parsed as constr, interpreted as constr *) | ETConstr (_,None,_) -> NtnTypeConstr (* Others *) | ETIdent -> NtnTypeBinder NtnParsedAsIdent | ETName _ -> NtnTypeBinder NtnParsedAsName | ETPattern (ppstrict,_) -> NtnTypeBinder (NtnParsedAsPattern ppstrict) (* Parsed as ident/pattern, primarily interpreted as binder; maybe strict at printing *) | ETBigint | ETGlobal -> NtnTypeConstr | ETBinder _ -> if isrec then NtnTypeBinderList else NtnTypeBinder NtnParsedAsBinder let subentry_of_constr_prod_entry from_level = function (* Specific 8.2 approximation *) | ETConstr (InCustomEntry s,_,x) -> let n = match fst (precedence_of_position_and_level from_level x) with | LevelLt n -> n-1 | LevelLe n -> n | LevelSome -> max_int in InCustomEntryLevel (s,n) (* level and use of parentheses for coercion is hard-wired for "constr"; we don't remember the level *) | ETConstr (InConstrEntry,_,_) -> InConstrEntrySomeLevel | _ -> InConstrEntrySomeLevel let make_interpretation_vars (* For binders, default is to parse only as an ident *) ?(default_if_binding=AsName) recvars level allvars typs = let eq_subscope (sc1, l1) (sc2, l2) = Option.equal String.equal sc1 sc2 && List.equal String.equal l1 l2 in let check (x, y) = let (_,scope1) = Id.Map.find x allvars in let (_,scope2) = Id.Map.find y allvars in if not (eq_subscope scope1 scope2) then error_not_same_scope x y in let () = List.iter check recvars in let useless_recvars = List.map snd recvars in let mainvars = Id.Map.filter (fun x _ -> not (Id.List.mem x useless_recvars)) allvars in Id.Map.mapi (fun x (isonlybinding, sc) -> let typ = Id.List.assoc x typs in ((subentry_of_constr_prod_entry level typ,sc), make_interpretation_type (Id.List.mem_assoc x recvars) isonlybinding default_if_binding typ)) mainvars let check_rule_productivity l = if List.for_all (function NonTerminal _ | Break _ -> true | _ -> false) l then user_err Pp.(str "A notation must include at least one symbol."); if (match l with SProdList _ :: _ -> true | _ -> false) then user_err Pp.(str "A recursive notation must start with at least one symbol.") let warn_notation_bound_to_variable = CWarnings.create ~name:"notation-bound-to-variable" ~category:"parsing" (fun () -> strbrk "This notation will not be used for printing as it is bound to a single variable.") let warn_non_reversible_notation = CWarnings.create ~name:"non-reversible-notation" ~category:"parsing" (function | APrioriReversible -> assert false | HasLtac -> strbrk "This notation contains Ltac expressions: it will not be used for printing." 
| NonInjective ids -> let n = List.length ids in strbrk (String.plural n "Variable") ++ spc () ++ pr_enum Id.print ids ++ spc () ++ strbrk (if n > 1 then "do" else "does") ++ str " not occur in the right-hand side." ++ spc() ++ strbrk "The notation will not be used for printing as it is not reversible.") let is_coercion level typs = match level, typs with | Some (custom,n,_), [_,e] -> (match e, custom with | ETConstr _, _ -> let customkey = make_notation_entry_level custom n in let subentry = subentry_of_constr_prod_entry n e in if notation_entry_level_eq subentry customkey then None else Some (IsEntryCoercion subentry) | ETGlobal, InCustomEntry s -> Some (IsEntryGlobal (s,n)) | ETIdent, InCustomEntry s -> Some (IsEntryIdent (s,n)) | _ -> None) | Some _, _ -> assert false | None, _ -> None let printability level typs onlyparsing reversibility = function | NVar _ when reversibility = APrioriReversible -> let coe = is_coercion level typs in let onlyparsing = if not onlyparsing && Option.is_empty coe then (warn_notation_bound_to_variable (); true) else onlyparsing in onlyparsing, coe | _ -> (if not onlyparsing && reversibility <> APrioriReversible then (warn_non_reversible_notation reversibility; true) else onlyparsing),None let find_precedence custom lev etyps symbols onlyprint = let first_symbol = let rec aux = function | Break _ :: t -> aux t | h :: t -> Some h | [] -> None in aux symbols in let last_is_terminal () = let rec aux b = function | Break _ :: t -> aux b t | Terminal _ :: t -> aux true t | _ :: t -> aux false t | [] -> b in aux false symbols in match first_symbol with | None -> [],0 | Some (NonTerminal x) -> let test () = if onlyprint then if Option.is_empty lev then user_err Pp.(str "Explicit level needed in only-printing mode when the level of the leftmost non-terminal is given.") else [],Option.get lev else user_err Pp.(str "The level of the leftmost non-terminal cannot be changed.") in (try match List.assoc x etyps, custom with | ETConstr (s,_,(NumLevel _ | NextLevel)), s' when s = s' -> test () | (ETIdent | ETName _ | ETBigint | ETGlobal), _ -> begin match lev with | None -> ([fun () -> Flags.if_verbose (Feedback.msg_info ?loc:None) (strbrk "Setting notation at level 0.")],0) | Some 0 -> ([],0) | _ -> user_err Pp.(str "A notation starting with an atomic expression must be at level 0.") end | (ETPattern _ | ETBinder _), InConstrEntry when not onlyprint -> (* Don't know exactly if we can make sense of this case *) user_err Pp.(str "Binders or patterns not supported in leftmost position.") | (ETPattern _ | ETBinder _ | ETConstr _), _ -> (* Give a default ? 
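For now we do not and an explicit level is required just below.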
*) if Option.is_empty lev then user_err Pp.(str "Need an explicit level.") else [],Option.get lev with Not_found -> if Option.is_empty lev then user_err Pp.(str "A left-recursive notation must have an explicit level.") else [],Option.get lev) | Some (Terminal _) when last_is_terminal () -> if Option.is_empty lev then ([fun () -> Flags.if_verbose (Feedback.msg_info ?loc:None) (strbrk "Setting notation at level 0.")], 0) else [],Option.get lev | Some _ -> if Option.is_empty lev then user_err Pp.(str "Cannot determine the level."); [],Option.get lev let check_curly_brackets_notation_exists () = try let _ = Notation.level_of_notation (InConstrEntry,"{ _ }") in () with Not_found -> user_err Pp.(str "Notations involving patterns of the form \"{ _ }\" are treated \n\ specially and require that the notation \"{ _ }\" is already reserved.") (* Remove patterns of the form "{ _ }", unless it is the "{ _ }" notation *) let remove_curly_brackets l = let rec skip_break acc = function | Break _ as br :: l -> skip_break (br::acc) l | l -> List.rev acc, l in let rec aux deb = function | [] -> [] | Terminal "{" as t1 :: l -> let br,next = skip_break [] l in (match next with | NonTerminal _ as x :: l' -> let br',next' = skip_break [] l' in (match next' with | Terminal "}" as t2 :: l'' -> if deb && List.is_empty l'' then [t1;x;t2] else begin check_curly_brackets_notation_exists (); x :: aux false l'' end | l1 -> t1 :: br @ x :: br' @ aux false l1) | l0 -> t1 :: aux false l0) | x :: l -> x :: aux false l in aux true l let has_implicit_format symbols = List.exists (function Break _ -> true | _ -> false) symbols (* Because of the special treatment for { }, the grammar rule sent to the parser may be different than what the user sees; e.g. for "{ A } + { B }", it is "A + B" which is sent to the parser *) type syn_pa_data = { ntn_for_grammar : notation; prec_for_grammar : level; typs_for_grammar : constr_entry_key list; need_squash : bool; } module SynData = struct type subentry_types = (Id.t * constr_entry_key) list (* XXX: Document *) type syn_data = { (* XXX: Callback to printing, must remove *) msgs : (unit -> unit) list; (* Notation data for parsing *) level : level; subentries : subentry_types; pa_syntax_data : subentry_types * symbol list; pp_syntax_data : subentry_types * symbol list; not_data : syn_pa_data; } end let find_subentry_types from n assoc etyps symbols = let typs = find_symbols (BorderProd(Left,assoc)) (InternalProd) (BorderProd(Right,assoc)) symbols in let sy_typs = List.map (set_entry_type from n etyps) typs in let prec = List.map (assoc_of_type from n) sy_typs in sy_typs, prec let check_locality_compatibility local custom i_typs = if not local then let subcustom = List.map_filter (function _,ETConstr (InCustomEntry s,_,_) -> Some s | _ -> None) i_typs in let allcustoms = match custom with InCustomEntry s -> s::subcustom | _ -> subcustom in List.iter (fun s -> if Egramcoq.locality_of_custom_entry s then user_err (strbrk "Notation has to be declared local as it depends on custom entry " ++ str s ++ strbrk " which is local.")) (List.uniquize allcustoms) let compute_syntax_data ~local main_data notation_symbols ntn mods = let open SynData in let open NotationMods in if main_data.itemscopes <> [] then user_err (str "General notations don't support 'in scope'."); let {recvars;mainvars;symbols} = notation_symbols in let assoc = Option.append mods.assoc (Some Gramlib.Gramext.NonA) in let _ = check_useless_entry_types recvars mainvars mods.etyps in (* Notations for interp and grammar *) let msgs,n = 
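(* [find_precedence] chooses the level of the notation: it defaults to level 0 when the notation starts with an atomic expression or both starts and ends with a terminal, and otherwise requires an explicit level modifier; [msgs] collects deferred informational messages, e.g. that the level was set to 0. *)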
find_precedence main_data.entry mods.level mods.etyps symbols main_data.onlyprinting in let symbols_for_grammar = if main_data.entry = InConstrEntry then remove_curly_brackets symbols else symbols in let need_squash = not (List.equal Notation.symbol_eq symbols symbols_for_grammar) in let ntn_for_grammar = if need_squash then make_notation_key main_data.entry symbols_for_grammar else ntn in if main_data.entry = InConstrEntry && not main_data.onlyprinting then check_rule_productivity symbols_for_grammar; (* To globalize... *) let etyps = join_auxiliary_recursive_types recvars mods.etyps in let sy_typs, prec = find_subentry_types main_data.entry n assoc etyps symbols in let sy_typs_for_grammar, prec_for_grammar = if need_squash then find_subentry_types main_data.entry n assoc etyps symbols_for_grammar else sy_typs, prec in check_locality_compatibility local main_data.entry sy_typs; let pa_sy_data = (sy_typs_for_grammar,symbols_for_grammar) in let pp_sy_data = (sy_typs,symbols) in let sy_fulldata = { ntn_for_grammar; prec_for_grammar = (main_data.entry,n,prec_for_grammar); typs_for_grammar = List.map snd sy_typs_for_grammar; need_squash } in (* Return relevant data for interpretation and for parsing/printing *) { msgs; level = (main_data.entry,n,prec); subentries = sy_typs; pa_syntax_data = pa_sy_data; pp_syntax_data = pp_sy_data; not_data = sy_fulldata; } (**********************************************************************) (* Registration of notations interpretation *) type notation_obj = { notobj_local : bool; notobj_scope : scope_name option; notobj_interp : interpretation; notobj_coercion : entry_coercion_kind option; notobj_use : notation_use option; notobj_deprecation : Deprecation.t option; notobj_notation : notation * notation_location; notobj_specific_pp_rules : notation_printing_rules option; notobj_also_in_cases_pattern : bool; } let load_notation_common silently_define_scope_if_undefined _ (_, nobj) = (* When the default shall be to require that a scope already exists *) (* the call to ensure_scope will have to be removed *) if silently_define_scope_if_undefined then (* Don't warn if the scope is not defined: *) (* there was already a warning at "cache" time *) Option.iter Notation.declare_scope nobj.notobj_scope else Option.iter Notation.ensure_scope nobj.notobj_scope let load_notation = load_notation_common true let open_notation i (_, nobj) = if Int.equal i 1 then begin let scope = nobj.notobj_scope in let (ntn, df) = nobj.notobj_notation in let pat = nobj.notobj_interp in let deprecation = nobj.notobj_deprecation in let scope = match scope with None -> LastLonelyNotation | Some sc -> NotationInScope sc in let also_in_cases_pattern = nobj.notobj_also_in_cases_pattern in (* Declare the notation *) (match nobj.notobj_use with | Some use -> Notation.declare_notation (scope,ntn) pat df ~use ~also_in_cases_pattern nobj.notobj_coercion deprecation | None -> ()); (* Declare specific format if any *) (match nobj.notobj_specific_pp_rules with | Some pp_sy -> if specific_format_to_declare (scope,ntn) pp_sy then Ppextend.declare_specific_notation_printing_rules (scope,ntn) pp_sy | None -> ()) end let cache_notation o = load_notation_common false 1 o; open_notation 1 o let subst_notation (subst, nobj) = { nobj with notobj_interp = subst_interpretation subst nobj.notobj_interp; } let classify_notation nobj = if nobj.notobj_local then Dispose else Substitute nobj let inNotation : notation_obj -> obj = declare_object {(default_object "NOTATION") with open_function = simple_open 
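(* Notation interpretations are registered as library objects: local ones are disposed of rather than exported, the others are substituted and replayed when the enclosing library or module is required. *)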
~cat:notation_cat open_notation; cache_function = cache_notation; subst_function = subst_notation; load_function = load_notation; classify_function = classify_notation} (**********************************************************************) let with_lib_stk_protection f x = let fs = Lib.freeze () in try let a = f x in Lib.unfreeze fs; a with reraise -> let reraise = Exninfo.capture reraise in let () = Lib.unfreeze fs in Exninfo.iraise reraise let with_syntax_protection f x = with_lib_stk_protection (Pcoq.with_grammar_rule_protection (with_notation_protection f)) x (**********************************************************************) (* Recovering existing syntax *) exception NoSyntaxRule let recover_notation_syntax ntn = try let prec = Notation.level_of_notation ntn in let pa_typs = Notgram_ops.subentries_of_notation ntn in let pa_rule = try Some (Notgram_ops.grammar_of_notation ntn) with Not_found -> None in let pp_rule = try Some (find_generic_notation_printing_rule ntn) with Not_found -> None in { synext_level = prec; synext_nottyps = pa_typs; synext_notgram = pa_rule; synext_notprint = pp_rule; } with Not_found -> raise NoSyntaxRule let recover_squash_syntax sy = let sq = recover_notation_syntax (InConstrEntry,"{ _ }") in match sq.synext_notgram with | Some gram -> sy :: gram | None -> raise NoSyntaxRule (**********************************************************************) (* Main entry point for building parsing and printing rules *) let make_pa_rule (typs,symbols) parsing_data = let { ntn_for_grammar; prec_for_grammar; typs_for_grammar; need_squash } = parsing_data in let assoc = recompute_assoc typs in let prod = make_production prec_for_grammar typs symbols in let sy = { notgram_level = prec_for_grammar; notgram_assoc = assoc; notgram_notation = ntn_for_grammar; notgram_prods = prod; notgram_typs = typs_for_grammar; } in (* By construction, the rule for "{ _ }" is declared, but we need to redeclare it because the file where it is declared needs not be open when the current file opens (especially in presence of -nois) *) if need_squash then recover_squash_syntax sy else [sy] let make_pp_rule level (typs,symbols) fmt = match fmt with | None -> let hunks = make_hunks typs symbols level in if List.exists (function _,(UnpCut (PpBrk _) | UnpListMetaVar _) -> true | _ -> false) hunks then [UnpBox (PpHOVB 0,hunks)] else (* Optimization to work around what seems an ocaml Format bug (see Mantis #7804/#7807) *) List.map snd hunks (* drop locations which are dummy *) | Some fmt -> hunks_of_format (level, List.split typs) (symbols, fmt) let make_parsing_rules main_data (sd : SynData.syn_data) = let open SynData in if main_data.onlyprinting then None else Some (make_pa_rule sd.pa_syntax_data sd.not_data) let make_generic_printing_rules reserved main_data ntn sd = let open SynData in let custom,level,_ = sd.level in let make_rule rule = { notation_printing_reserved = reserved; notation_printing_rules = { notation_printing_unparsing = rule; notation_printing_level = level; notation_printing_extra = main_data.extra; } } in try let rules = (Ppextend.find_generic_notation_printing_rule ntn) in match main_data.format with | None when not (has_implicit_format (snd sd.pp_syntax_data)) -> (* No intent to define a format, we reuse the existing generic rules *) Some rules | _ -> let rules' = make_rule (make_pp_rule level sd.pp_syntax_data main_data.format) in check_reserved_format ntn rules rules'.notation_printing_rules; Some rules' with Not_found -> Some (make_rule (make_pp_rule level 
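(* No generic printing rule is known yet for this notation: build one from its level, its subentries and the format, if any. *)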
sd.pp_syntax_data main_data.format)) let make_syntax_rules reserved main_data ntn sd = let open SynData in (* Prepare the parsing and printing rules *) let pa_rules = make_parsing_rules main_data sd in let pp_rules = make_generic_printing_rules reserved main_data ntn sd in { synext_level = sd.level; synext_nottyps = List.map snd sd.subentries; synext_notgram = pa_rules; synext_notprint = pp_rules; } (**********************************************************************) (* Main entry point for building specific printing rules *) let merge_extra extra1 extra2 = List.fold_left (fun extras (k,s) -> (k,s) :: List.remove_assoc k extras) extra1 extra2 let make_specific_printing_rules etyps symbols level pp_rule (format,new_extra) = match level with | None -> None | Some (_,level,_) -> let old_extra = match pp_rule with Some { notation_printing_rules = { notation_printing_extra } } -> notation_printing_extra | None -> [] in match format, new_extra, pp_rule with | None, [], Some _ when not (has_implicit_format symbols) -> None | _ -> Some { notation_printing_unparsing = make_pp_rule level (etyps,symbols) format; notation_printing_level = level; notation_printing_extra = merge_extra old_extra new_extra; } (**********************************************************************) (* Miscellaneous *) let warn_unused_interpretation = CWarnings.create ~name:"unused-notation" ~category:"parsing" (fun b -> strbrk "interpretation is used neither for printing nor for parsing, " ++ (if b then strbrk "the declaration could be replaced by \"Reserved Notation\"." else strbrk "the declaration could be removed.")) let make_use reserved onlyparse onlyprint = match onlyparse, onlyprint with | false, false -> Some ParsingAndPrinting | true, false -> Some OnlyParsing | false, true -> Some OnlyPrinting | true, true -> warn_unused_interpretation reserved; None (**********************************************************************) (* Main functions about notations *) let to_map l = let fold accu (x, v) = Id.Map.add x v accu in List.fold_left fold Id.Map.empty l let make_notation_interpretation ~local main_data notation_symbols ntn syntax_rules df env ?(impls=empty_internalization_env) c scope = let {recvars;mainvars;symbols} = notation_symbols in (* Recover types of variables and pa/pp rules; redeclare them if needed *) let level, i_typs, main_data, sy_pp_rules = match syntax_rules with | PrimTokenSyntax -> None, [], main_data, None | SpecificSyntax sy -> (* If the only printing flag has been explicitly requested, put it back *) let main_data = { main_data with onlyprinting = main_data.onlyprinting || sy.synext_notgram = None } in Some sy.synext_level, List.combine mainvars sy.synext_nottyps, main_data, sy.synext_notprint in (* Declare interpretation *) let sy_pp_rules = make_specific_printing_rules i_typs symbols level sy_pp_rules (main_data.format, main_data.extra) in let path = (Lib.library_dp(), Lib.current_dirpath true) in let df' = ntn, (path,df) in let i_vars = make_internalization_vars recvars i_typs in let nenv = { ninterp_var_type = to_map i_vars; ninterp_rec_vars = to_map recvars; } in let (acvars, ac, reversibility) = interp_notation_constr env ~impls nenv c in let plevel = match level with Some (from,level,l) -> level | None (* numeral: irrelevant )*) -> 0 in let interp = make_interpretation_vars recvars plevel acvars i_typs in let map (x, _) = try Some (x, Id.Map.find x interp) with Not_found -> None in let vars = List.map_filter map i_vars in (* Order of elements is important here! 
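The list follows the order of the internalization variables computed above.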
*) let also_in_cases_pattern = has_no_binders_type vars in let onlyparsing,coe = printability level i_typs main_data.onlyparsing reversibility ac in let main_data = { main_data with onlyparsing } in let use = make_use false onlyparsing main_data.onlyprinting in { notobj_local = local; notobj_scope = scope; notobj_use = use; notobj_interp = (vars, ac); notobj_coercion = coe; notobj_deprecation = main_data.deprecation; notobj_notation = df'; notobj_specific_pp_rules = sy_pp_rules; notobj_also_in_cases_pattern = also_in_cases_pattern; } (* Notations without interpretation (Reserved Notation) *) let add_reserved_notation ~local ~infix ({CAst.loc;v=df},mods) = let open SynData in let (main_data,mods) = interp_non_syntax_modifiers ~reserved:true ~infix ~syndef:false None mods in let mods = interp_modifiers main_data.entry mods in let notation_symbols, isnumeral = analyze_notation_tokens ~onlyprinting:main_data.onlyprinting ~infix main_data.entry df in let notation_symbols = if infix then adjust_reserved_infix_notation notation_symbols else notation_symbols in let ntn = make_notation_key main_data.entry notation_symbols.symbols in if isnumeral then user_err ?loc (str "Notations for numbers are primitive and need not be reserved."); let sd = compute_syntax_data ~local main_data notation_symbols ntn mods in let synext = make_syntax_rules true main_data ntn sd in List.iter (fun f -> f ()) sd.msgs; Lib.add_anonymous_leaf (inSyntaxExtension(local,(ntn,synext))) (* Notations associated to a where clause *) type where_decl_notation = decl_notation * notation_main_data * notation_symbols * notation * syntax_rules let prepare_where_notation decl_ntn = let { decl_ntn_string = { CAst.loc ; v = df }; decl_ntn_interp = c; decl_ntn_modifiers = modifiers; decl_ntn_scope = sc; } = decl_ntn in let (main_data,mods) = interp_non_syntax_modifiers ~reserved:false ~infix:false ~syndef:false None modifiers in match mods with | _::_ -> CErrors.user_err (str"Only modifiers not affecting parsing are supported here.") | [] -> let notation_symbols, isnumeral = analyze_notation_tokens ~onlyprinting:main_data.onlyprinting ~infix:false main_data.entry df in let ntn = make_notation_key main_data.entry notation_symbols.symbols in let syntax_rules = if isnumeral then PrimTokenSyntax else try SpecificSyntax (recover_notation_syntax ntn) with NoSyntaxRule -> user_err Pp.(str "Parsing rule for this notation has to be previously declared.") in (decl_ntn, main_data, notation_symbols, ntn, syntax_rules) let add_notation_interpretation ~local env (decl_ntn, main_data, notation_symbols, ntn, syntax_rules) = let { decl_ntn_string = { CAst.loc ; v = df }; decl_ntn_interp = c; decl_ntn_scope = sc } = decl_ntn in let notation = make_notation_interpretation ~local main_data notation_symbols ntn syntax_rules df env c sc in syntax_rules_iter (fun sy -> Lib.add_anonymous_leaf (inSyntaxExtension (local,(ntn,sy)))) syntax_rules; Lib.add_anonymous_leaf (inNotation notation); Dumpglob.dump_notation (CAst.make ?loc ntn) sc true (* interpreting a where clause *) let set_notation_for_interpretation env impls (decl_ntn, main_data, notation_symbols, ntn, syntax_rules) = let { decl_ntn_string = { CAst.loc ; v = df }; decl_ntn_interp = c; decl_ntn_scope = sc } = decl_ntn in let notation = make_notation_interpretation ~local:true main_data notation_symbols ntn syntax_rules df env ~impls c sc in syntax_rules_iter (fun sy -> Lib.add_anonymous_leaf (inSyntaxExtension (true,(ntn,sy)))) syntax_rules; Lib.add_anonymous_leaf (inNotation notation); Option.iter (fun 
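(* If the where clause attaches the notation to a scope, that scope is also opened so that the rest of the mutual block can use the notation without an explicit delimiter. *)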
sc -> Notation.open_close_scope (false,true,sc)) sc (* Main entry point for command Notation *) let add_notation ~local ~infix deprecation env c ({CAst.loc;v=df},modifiers) sc = (* Extract the modifiers not affecting the parsing rule *) let (main_data,syntax_modifiers) = interp_non_syntax_modifiers ~reserved:false ~infix ~syndef:false deprecation modifiers in (* Extract the modifiers not affecting the parsing rule *) let notation_symbols, isnumeral = analyze_notation_tokens ~onlyprinting:main_data.onlyprinting ~infix main_data.entry df in (* Add variables on both sides if an infix notation *) let df, notation_symbols, c = if infix then adjust_infix_notation df notation_symbols c else df, notation_symbols, c in (* Build the canonical identifier of the syntactic part of the notation *) let ntn = make_notation_key main_data.entry notation_symbols.symbols in (* Build or rebuild the syntax rules *) let syntax_rules = if isnumeral then (check_no_syntax_modifiers_for_numeral syntax_modifiers; PrimTokenSyntax) else match syntax_modifiers with | [] -> (* No syntax data: try to rely on a previously declared rule *) (try SpecificSyntax (recover_notation_syntax ntn) with NoSyntaxRule -> (* Try to determine a default syntax rule *) let sd = compute_syntax_data ~local main_data notation_symbols ntn NotationMods.default in SpecificSyntax (make_syntax_rules false main_data ntn sd)) | _ -> let mods = interp_modifiers main_data.entry syntax_modifiers in let sd = compute_syntax_data ~local main_data notation_symbols ntn mods in SpecificSyntax (make_syntax_rules false main_data ntn sd) in (* Build the interpretation *) let notation = make_notation_interpretation ~local main_data notation_symbols ntn syntax_rules df env c sc in (* Declare both syntax and interpretation *) syntax_rules_iter (fun sy -> Lib.add_anonymous_leaf (inSyntaxExtension (local,(ntn,sy)))) syntax_rules; Lib.add_anonymous_leaf (inNotation notation); (* Dump the location of the notation for coqdoc *) Dumpglob.dump_notation (CAst.make ?loc ntn) sc true (* Main entry point for Format Notation *) let add_notation_extra_printing_rule df k v = let notk = let { symbols }, isnumeral = analyze_notation_tokens ~onlyprinting:true ~infix:false InConstrEntry df in if isnumeral then user_err (str "Notations for numbers are primitive."); make_notation_key InConstrEntry symbols in add_notation_extra_printing_rule notk k v (**********************************************************************) (* Scopes, delimiters and classes bound to scopes *) type scope_command = | ScopeDeclare | ScopeDelimAdd of string | ScopeDelimRemove | ScopeClasses of scope_class list let load_scope_command_common silently_define_scope_if_undefined _ (_,(local,scope,o)) = let declare_scope_if_needed = if silently_define_scope_if_undefined then Notation.declare_scope else Notation.ensure_scope in match o with | ScopeDeclare -> Notation.declare_scope scope (* When the default shall be to require that a scope already exists *) (* the call to declare_scope_if_needed will have to be removed below *) | ScopeDelimAdd dlm -> declare_scope_if_needed scope | ScopeDelimRemove -> declare_scope_if_needed scope | ScopeClasses cl -> declare_scope_if_needed scope let load_scope_command = load_scope_command_common true let open_scope_command i (_,(local,scope,o)) = if Int.equal i 1 then match o with | ScopeDeclare -> () | ScopeDelimAdd dlm -> Notation.declare_delimiters scope dlm | ScopeDelimRemove -> Notation.remove_delimiters scope | ScopeClasses cl -> List.iter (Notation.declare_scope_class 
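(* Bind Scope: each scope class of [cl] is bound to [scope], so that arguments recognized as belonging to that class are interpreted in this scope. *)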
scope) cl let cache_scope_command o = load_scope_command_common false 1 o; open_scope_command 1 o let subst_scope_command (subst,(local,scope,o as x)) = match o with | ScopeClasses cl -> let env = Global.env () in let cl' = List.map_filter (subst_scope_class env subst) cl in let cl' = if List.for_all2eq (==) cl cl' then cl else cl' in local, scope, ScopeClasses cl' | _ -> x let classify_scope_command (local, _, _ as o) = if local then Dispose else Substitute o let inScopeCommand : locality_flag * scope_name * scope_command -> obj = declare_object {(default_object "DELIMITERS") with cache_function = cache_scope_command; open_function = simple_open ~cat:notation_cat open_scope_command; load_function = load_scope_command; subst_function = subst_scope_command; classify_function = classify_scope_command} let declare_scope local scope = Lib.add_anonymous_leaf (inScopeCommand(local,scope,ScopeDeclare)) let add_delimiters local scope key = Lib.add_anonymous_leaf (inScopeCommand(local,scope,ScopeDelimAdd key)) let remove_delimiters local scope = Lib.add_anonymous_leaf (inScopeCommand(local,scope,ScopeDelimRemove)) let add_class_scope local scope cl = Lib.add_anonymous_leaf (inScopeCommand(local,scope,ScopeClasses cl)) let interp_syndef_modifiers deprecation modl = let mods, skipped = interp_non_syntax_modifiers ~reserved:false ~infix:false ~syndef:true deprecation modl in if skipped <> [] then (let modifier = List.hd skipped in user_err ?loc:modifier.CAst.loc (str "Abbreviations don't support " ++ Ppvernac.pr_syntax_modifier modifier)); (mods.onlyparsing, mods.itemscopes) let add_syntactic_definition ~local deprecation env ident (vars,c) modl = let (only_parsing, scopes) = interp_syndef_modifiers deprecation modl in let vars = List.map (fun v -> v, List.assoc_opt v scopes) vars in let acvars,pat,reversibility = match vars, intern_name_alias c with | [], Some(r,u) -> (* Check if abbreviation to a name and avoid early insertion of maximal implicit arguments *) Id.Map.empty, NRef(r, u), APrioriReversible | _ -> let fold accu (id,scope) = Id.Map.add id (NtnInternTypeAny scope) accu in let i_vars = List.fold_left fold Id.Map.empty vars in let nenv = { ninterp_var_type = i_vars; ninterp_rec_vars = Id.Map.empty; } in interp_notation_constr env nenv c in let in_pat (id,_) = (id,ETConstr (Constrexpr.InConstrEntry,None,(NextLevel,InternalProd))) in let interp = make_interpretation_vars ~default_if_binding:AsNameOrPattern [] 0 acvars (List.map in_pat vars) in let vars = List.map (fun (x,_) -> (x, Id.Map.find x interp)) vars in let also_in_cases_pattern = has_no_binders_type vars in let onlyparsing = only_parsing || fst (printability None [] false reversibility pat) in Syntax_def.declare_syntactic_definition ~local ~also_in_cases_pattern deprecation ident ~onlyparsing (vars,pat) (**********************************************************************) (* Declaration of custom entry *) let warn_custom_entry = CWarnings.create ~name:"custom-entry-overridden" ~category:"parsing" (fun s -> strbrk "Custom entry " ++ str s ++ strbrk " has been overridden.") let load_custom_entry _ (_,(local,s)) = if Egramcoq.exists_custom_entry s then warn_custom_entry s else Egramcoq.create_custom_entry ~local s let cache_custom_entry o = load_custom_entry 1 o let subst_custom_entry (subst,x) = x let classify_custom_entry (local,s as o) = if local then Dispose else Substitute o let inCustomEntry : locality_flag * string -> obj = declare_object {(default_object "CUSTOM-ENTRIES") with cache_function = cache_custom_entry; load_function 
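(* Custom entries are library objects too; loading a file that declares an already existing entry only emits the overridden warning above. *)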
= load_custom_entry; subst_function = subst_custom_entry; classify_function = classify_custom_entry} let declare_custom_entry local s = if Egramcoq.exists_custom_entry s then user_err Pp.(str "Custom entry " ++ str s ++ str " already exists.") else Lib.add_anonymous_leaf (inCustomEntry (local,s)) coq-8.15.0/vernac/metasyntax.mli000066400000000000000000000050131417001151100165330ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* infix:bool -> Deprecation.t option -> env -> constr_expr -> (lstring * syntax_modifier CAst.t list) -> scope_name option -> unit val add_notation_extra_printing_rule : string -> string -> string -> unit (** Declaring scopes, delimiter keys and default scopes *) val declare_scope : locality_flag -> scope_name -> unit val add_delimiters : locality_flag -> scope_name -> string -> unit val remove_delimiters : locality_flag -> scope_name -> unit val add_class_scope : locality_flag -> scope_name -> scope_class list -> unit (** Add a notation interpretation associated to a "where" clause (already has pa/pp rules) *) type where_decl_notation val prepare_where_notation : decl_notation -> where_decl_notation (** Interpret the modifiers of a where-notation *) val add_notation_interpretation : local:bool -> env -> where_decl_notation -> unit (** Declare the interpretation of the where-notation *) val set_notation_for_interpretation : env -> Constrintern.internalization_env -> where_decl_notation -> unit (** Set the interpretation of the where-notation for interpreting a mutual block *) (** Add only the parsing/printing rule of a notation *) val add_reserved_notation : local:bool -> infix:bool -> (lstring * syntax_modifier CAst.t list) -> unit (** Add a syntactic definition (as in "Notation f := ...") *) val add_syntactic_definition : local:bool -> Deprecation.t option -> env -> Id.t -> Id.t list * constr_expr -> syntax_modifier CAst.t list -> unit (** Print the Camlp5 state of a grammar *) val pr_grammar : string -> Pp.t val pr_custom_grammar : string -> Pp.t val with_syntax_protection : ('a -> 'b) -> 'a -> 'b val declare_custom_entry : locality_flag -> string -> unit coq-8.15.0/vernac/mltop.ml000066400000000000000000000313211417001151100153210ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit; add_dir : string -> unit; ml_loop : unit -> unit } (* Determines the behaviour of Coq with respect to ML files (compiled or not) *) type kind_load = | WithTop of toplevel | WithoutTop (* Must be always initialized *) let load = ref WithoutTop (* Sets and initializes a toplevel (if any) *) let set_top toplevel = load := WithTop toplevel; Nativelib.load_obj := toplevel.load_obj (* Removes the toplevel (if any) *) let remove () = load := WithoutTop; Nativelib.load_obj := (fun x -> () : string -> unit) (* Tests if an Ocaml toplevel runs under Coq *) let is_ocaml_top () = match !load with | WithTop _ -> true |_ -> false (* Tests if we can load ML files *) let has_dynlink = Coq_config.has_natdynlink || not Sys.(backend_type = Native) (* Runs the toplevel loop of Ocaml *) let ocaml_toploop () = match !load with | WithTop t -> Printexc.catch t.ml_loop () | _ -> () (* Dynamic loading of .cmo/.cma *) (* We register errors at least for Dynlink, it is possible to do so 
Symtable too, as we do in the bytecode init code. *) let _ = CErrors.register_handler (function | Dynlink.Error e -> Some (hov 0 (str "Dynlink error: " ++ str Dynlink.(error_message e))) | _ -> None ) let ml_load s = (match !load with | WithTop t -> t.load_obj s | WithoutTop -> Dynlink.loadfile s ); s let dir_ml_load s = match !load with | WithTop _ -> ml_load s | WithoutTop -> let warn = not !Flags.quiet in let _,gname = find_file_in_path ~warn !coq_mlpath_copy s in ml_load gname (* Adds a path to the ML paths *) let add_ml_dir s = match !load with | WithTop t -> t.add_dir s; keep_copy_mlpath s | WithoutTop when has_dynlink -> keep_copy_mlpath s | _ -> () (* convertit un nom quelconque en nom de fichier ou de module *) let mod_of_name name = if Filename.check_suffix name ".cmo" then Filename.chop_suffix name ".cmo" else name let get_ml_object_suffix name = if Filename.check_suffix name ".cmo" then Some ".cmo" else if Filename.check_suffix name ".cma" then Some ".cma" else if Filename.check_suffix name ".cmxs" then Some ".cmxs" else None let file_of_name name = let suffix = get_ml_object_suffix name in let fail s = user_err (str"File not found on loadpath : " ++ str s ++ str"\n" ++ str"Loadpath: " ++ str(String.concat ":" !coq_mlpath_copy) ++ str ".") in if not (Filename.is_relative name) then if Sys.file_exists name then name else fail name else if Sys.(backend_type = Native) then (* XXX: Dynlink.adapt_filename does the same? *) let name = match suffix with | Some ((".cmo"|".cma") as suffix) -> (Filename.chop_suffix name suffix) ^ ".cmxs" | Some ".cmxs" -> name | _ -> name ^ ".cmxs" in if is_in_path !coq_mlpath_copy name then name else fail name else let (full, base) = match suffix with | Some ".cmo" | Some ".cma" -> true, name | Some ".cmxs" -> false, Filename.chop_suffix name ".cmxs" | _ -> false, name in if full then if is_in_path !coq_mlpath_copy base then base else fail base else let name = base ^ ".cma" in if is_in_path !coq_mlpath_copy name then name else let name = base ^ ".cmo" in if is_in_path !coq_mlpath_copy name then name else fail (base ^ ".cm[ao]") (** Is the ML code of the standard library placed into loadable plugins or statically compiled into coqtop ? For the moment this choice is made according to the presence of native dynlink : even if bytecode coqtop could always load plugins, we prefer to have uniformity between bytecode and native versions. *) (* [known_loaded_module] contains the names of the loaded ML modules * (linked or loaded with load_object). It is used not to load a * module twice. It is NOT the list of ML modules Coq knows. *) let known_loaded_modules = ref String.Map.empty let add_known_module mname path = if not (String.Map.mem mname !known_loaded_modules) || String.Map.find mname !known_loaded_modules = None then known_loaded_modules := String.Map.add mname path !known_loaded_modules let module_is_known mname = String.Map.mem mname !known_loaded_modules let known_module_path mname = String.Map.find mname !known_loaded_modules (** A plugin is just an ML module with an initialization function. *) let known_loaded_plugins = ref String.Map.empty let add_known_plugin init name = add_known_module name None; known_loaded_plugins := String.Map.add name init !known_loaded_plugins let init_known_plugins () = String.Map.iter (fun _ f -> f()) !known_loaded_plugins (** Registering functions to be used at caching time, that is when the Declare ML module command is issued. 
*) let cache_objs = ref String.Map.empty let declare_cache_obj f name = let objs = try String.Map.find name !cache_objs with Not_found -> [] in let objs = f :: objs in cache_objs := String.Map.add name objs !cache_objs let perform_cache_obj name = let objs = try String.Map.find name !cache_objs with Not_found -> [] in let objs = List.rev objs in List.iter (fun f -> f ()) objs (** ml object = ml module or plugin *) let init_ml_object mname = try String.Map.find mname !known_loaded_plugins () with Not_found -> () let load_ml_object mname ?path fname= let path = match path with | None -> dir_ml_load fname | Some p -> ml_load p in add_known_module mname (Some path); init_ml_object mname; path let add_known_module m = add_known_module m None (* Summary of declared ML Modules *) (* List and not String.Set because order is important: most recent first. *) let loaded_modules = ref [] let get_loaded_modules () = List.rev !loaded_modules let add_loaded_module md path = if not (List.mem_assoc md !loaded_modules) then loaded_modules := (md,path) :: !loaded_modules let reset_loaded_modules () = loaded_modules := [] let if_verbose_load verb f name ?path fname = if not verb then f name ?path fname else let info = str "[Loading ML file " ++ str fname ++ str " ..." in try let path = f name ?path fname in Feedback.msg_info (info ++ str " done]"); path with reraise -> Feedback.msg_info (info ++ str " failed]"); raise reraise (** Load a module for the first time (i.e. dynlink it) or simulate its reload (i.e. doing nothing except maybe an initialization function). *) let trigger_ml_object verb cache reinit ?path name = if module_is_known name then begin if reinit then init_ml_object name; add_loaded_module name (known_module_path name); if cache then perform_cache_obj name end else if not has_dynlink then user_err (str "Dynamic link not supported (module " ++ str name ++ str ").") else begin let file = file_of_name (Option.default name path) in let path = if_verbose_load (verb && not !Flags.quiet) load_ml_object name ?path file in add_loaded_module name (Some path); if cache then perform_cache_obj name end let unfreeze_ml_modules x = reset_loaded_modules (); List.iter (fun (name,path) -> trigger_ml_object false false false ?path name) x let _ = Summary.declare_ml_modules_summary { Summary.freeze_function = (fun ~marshallable -> get_loaded_modules ()); Summary.unfreeze_function = unfreeze_ml_modules; Summary.init_function = reset_loaded_modules } (* Liboject entries of declared ML Modules *) (* Digest of module used to compile the file *) type ml_module_digest = | NoDigest | AnyDigest of Digest.t (* digest of any used cma / cmxa *) type ml_module_object = { mlocal : Vernacexpr.locality_flag; mnames : (string * ml_module_digest) list } let add_module_digest m = if not has_dynlink then m, NoDigest else try let file = file_of_name m in let path, file = System.where_in_path ~warn:false !coq_mlpath_copy file in m, AnyDigest (Digest.file file) with | Not_found -> m, NoDigest let cache_ml_objects (_,{mnames=mnames}) = let iter (obj, _) = trigger_ml_object true true true obj in List.iter iter mnames let load_ml_objects _ (_,{mnames=mnames}) = let iter (obj, _) = trigger_ml_object true false true obj in List.iter iter mnames let classify_ml_objects ({mlocal=mlocal} as o) = if mlocal then Libobject.Dispose else Libobject.Substitute o let inMLModule : ml_module_object -> Libobject.obj = let open Libobject in declare_object {(default_object "ML-MODULE") with cache_function = cache_ml_objects; load_function = 
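(* Caching, i.e. an interactive Declare ML Module, also runs the registered cache objects, whereas loading a compiled library only re-triggers the initialization of the modules. *)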
load_ml_objects; subst_function = (fun (_,o) -> o); classify_function = classify_ml_objects } let declare_ml_modules local l = let l = List.map mod_of_name l in let l = List.map add_module_digest l in Lib.add_anonymous_leaf ~cache_first:false (inMLModule {mlocal=local; mnames=l}) let print_ml_path () = let l = !coq_mlpath_copy in str"ML Load Path:" ++ fnl () ++ str" " ++ hv 0 (prlist_with_sep fnl str l) (* Printing of loaded ML modules *) let print_ml_modules () = let l = get_loaded_modules () in str"Loaded ML Modules: " ++ pr_vertical_list str (List.map fst l) let print_gc () = let stat = Gc.stat () in let msg = str "minor words: " ++ real stat.Gc.minor_words ++ fnl () ++ str "promoted words: " ++ real stat.Gc.promoted_words ++ fnl () ++ str "major words: " ++ real stat.Gc.major_words ++ fnl () ++ str "minor_collections: " ++ int stat.Gc.minor_collections ++ fnl () ++ str "major_collections: " ++ int stat.Gc.major_collections ++ fnl () ++ str "heap_words: " ++ int stat.Gc.heap_words ++ fnl () ++ str "heap_chunks: " ++ int stat.Gc.heap_chunks ++ fnl () ++ str "live_words: " ++ int stat.Gc.live_words ++ fnl () ++ str "live_blocks: " ++ int stat.Gc.live_blocks ++ fnl () ++ str "free_words: " ++ int stat.Gc.free_words ++ fnl () ++ str "free_blocks: " ++ int stat.Gc.free_blocks ++ fnl () ++ str "largest_free: " ++ int stat.Gc.largest_free ++ fnl () ++ str "fragments: " ++ int stat.Gc.fragments ++ fnl () ++ str "compactions: " ++ int stat.Gc.compactions ++ fnl () ++ str "top_heap_words: " ++ int stat.Gc.top_heap_words ++ fnl () ++ str "stack_size: " ++ int stat.Gc.stack_size in hv 0 msg coq-8.15.0/vernac/mltop.mli000066400000000000000000000051151417001151100154740ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit; add_dir : string -> unit; ml_loop : unit -> unit } (** Sets and initializes a toplevel (if any) *) val set_top : toplevel -> unit (** Removes the toplevel (if any) *) val remove : unit -> unit (** Tests if an Ocaml toplevel runs under Coq *) val is_ocaml_top : unit -> bool (** Starts the Ocaml toplevel loop *) val ocaml_toploop : unit -> unit (** {5 ML Dynlink} *) (** Adds a dir to the plugin search path *) val add_ml_dir : string -> unit (** Tests if we can load ML files *) val has_dynlink : bool (** List of modules linked to the toplevel *) val add_known_module : string -> unit val module_is_known : string -> bool (** {5 Initialization functions} *) (** Declare a plugin and its initialization function. A plugin is just an ML module with an initialization function. Adding a known plugin implies adding it as a known ML module. The initialization function is granted to be called after Coq is fully bootstrapped, even if the plugin is statically linked with the toplevel *) val add_known_plugin : (unit -> unit) -> string -> unit (** Calls all initialization functions in a non-specified order *) val init_known_plugins : unit -> unit (** Register a callback that will be called when the module is declared with the Declare ML Module command. This is useful to define Coq objects at that time only. Several functions can be defined for one module; they will be called in the order of declaration, and after the ML module has been properly initialized. 
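As a purely illustrative example, a hypothetical plugin could call [Mltop.declare_cache_obj (fun () -> Foo.register_objects ()) "foo_plugin"] from its initialization code; [Foo.register_objects] and the plugin name are made-up here.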
*) val declare_cache_obj : (unit -> unit) -> string -> unit (** {5 Declaring modules} *) val declare_ml_modules : Vernacexpr.locality_flag -> string list -> unit (** {5 Utilities} *) val print_ml_path : unit -> Pp.t val print_ml_modules : unit -> Pp.t val print_gc : unit -> Pp.t coq-8.15.0/vernac/opaques.ml000066400000000000000000000154621417001151100156530ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Future.force c | Joined v -> v (** Current table of opaque terms *) module Summary = struct type t = opaque_result Opaqueproof.HandleMap.t let state : t ref = ref Opaqueproof.HandleMap.empty let init () = state := Opaqueproof.HandleMap.empty let freeze ~marshallable = if marshallable then let iter _ pf = match pf with | OpaqueCertif cert -> begin match !cert with | Computation c -> cert := Joined (Future.force c) | Joined _ -> () end | OpaqueValue _ -> () in let () = Opaqueproof.HandleMap.iter iter !state in !state else !state let unfreeze s = state := s let join ?(except=Future.UUIDSet.empty) () = let iter i pf = match pf with | OpaqueValue _ -> () | OpaqueCertif cert -> match !cert with | Joined cert -> (* FIXME: in this case we lost the fix_exn wrapper. Does that matter? *) if not @@ Safe_typing.is_filled_opaque i (Global.safe_env ()) then Global.fill_opaque cert | Computation cert -> if Future.UUIDSet.mem (Future.uuid cert) except then () else if Safe_typing.is_filled_opaque i (Global.safe_env ()) then assert (Future.is_over cert) else (* Little belly dance to preserve the fix_exn wrapper around filling *) Future.force @@ Future.chain cert (fun cert -> Global.fill_opaque cert) in Opaqueproof.HandleMap.iter iter !state end type opaque_disk = Opaqueproof.opaque_proofterm option array let get_opaque_disk i t = let i = Opaqueproof.repr_handle i in let () = assert (0 <= i && i < Array.length t) in t.(i) let set_opaque_disk i (c, priv) t = let i = Opaqueproof.repr_handle i in let () = assert (0 <= i && i < Array.length t) in let () = assert (Option.is_empty t.(i)) in let c = Constr.hcons c in t.(i) <- Some (c, priv) let current_opaques = Summary.state let declare_defined_opaque ?feedback_id i (body : Safe_typing.private_constants const_entry_body) = (* Note that the environment in which the variable is checked it the one when the thunk is evaluated, not the one where this function is called. It does not matter because the former must be an extension of the latter or otherwise the call to Safe_typing would throw an anomaly. 
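The certificate is computed lazily through a Future; when the proof body is already available, the code below eagerly fills the opaque table of the global environment.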
*) let proof = Future.chain body begin fun (body, eff) -> let cert = Safe_typing.check_opaque (Global.safe_env ()) i (body, eff) in let () = Option.iter (fun id -> Feedback.feedback ~id Feedback.Complete) feedback_id in cert end in (* If the proof is already computed we fill it eagerly *) let () = match Future.peek_val proof with | None -> () | Some cert -> Global.fill_opaque cert in let proof = OpaqueCertif (ref (Computation proof)) in let () = assert (not @@ Opaqueproof.HandleMap.mem i !current_opaques) in current_opaques := Opaqueproof.HandleMap.add i proof !current_opaques let declare_private_opaque opaque = let (i, pf) = Safe_typing.repr_exported_opaque opaque in (* Joining was already done at private declaration time *) let proof = OpaqueValue pf in let () = assert (not @@ Opaqueproof.HandleMap.mem i !current_opaques) in current_opaques := Opaqueproof.HandleMap.add i proof !current_opaques let get_current_opaque i = try let pf = Opaqueproof.HandleMap.find i !current_opaques in match pf with | OpaqueValue pf -> Some pf | OpaqueCertif cert -> let c, ctx = Safe_typing.repr_certificate (force cert) in let ctx = match ctx with | Opaqueproof.PrivateMonomorphic _ -> Opaqueproof.PrivateMonomorphic () | Opaqueproof.PrivatePolymorphic _ as ctx -> ctx in Some (c, ctx) with Not_found -> None let get_current_constraints i = try let pf = Opaqueproof.HandleMap.find i !current_opaques in match pf with | OpaqueValue _ -> None | OpaqueCertif cert -> let _, ctx = Safe_typing.repr_certificate (force cert) in let ctx = match ctx with | Opaqueproof.PrivateMonomorphic ctx -> ctx | Opaqueproof.PrivatePolymorphic _ -> Univ.ContextSet.empty in Some ctx with Not_found -> None let dump ?(except=Future.UUIDSet.empty) () = let n = if Opaqueproof.HandleMap.is_empty !current_opaques then 0 else (Opaqueproof.repr_handle @@ fst @@ Opaqueproof.HandleMap.max_binding !current_opaques) + 1 in let opaque_table = Array.make n None in let fold h cu f2t_map = match cu with | OpaqueValue p -> let i = Opaqueproof.repr_handle h in let () = opaque_table.(i) <- Some p in f2t_map | OpaqueCertif c -> let i = Opaqueproof.repr_handle h in let f2t_map, proof = match !c with | Computation cert -> let uid = Future.uuid cert in let f2t_map = Future.UUIDMap.add uid h f2t_map in let c = Future.peek_val cert in let () = if Option.is_empty c && (not @@ Future.UUIDSet.mem uid except) then CErrors.anomaly Pp.(str"Proof object "++int i++str" is not checked nor to be checked") in f2t_map, c | Joined cert -> f2t_map, Some cert in let c = match proof with | None -> None | Some cert -> let (c, priv) = Safe_typing.repr_certificate cert in let priv = match priv with | Opaqueproof.PrivateMonomorphic _ -> Opaqueproof.PrivateMonomorphic () | Opaqueproof.PrivatePolymorphic _ as p -> p in Some (c, priv) in let () = opaque_table.(i) <- c in f2t_map in let f2t_map = Opaqueproof.HandleMap.fold fold !current_opaques Future.UUIDMap.empty in (opaque_table, f2t_map) coq-8.15.0/vernac/opaques.mli000066400000000000000000000031451417001151100160170ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* anomaly (Pp.str "Context is used only internally.") let pr_params pr_c (xl,(c,t)) = hov 2 (prlist_with_sep sep pr_lident xl ++ spc() ++ (if c then str":>" else str":" ++ spc() ++ pr_c t)) let rec factorize = function | [] -> [] | (c,(idl,t))::l -> match factorize l with | (xl,((c', t') as r))::l' when (c : 
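(* [factorize] merges consecutive parameters sharing the same type and the same coercion flag, so that they are printed as a single group such as (x y : T). *)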
bool) == c' && (=) t t' -> (* FIXME: we need equality on constr_expr *) (idl@xl,r)::l' | l' -> (idl,(c,t))::l' let pr_ne_params_list pr_c l = match factorize l with | [p] -> pr_params pr_c p | l -> prlist_with_sep spc (fun p -> hov 1 (str "(" ++ pr_params pr_c p ++ str ")")) l (* prlist_with_sep pr_semicolon (pr_params pr_c) *) let pr_thm_token k = keyword (string_of_theorem_kind k) let pr_syntax_modifier = let open Gramlib.Gramext in CAst.with_val (function | SetItemLevel (l,bko,n) -> prlist_with_sep sep_v2 str l ++ spc () ++ pr_at_level n ++ pr_opt pr_constr_as_binder_kind bko | SetItemScope (l,s) -> prlist_with_sep sep_v2 str l ++ spc () ++ str"in scope" ++ str s | SetLevel n -> pr_at_level (NumLevel n) | SetCustomEntry (s,n) -> keyword "in" ++ spc() ++ keyword "custom" ++ spc() ++ str s ++ (match n with None -> mt () | Some n -> pr_at_level (NumLevel n)) | SetAssoc LeftA -> keyword "left associativity" | SetAssoc RightA -> keyword "right associativity" | SetAssoc NonA -> keyword "no associativity" | SetEntryType (x,typ) -> str x ++ spc() ++ pr_set_simple_entry_type typ | SetOnlyPrinting -> keyword "only printing" | SetOnlyParsing -> keyword "only parsing" | SetFormat (TextFormat s) -> keyword "format " ++ pr_ast qs s | SetFormat (ExtraFormat (k,s)) -> keyword "format " ++ qs k ++ spc() ++ pr_ast qs s) let pr_syntax_modifiers = function | [] -> mt() | l -> spc() ++ hov 1 (str"(" ++ prlist_with_sep sep_v2 pr_syntax_modifier l ++ str")") let pr_decl_notation prc decl_ntn = let open Vernacexpr in let { decl_ntn_string = {CAst.loc;v=ntn}; decl_ntn_interp = c; decl_ntn_modifiers = modifiers; decl_ntn_scope = scopt } = decl_ntn in fnl () ++ keyword "where " ++ qs ntn ++ str " := " ++ Flags.without_option Flags.beautify prc c ++ pr_syntax_modifiers modifiers ++ pr_opt (fun sc -> str ": " ++ str sc) scopt let pr_rec_definition { fname; univs; rec_order; binders; rtype; body_def; notations } = let pr_pure_lconstr c = Flags.without_option Flags.beautify pr_lconstr c in let annot = pr_guard_annot pr_lconstr_expr binders rec_order in pr_ident_decl (fname,univs) ++ pr_binders_arg binders ++ annot ++ pr_type_option (fun c -> spc() ++ pr_lconstr_expr c) rtype ++ pr_opt (fun def -> str":=" ++ brk(1,2) ++ pr_pure_lconstr def) body_def ++ prlist (pr_decl_notation @@ pr_constr) notations let pr_statement head (idpl,(bl,c)) = hov 2 (head ++ spc() ++ pr_ident_decl idpl ++ spc() ++ (match bl with [] -> mt() | _ -> pr_binders bl ++ spc()) ++ str":" ++ pr_spc_lconstr c) (**************************************) (* Pretty printer for vernac commands *) (**************************************) let pr_constrarg c = spc () ++ pr_constr c let pr_lconstrarg c = spc () ++ pr_lconstr c let pr_intarg n = spc () ++ int n let pr_oc = function | NoInstance -> str" :" | BackInstance -> str" :>" let pr_record_field (x, { rf_subclass = oc ; rf_priority = pri ; rf_notation = ntn }) = let prx = match x with | AssumExpr (id,binders,t) -> hov 1 (pr_lname id ++ pr_binders_arg binders ++ spc() ++ pr_oc oc ++ spc() ++ pr_lconstr_expr t) | DefExpr(id,binders,b,opt) -> (match opt with | Some t -> hov 1 (pr_lname id ++ pr_binders_arg binders ++ spc() ++ pr_oc oc ++ spc() ++ pr_lconstr_expr t ++ str" :=" ++ pr_lconstr b) | None -> hov 1 (pr_lname id ++ str" :=" ++ spc() ++ pr_lconstr b)) in let prpri = match pri with None -> mt() | Some i -> str "| " ++ int i in prx ++ prpri ++ prlist (pr_decl_notation @@ pr_constr) ntn let pr_record_decl c fs = pr_opt pr_lident c ++ pr_record "{" "}" pr_record_field fs let pr_printable = function | 
PrintFullContext -> keyword "Print All" | PrintSectionContext s -> keyword "Print Section" ++ spc() ++ Libnames.pr_qualid s | PrintGrammar ent -> keyword "Print Grammar" ++ spc() ++ str ent | PrintCustomGrammar ent -> keyword "Print Custom Grammar" ++ spc() ++ str ent | PrintLoadPath dir -> keyword "Print LoadPath" ++ pr_opt DirPath.print dir | PrintLibraries -> keyword "Print Libraries" | PrintMLLoadPath -> keyword "Print ML Path" | PrintMLModules -> keyword "Print ML Modules" | PrintDebugGC -> keyword "Print ML GC" | PrintGraph -> keyword "Print Graph" | PrintClasses -> keyword "Print Classes" | PrintTypeClasses -> keyword "Print TypeClasses" | PrintInstances qid -> keyword "Print Instances" ++ spc () ++ pr_smart_global qid | PrintCoercions -> keyword "Print Coercions" | PrintCoercionPaths (s,t) -> keyword "Print Coercion Paths" ++ spc() ++ pr_class_rawexpr s ++ spc() ++ pr_class_rawexpr t | PrintCanonicalConversions qids -> keyword "Print Canonical Structures" ++ prlist pr_smart_global qids | PrintTypingFlags -> keyword "Print Typing Flags" | PrintTables -> keyword "Print Tables" | PrintHintGoal -> keyword "Print Hint" | PrintHint qid -> keyword "Print Hint" ++ spc () ++ pr_smart_global qid | PrintHintDb -> keyword "Print Hint *" | PrintHintDbName s -> keyword "Print HintDb" ++ spc () ++ str s | PrintUniverses (b, g, fopt) -> let cmd = if b then "Print Sorted Universes" else "Print Universes" in let pr_subgraph = prlist_with_sep spc pr_qualid in keyword cmd ++ pr_opt pr_subgraph g ++ pr_opt str fopt | PrintName (qid,udecl) -> keyword "Print" ++ spc() ++ pr_smart_global qid ++ pr_univ_name_list udecl | PrintModuleType qid -> keyword "Print Module Type" ++ spc() ++ pr_qualid qid | PrintModule qid -> keyword "Print Module" ++ spc() ++ pr_qualid qid | PrintInspect n -> keyword "Inspect" ++ spc() ++ int n | PrintScopes -> keyword "Print Scopes" | PrintScope s -> keyword "Print Scope" ++ spc() ++ str s | PrintVisibility s -> keyword "Print Visibility" ++ pr_opt str s | PrintAbout (qid,l,gopt) -> pr_opt (fun g -> Goal_select.pr_goal_selector g ++ str ":"++ spc()) gopt ++ keyword "About" ++ spc() ++ pr_smart_global qid ++ pr_univ_name_list l | PrintImplicit qid -> keyword "Print Implicit" ++ spc() ++ pr_smart_global qid (* spiwack: command printing all the axioms and section variables used in a term *) | PrintAssumptions (b, t, qid) -> let cmd = match b, t with | true, true -> "Print All Dependencies" | true, false -> "Print Opaque Dependencies" | false, true -> "Print Transparent Dependencies" | false, false -> "Print Assumptions" in keyword cmd ++ spc() ++ pr_smart_global qid | PrintNamespace dp -> keyword "Print Namespace" ++ DirPath.print dp | PrintStrategy None -> keyword "Print Strategies" | PrintStrategy (Some qid) -> keyword "Print Strategy" ++ pr_smart_global qid | PrintRegistered -> keyword "Print Registered" let pr_using e = let rec aux = function | SsEmpty -> "()" | SsType -> "(Type)" | SsSingl { v=id } -> "("^Id.to_string id^")" | SsCompl e -> "-" ^ aux e^"" | SsUnion(e1,e2) -> "("^aux e1 ^" + "^ aux e2^")" | SsSubstr(e1,e2) -> "("^aux e1 ^" - "^ aux e2^")" | SsFwdClose e -> "("^aux e^")*" in Pp.str (aux e) let pr_extend s cl = let pr_arg a = try pr_gen a with Failure _ -> str "" in try let rl = Egramml.get_extend_vernac_rule s in let rec aux rl cl = match rl, cl with | Egramml.GramNonTerminal _ :: rl, arg :: cl -> pr_arg arg :: aux rl cl | Egramml.GramTerminal s :: rl, cl -> str s :: aux rl cl | [], [] -> [] | _ -> assert false in hov 1 (pr_sequence identity (aux rl cl)) with 
Not_found -> hov 1 (str "TODO(" ++ str (fst s) ++ spc () ++ prlist_with_sep sep pr_arg cl ++ str ")") let pr_vernac_expr v = let return = tag_vernac v in match v with | VernacLoad (f,s) -> return ( keyword "Load" ++ if f then (spc() ++ keyword "Verbose" ++ spc()) else spc() ++ qs s ) (* Proof management *) | VernacAbortAll -> return (keyword "Abort All") | VernacRestart -> return (keyword "Restart") | VernacUnfocus -> return (keyword "Unfocus") | VernacUnfocused -> return (keyword "Unfocused") | VernacAbort id -> return (keyword "Abort" ++ pr_opt pr_lident id) | VernacUndo i -> return ( if Int.equal i 1 then keyword "Undo" else keyword "Undo" ++ pr_intarg i ) | VernacUndoTo i -> return (keyword "Undo" ++ spc() ++ keyword "To" ++ pr_intarg i) | VernacFocus i -> return (keyword "Focus" ++ pr_opt int i) | VernacShow s -> let pr_goal_reference = function | OpenSubgoals -> mt () | NthGoal n -> spc () ++ int n | GoalId id -> spc () ++ pr_id id in let pr_showable = function | ShowGoal n -> keyword "Show" ++ pr_goal_reference n | ShowProof -> keyword "Show Proof" | ShowExistentials -> keyword "Show Existentials" | ShowUniverses -> keyword "Show Universes" | ShowProofNames -> keyword "Show Conjectures" | ShowIntros b -> keyword "Show " ++ (if b then keyword "Intros" else keyword "Intro") | ShowMatch id -> keyword "Show Match " ++ pr_qualid id in return (pr_showable s) | VernacCheckGuard -> return (keyword "Guarded") (* Resetting *) | VernacResetName id -> return (keyword "Reset" ++ spc() ++ pr_lident id) | VernacResetInitial -> return (keyword "Reset Initial") | VernacBack i -> return ( if Int.equal i 1 then keyword "Back" else keyword "Back" ++ pr_intarg i ) (* Syntax *) | VernacOpenCloseScope (opening,sc) -> return ( keyword (if opening then "Open " else "Close ") ++ keyword "Scope" ++ spc() ++ str sc ) | VernacDeclareScope sc -> return ( keyword "Declare Scope" ++ spc () ++ str sc ) | VernacDelimiters (sc,Some key) -> return ( keyword "Delimit Scope" ++ spc () ++ str sc ++ spc() ++ keyword "with" ++ spc () ++ str key ) | VernacDelimiters (sc, None) -> return ( keyword "Undelimit Scope" ++ spc () ++ str sc ) | VernacBindScope (sc,cll) -> return ( keyword "Bind Scope" ++ spc () ++ str sc ++ spc() ++ keyword "with" ++ spc () ++ prlist_with_sep spc pr_class_rawexpr cll ) | VernacNotation (infix,c,({v=s},l),opt) -> return ( hov 2 (hov 0 (keyword (if infix then "Infix" else "Notation") ++ spc() ++ qs s ++ str " :=" ++ Flags.without_option Flags.beautify pr_constrarg c) ++ pr_syntax_modifiers l ++ (match opt with | None -> mt() | Some sc -> spc() ++ str":" ++ spc() ++ str sc)) ) | VernacReservedNotation (_, (s, l)) -> return ( keyword "Reserved Notation" ++ spc() ++ pr_ast qs s ++ pr_syntax_modifiers l ) | VernacNotationAddFormat(s,k,v) -> return ( keyword "Format Notation " ++ qs s ++ spc () ++ qs k ++ spc() ++ qs v ) | VernacDeclareCustomEntry s -> return ( keyword "Declare Custom Entry " ++ str s ) (* Gallina *) | VernacDefinition ((discharge,kind),id,b) -> (* A verifier... 
*) let pr_def_token dk = keyword ( if Name.is_anonymous (fst id).v then "Goal" else string_of_definition_object_kind dk) in let pr_reduce = function | None -> mt() | Some r -> keyword "Eval" ++ spc() ++ pr_red_expr r ++ keyword " in" ++ spc() in let pr_def_body = function | DefineBody (bl,red,body,d) -> let ty = match d with | None -> mt() | Some ty -> spc() ++ str":" ++ pr_spc_lconstr ty in (pr_binders_arg bl,ty,Some (pr_reduce red ++ pr_lconstr body)) | ProveBody (bl,t) -> let typ u = if (fst id).v = Anonymous then (assert (bl = []); u) else (str" :" ++ u) in (pr_binders_arg bl, typ (pr_spc_lconstr t), None) in let (binds,typ,c) = pr_def_body b in return ( hov 2 ( pr_def_token kind ++ spc() ++ pr_lname_decl id ++ binds ++ typ ++ (match c with | None -> mt() | Some cc -> str" :=" ++ spc() ++ cc)) ) | VernacStartTheoremProof (ki,l) -> return ( hov 1 (pr_statement (pr_thm_token ki) (List.hd l) ++ prlist (pr_statement (spc () ++ keyword "with")) (List.tl l)) ) | VernacEndProof Admitted -> return (keyword "Admitted") | VernacEndProof (Proved (opac,o)) -> return ( match o with | None -> (match opac with | Transparent -> keyword "Defined" | Opaque -> keyword "Qed") | Some id -> (if opac <> Transparent then keyword "Save" else keyword "Defined") ++ spc() ++ pr_lident id ) | VernacExactProof c -> return (hov 2 (keyword "Proof" ++ pr_lconstrarg c)) | VernacAssumption ((discharge,kind),t,l) -> let n = List.length (List.flatten (List.map fst (List.map snd l))) in let pr_params (c, (xl, t)) = hov 2 (prlist_with_sep sep pr_ident_decl xl ++ spc() ++ (if c then str":>" else str":" ++ spc() ++ pr_lconstr_expr t)) in let assumptions = prlist_with_sep spc (fun p -> hov 1 (str "(" ++ pr_params p ++ str ")")) l in return (hov 2 (pr_assumption_token (n > 1) discharge kind ++ pr_non_empty_arg pr_assumption_inline t ++ spc() ++ assumptions)) | VernacInductive (f,l) -> let pr_constructor (coe,(id,c)) = hov 2 (pr_lident id ++ str" " ++ (if coe then str":>" else str":") ++ Flags.without_option Flags.beautify pr_spc_lconstr c) in let pr_constructor_list l = match l with | Constructors [] -> mt() | Constructors l -> let fst_sep = match l with [_] -> " " | _ -> " | " in pr_com_at (begin_of_inductive l) ++ fnl() ++ str fst_sep ++ prlist_with_sep (fun _ -> fnl() ++ str" | ") pr_constructor l | RecordDecl (c,fs) -> pr_record_decl c fs in let pr_oneind key (((coe,iddecl),(indupar,indpar),s,lc),ntn) = hov 0 ( str key ++ spc() ++ (if coe then str"> " else str"") ++ pr_cumul_ident_decl iddecl ++ pr_and_type_binders_arg indupar ++ pr_opt (fun p -> str "|" ++ spc() ++ pr_and_type_binders_arg p) indpar ++ pr_opt (fun s -> str":" ++ spc() ++ pr_lconstr_expr s) s ++ str" :=") ++ pr_constructor_list lc ++ prlist (pr_decl_notation @@ pr_constr) ntn in let kind = match f with | Record -> "Record" | Structure -> "Structure" | Inductive_kw -> "Inductive" | CoInductive -> "CoInductive" | Class _ -> "Class" | Variant -> "Variant" in return ( hov 1 (pr_oneind kind (List.hd l)) ++ (prlist (fun ind -> fnl() ++ hov 1 (pr_oneind "with" ind)) (List.tl l)) ) | VernacFixpoint (local, recs) -> let local = match local with | DoDischarge -> "Let " | NoDischarge -> "" in return ( hov 0 (str local ++ keyword "Fixpoint" ++ spc () ++ prlist_with_sep (fun _ -> fnl () ++ keyword "with" ++ spc ()) pr_rec_definition recs) ) | VernacCoFixpoint (local, corecs) -> let local = match local with | DoDischarge -> keyword "Let" ++ spc () | NoDischarge -> str "" in let pr_onecorec {fname; univs; binders; rtype; body_def; notations } = pr_ident_decl (fname,univs) 
++ spc() ++ pr_binders binders ++ spc() ++ str":" ++ spc() ++ pr_lconstr_expr rtype ++ pr_opt (fun def -> str":=" ++ brk(1,2) ++ pr_lconstr def) body_def ++ prlist (pr_decl_notation @@ pr_constr) notations in return ( hov 0 (local ++ keyword "CoFixpoint" ++ spc() ++ prlist_with_sep (fun _ -> fnl() ++ keyword "with" ++ spc ()) pr_onecorec corecs) ) | VernacScheme l -> return ( hov 2 (keyword "Scheme" ++ spc() ++ prlist_with_sep (fun _ -> fnl() ++ keyword "with" ++ spc ()) pr_onescheme l) ) | VernacCombinedScheme (id, l) -> return ( hov 2 (keyword "Combined Scheme" ++ spc() ++ pr_lident id ++ spc() ++ keyword "from" ++ spc() ++ prlist_with_sep (fun _ -> fnl() ++ str", ") pr_lident l) ) | VernacUniverse v -> return ( hov 2 (keyword "Universe" ++ spc () ++ prlist_with_sep (fun _ -> str",") pr_lident v) ) | VernacConstraint v -> return ( hov 2 (keyword "Constraint" ++ spc () ++ prlist_with_sep (fun _ -> str",") pr_uconstraint v) ) (* Gallina extensions *) | VernacBeginSection id -> return (hov 2 (keyword "Section" ++ spc () ++ pr_lident id)) | VernacEndSegment id -> return (hov 2 (keyword "End" ++ spc() ++ pr_lident id)) | VernacNameSectionHypSet (id,set) -> return (hov 2 (keyword "Package" ++ spc() ++ pr_lident id ++ spc()++ str ":="++spc()++pr_using set)) | VernacRequire (from, exp, l) -> let from = match from with | None -> mt () | Some r -> keyword "From" ++ spc () ++ pr_module r ++ spc () in return ( hov 2 (from ++ keyword "Require" ++ spc() ++ pr_require_token exp ++ prlist_with_sep sep pr_module l) ) | VernacImport (f,cats,l) -> return ( (if f then keyword "Export" else keyword "Import") ++ pr_import_cats cats ++ spc() ++ prlist_with_sep sep pr_import_module l ) | VernacCanonical q -> return ( keyword "Canonical Structure" ++ spc() ++ pr_smart_global q ) | VernacCoercion (id,c1,c2) -> return ( hov 1 ( keyword "Coercion" ++ spc() ++ pr_smart_global id ++ spc() ++ str":" ++ spc() ++ pr_class_rawexpr c1 ++ spc() ++ str">->" ++ spc() ++ pr_class_rawexpr c2) ) | VernacIdentityCoercion (id,c1,c2) -> return ( hov 1 ( keyword "Identity Coercion" ++ spc() ++ pr_lident id ++ spc() ++ str":" ++ spc() ++ pr_class_rawexpr c1 ++ spc() ++ str">->" ++ spc() ++ pr_class_rawexpr c2) ) | VernacInstance (instid, sup, cl, props, info) -> return ( hov 1 ( keyword "Instance" ++ (match instid with | {loc; v = Name id}, l -> spc () ++ pr_ident_decl (CAst.(make ?loc id),l) ++ spc () | { v = Anonymous }, _ -> mt ()) ++ pr_and_type_binders_arg sup ++ str":" ++ spc () ++ pr_constr cl ++ pr_hint_info pr_constr_pattern_expr info ++ (match props with | Some (true, { v = CRecord l}) -> spc () ++ str":=" ++ spc () ++ pr_record_body "{" "}" pr_lconstr l | Some (true,_) -> assert false | Some (false,p) -> spc () ++ str":=" ++ spc () ++ pr_constr p | None -> mt())) ) | VernacDeclareInstance (instid, sup, cl, info) -> return ( hov 1 ( keyword "Declare Instance" ++ spc () ++ pr_ident_decl instid ++ spc () ++ pr_and_type_binders_arg sup ++ str":" ++ spc () ++ pr_constr cl ++ pr_hint_info pr_constr_pattern_expr info) ) | VernacContext l -> return ( hov 1 ( keyword "Context" ++ pr_and_type_binders_arg l) ) | VernacExistingInstance insts -> let pr_inst (id, info) = pr_qualid id ++ pr_hint_info pr_constr_pattern_expr info in return ( hov 1 (keyword "Existing" ++ spc () ++ keyword(String.plural (List.length insts) "Instance") ++ spc () ++ prlist_with_sep spc pr_inst insts) ) | VernacExistingClass id -> return ( hov 1 (keyword "Existing" ++ spc () ++ keyword "Class" ++ spc () ++ pr_qualid id) ) (* Modules and Module Types *) | 
VernacDefineModule (export,m,bl,tys,bd) -> let b = pr_module_binders bl pr_lconstr in return ( hov 2 (keyword "Module" ++ spc() ++ pr_require_token export ++ pr_lident m ++ b ++ pr_of_module_type pr_lconstr tys ++ (if List.is_empty bd then mt () else str ":= ") ++ prlist_with_sep (fun () -> str " <+") (pr_module_ast_inl true pr_lconstr) bd) ) | VernacDeclareModule (export,id,bl,m1) -> let b = pr_module_binders bl pr_lconstr in return ( hov 2 (keyword "Declare Module" ++ spc() ++ pr_require_token export ++ pr_lident id ++ b ++ str " :" ++ pr_module_ast_inl true pr_lconstr m1) ) | VernacDeclareModuleType (id,bl,tyl,m) -> let b = pr_module_binders bl pr_lconstr in let pr_mt = pr_module_ast_inl true pr_lconstr in return ( hov 2 (keyword "Module Type " ++ pr_lident id ++ b ++ prlist_strict (fun m -> str " <:" ++ pr_mt m) tyl ++ (if List.is_empty m then mt () else str ":= ") ++ prlist_with_sep (fun () -> str " <+ ") pr_mt m) ) | VernacInclude (mexprs) -> let pr_m = pr_module_ast_inl false pr_lconstr in return ( hov 2 (keyword "Include" ++ spc() ++ prlist_with_sep (fun () -> str " <+ ") pr_m mexprs) ) (* Auxiliary file and library management *) | VernacAddLoadPath { implicit; physical_path; logical_path } -> return ( hov 2 (keyword "Add" ++ (if implicit then spc () ++ keyword "Rec" ++ spc () else spc()) ++ keyword "LoadPath" ++ spc() ++ qs physical_path ++ spc() ++ keyword "as" ++ spc() ++ DirPath.print logical_path)) | VernacRemoveLoadPath s -> return (keyword "Remove LoadPath" ++ qs s) | VernacAddMLPath (s) -> return ( keyword "Add" ++ keyword "ML Path" ++ qs s ) | VernacDeclareMLModule (l) -> return ( hov 2 (keyword "Declare ML Module" ++ spc() ++ prlist_with_sep sep qs l) ) | VernacChdir s -> return (keyword "Cd" ++ pr_opt qs s) (* Commands *) | VernacCreateHintDb (dbname,b) -> return ( hov 1 (keyword "Create HintDb" ++ spc () ++ str dbname ++ (if b then str" discriminated" else mt ())) ) | VernacRemoveHints (dbnames, ids) -> return ( hov 1 (keyword "Remove Hints" ++ spc () ++ prlist_with_sep spc (fun r -> pr_id (coerce_reference_to_id r)) ids ++ pr_opt_hintbases dbnames) ) | VernacHints (dbnames,h) -> return (pr_hints dbnames h pr_constr pr_constr_pattern_expr) | VernacSyntacticDefinition (id,(ids,c),l) -> return ( hov 2 (keyword "Notation" ++ spc () ++ pr_lident id ++ spc () ++ prlist_with_sep spc pr_id ids ++ str":=" ++ pr_constrarg c ++ pr_syntax_modifiers l) ) | VernacArguments (q, args, more_implicits, mods) -> return ( hov 2 ( keyword "Arguments" ++ spc() ++ pr_smart_global q ++ let pr_s = function None -> str"" | Some {v=s} -> str "%" ++ str s in let pr_if b x = if b then x else str "" in let pr_one_arg (x,k) = pr_if k (str"!") ++ Name.print x in let pr_br imp force x = let left,right = match imp with | Glob_term.NonMaxImplicit -> str "[", str "]" | Glob_term.MaxImplicit -> str "{", str "}" | Glob_term.Explicit -> if force then str"(",str")" else mt(),mt() in left ++ x ++ right in let get_arguments_like s imp tl = if s = None && imp = Glob_term.Explicit then [], tl else let rec fold extra = function | RealArg arg :: tl when Option.equal (fun a b -> String.equal a.CAst.v b.CAst.v) arg.notation_scope s && arg.implicit_status = imp -> fold ((arg.name,arg.recarg_like) :: extra) tl | args -> List.rev extra, args in fold [] tl in let rec print_arguments = function | [] -> mt() | VolatileArg :: l -> spc () ++ str"/" ++ print_arguments l | BidiArg :: l -> spc () ++ str"&" ++ print_arguments l | RealArg { name = id; recarg_like = k; notation_scope = s; implicit_status = imp } :: tl -> let 
extra, tl = get_arguments_like s imp tl in spc() ++ hov 1 (pr_br imp (extra<>[]) (prlist_with_sep spc pr_one_arg ((id,k)::extra)) ++ pr_s s) ++ print_arguments tl in let rec print_implicits = function | [] -> mt () | (name, impl) :: rest -> spc() ++ pr_br impl false (Name.print name) ++ print_implicits rest in print_arguments args ++ if not (List.is_empty more_implicits) then prlist (fun l -> str"," ++ print_implicits l) more_implicits else (mt ()) ++ (if not (List.is_empty mods) then str" : " else str"") ++ prlist_with_sep (fun () -> str", " ++ spc()) (function | `ReductionDontExposeCase -> keyword "simpl nomatch" | `ReductionNeverUnfold -> keyword "simpl never" | `DefaultImplicits -> keyword "default implicits" | `Rename -> keyword "rename" | `Assert -> keyword "assert" | `ExtraScopes -> keyword "extra scopes" | `ClearImplicits -> keyword "clear implicits" | `ClearScopes -> keyword "clear scopes" | `ClearBidiHint -> keyword "clear bidirectionality hint") mods) ) | VernacReserve bl -> let n = List.length (List.flatten (List.map fst bl)) in return ( hov 2 (tag_keyword (str"Implicit Type" ++ str (if n > 1 then "s " else " ")) ++ pr_ne_params_list pr_lconstr_expr (List.map (fun sb -> false,sb) bl)) ) | VernacGeneralizable g -> return ( hov 1 (tag_keyword ( str"Generalizable Variable" ++ match g with | None -> str "s none" | Some [] -> str "s all" | Some idl -> str (if List.length idl > 1 then "s " else " ") ++ prlist_with_sep spc pr_lident idl) )) | VernacSetOpacity(k,l) when Conv_oracle.is_transparent k -> return ( hov 1 (keyword "Transparent" ++ spc() ++ prlist_with_sep sep pr_smart_global l) ) | VernacSetOpacity(Conv_oracle.Opaque,l) -> return ( hov 1 (keyword "Opaque" ++ spc() ++ prlist_with_sep sep pr_smart_global l) ) | VernacSetOpacity _ -> return ( CErrors.anomaly (keyword "VernacSetOpacity used to set something else.") ) | VernacSetStrategy l -> let pr_lev = function | Conv_oracle.Opaque -> keyword "opaque" | Conv_oracle.Expand -> keyword "expand" | l when Conv_oracle.is_transparent l -> keyword "transparent" | Conv_oracle.Level n -> int n in let pr_line (l,q) = hov 2 (pr_lev l ++ spc() ++ str"[" ++ prlist_with_sep sep pr_smart_global q ++ str"]") in return ( hov 1 (keyword "Strategy" ++ spc() ++ hv 0 (prlist_with_sep sep pr_line l)) ) | VernacSetOption (export, na,v) -> let export = if export then keyword "Export" ++ spc () else mt () in let set = if v == OptionUnset then "Unset" else "Set" in return ( hov 2 (export ++ keyword set ++ spc() ++ pr_set_option na v) ) | VernacAddOption (na,l) -> return ( hov 2 (keyword "Add" ++ spc() ++ pr_printoption na (Some l)) ) | VernacRemoveOption (na,l) -> return ( hov 2 (keyword "Remove" ++ spc() ++ pr_printoption na (Some l)) ) | VernacMemOption (na,l) -> return ( hov 2 (keyword "Test" ++ spc() ++ pr_printoption na (Some l)) ) | VernacPrintOption na -> return ( hov 2 (keyword "Test" ++ spc() ++ pr_printoption na None) ) | VernacCheckMayEval (r,io,c) -> let pr_mayeval r c = match r with | Some r0 -> hov 2 (keyword "Eval" ++ spc() ++ pr_red_expr r0 ++ spc() ++ keyword "in" ++ spc () ++ pr_lconstr c) | None -> hov 2 (keyword "Check" ++ spc() ++ pr_lconstr c) in let pr_i = match io with None -> mt () | Some i -> Goal_select.pr_goal_selector i ++ str ": " in return (pr_i ++ pr_mayeval r c) | VernacGlobalCheck c -> return (hov 2 (keyword "Type" ++ pr_constrarg c)) | VernacDeclareReduction (s,r) -> return ( keyword "Declare Reduction" ++ spc () ++ str s ++ str " := " ++ pr_red_expr r ) | VernacPrint p -> return (pr_printable p) | VernacSearch 
(sea,g,sea_r) -> return (pr_search sea g sea_r @@ pr_constr_pattern_expr) | VernacLocate loc -> let pr_locate =function | LocateAny qid -> pr_smart_global qid | LocateTerm qid -> keyword "Term" ++ spc() ++ pr_smart_global qid | LocateFile f -> keyword "File" ++ spc() ++ qs f | LocateLibrary qid -> keyword "Library" ++ spc () ++ pr_module qid | LocateModule qid -> keyword "Module" ++ spc () ++ pr_module qid | LocateOther (s, qid) -> keyword s ++ spc () ++ pr_ltac_ref qid in return (keyword "Locate" ++ spc() ++ pr_locate loc) | VernacRegister (qid, RegisterCoqlib name) -> return ( hov 2 (keyword "Register" ++ spc() ++ pr_qualid qid ++ spc () ++ str "as" ++ spc () ++ pr_qualid name) ) | VernacRegister (qid, RegisterInline) -> return ( hov 2 (keyword "Register Inline" ++ spc() ++ pr_qualid qid) ) | VernacPrimitive(id,r,typopt) -> hov 2 (keyword "Primitive" ++ spc() ++ pr_ident_decl id ++ (Option.cata (fun ty -> spc() ++ str":" ++ pr_spc_lconstr ty) (mt()) typopt) ++ spc() ++ str ":=" ++ spc() ++ str (CPrimitives.op_or_type_to_string r)) | VernacComments l -> return ( hov 2 (keyword "Comments" ++ spc() ++ prlist_with_sep sep (pr_comment pr_constr) l) ) (* For extension *) | VernacExtend (s,c) -> return (pr_extend s c) | VernacProof (None, None) -> return (keyword "Proof") | VernacProof (None, Some e) -> return (keyword "Proof " ++ spc () ++ keyword "using" ++ spc() ++ pr_using e) | VernacProof (Some te, None) -> return (keyword "Proof with" ++ spc() ++ pr_gen te) | VernacProof (Some te, Some e) -> return ( keyword "Proof" ++ spc () ++ keyword "using" ++ spc() ++ pr_using e ++ spc() ++ keyword "with" ++ spc() ++ pr_gen te ) | VernacProofMode s -> return (keyword "Proof Mode" ++ str s) | VernacBullet b -> (* XXX: Redundant with Proof_bullet.print *) return (let open Proof_bullet in begin match b with | Dash n -> str (String.make n '-') | Star n -> str (String.make n '*') | Plus n -> str (String.make n '+') end) | VernacSubproof None -> return (str "{") | VernacSubproof (Some i) -> return (Goal_select.pr_goal_selector i ++ str ":" ++ spc () ++ str "{") | VernacEndSubproof -> return (str "}") let pr_control_flag (p : control_flag) = let w = match p with | ControlTime _ -> keyword "Time" | ControlRedirect s -> keyword "Redirect" ++ spc() ++ qs s | ControlTimeout n -> keyword "Timeout " ++ int n | ControlFail -> keyword "Fail" | ControlSucceed -> keyword "Succeed" in w ++ spc () let pr_vernac_control flags = Pp.prlist pr_control_flag flags let pr_vernac_attributes = function | [] -> mt () | flags -> str "#[" ++ prlist_with_sep pr_comma Attributes.pr_vernac_flag flags ++ str "]" ++ cut () let pr_vernac ({v = {control; attrs; expr}} as v) = tag_vernac v (pr_vernac_control control ++ pr_vernac_attributes attrs ++ pr_vernac_expr expr ++ sep_end expr) coq-8.15.0/vernac/ppvernac.mli000066400000000000000000000024561417001151100161640ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Pp.t) -> 'a Extend.constr_entry_key_gen -> Pp.t val pr_syntax_modifier : Vernacexpr.syntax_modifier CAst.t -> Pp.t (** Prints a fixpoint body *) val pr_rec_definition : Vernacexpr.fixpoint_expr -> Pp.t (** Prints a vernac expression without dot *) val pr_vernac_expr : Vernacexpr.vernac_expr -> Pp.t (** Prints a "proof using X" clause. *) val pr_using : Vernacexpr.section_subset_expr -> Pp.t (** Prints a vernac expression and closes it with a dot. 
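A usage sketch (the [ast] value is assumed to be obtained from the parser; [Feedback.msg_notice] is only one possible way to emit the result):

    {[
      let print_sentence (ast : Vernacexpr.vernac_control) : unit =
        Feedback.msg_notice (pr_vernac ast)
    ]}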
*) val pr_vernac : Vernacexpr.vernac_control -> Pp.t coq-8.15.0/vernac/prettyp.ml000066400000000000000000001144541417001151100157060ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* * on May-June 2006 for implementation of abstraction of pretty-printing of objects. *) open Pp open CErrors open Util open CAst open Names open Termops open Declarations open Environ open Impargs open Libobject open Libnames open Globnames open Printer open Printmod open Context.Rel.Declaration module RelDecl = Context.Rel.Declaration module NamedDecl = Context.Named.Declaration type object_pr = { print_inductive : MutInd.t -> UnivNames.univ_name_list option -> Pp.t; print_constant_with_infos : Constant.t -> UnivNames.univ_name_list option -> Pp.t; print_section_variable : env -> Evd.evar_map -> variable -> Pp.t; print_syntactic_def : env -> KerName.t -> Pp.t; print_module : ModPath.t -> Pp.t; print_modtype : ModPath.t -> Pp.t; print_named_decl : env -> Evd.evar_map -> Constr.named_declaration -> Pp.t; print_library_entry : env -> Evd.evar_map -> bool -> (object_name * Lib.node) -> Pp.t option; print_context : env -> Evd.evar_map -> bool -> int option -> Lib.library_segment -> Pp.t; print_typed_value_in_env : Environ.env -> Evd.evar_map -> EConstr.constr * EConstr.types -> Pp.t; print_eval : Reductionops.reduction_function -> env -> Evd.evar_map -> Constrexpr.constr_expr -> EConstr.unsafe_judgment -> Pp.t; } let gallina_print_module mp = print_module ~with_body:true mp let gallina_print_modtype = print_modtype (**************) (** Utilities *) let print_closed_sections = ref false let pr_infos_list l = v 0 (prlist_with_sep cut (fun x -> x) l) let with_line_skip l = if List.is_empty l then mt() else fnl() ++ fnl () ++ pr_infos_list l let blankline = mt() (* add a blank sentence in the list of infos *) let add_colon prefix = if ismt prefix then mt () else prefix ++ str ": " let int_or_no n = if Int.equal n 0 then str "no" else int n (*******************) (** Basic printing *) let print_basename sp = pr_global (GlobRef.ConstRef sp) let print_ref reduce ref udecl = let env = Global.env () in let typ, univs = Typeops.type_of_global_in_context env ref in let inst = Univ.make_abstract_instance univs in let bl = Printer.universe_binders_with_opt_names (Environ.universes_of_global env ref) udecl in let sigma = Evd.from_ctx (UState.of_binders bl) in let typ = if reduce then let ctx,ccl = Reductionops.splay_prod_assum env sigma (EConstr.of_constr typ) in EConstr.to_constr sigma (EConstr.it_mkProd_or_LetIn ccl ctx) else typ in let typ = Arguments_renaming.rename_type typ ref in let impargs = select_stronger_impargs (implicits_of_global ref) in let impargs = List.map binding_kind_of_status impargs in let variance = let open GlobRef in match ref with | VarRef _ | ConstRef _ -> None | IndRef (ind,_) | ConstructRef ((ind,_),_) -> let mind = Environ.lookup_mind ind env in mind.Declarations.mind_variance in let inst = if Global.is_polymorphic ref then Printer.pr_universe_instance sigma inst else mt () in let priv = None in (* We deliberately don't print private univs in About. 
*) hov 0 (pr_global ref ++ inst ++ str " :" ++ spc () ++ pr_ltype_env env sigma ~impargs typ ++ Printer.pr_abstract_universe_ctx sigma ?variance univs ?priv) (********************************) (** Printing implicit arguments *) let pr_impl_name imp = Id.print (name_of_implicit imp) let print_impargs_by_name max = function | [] -> [] | impls -> let n = List.length impls in [hov 0 (str (String.plural n "Argument") ++ spc() ++ prlist_with_sep pr_comma pr_impl_name impls ++ spc() ++ str (String.conjugate_verb_to_be n) ++ str" implicit" ++ (if max then strbrk " and maximally inserted" else mt()))] let print_one_impargs_list l = let imps = List.filter is_status_implicit l in let maximps = List.filter Impargs.maximal_insertion_of imps in let nonmaximps = List.subtract (=) imps maximps in (* FIXME *) print_impargs_by_name false nonmaximps @ print_impargs_by_name true maximps let print_impargs_list prefix l = let l = extract_impargs_data l in List.flatten (List.map (fun (cond,imps) -> match cond with | None -> List.map (fun pp -> add_colon prefix ++ pp) (print_one_impargs_list imps) | Some (n1,n2) -> [v 2 (prlist_with_sep cut (fun x -> x) [(if ismt prefix then str "When" else prefix ++ str ", when") ++ str " applied to " ++ (if Int.equal n1 n2 then int_or_no n2 else if Int.equal n1 0 then str "no more than " ++ int n2 else int n1 ++ str " to " ++ int_or_no n2) ++ str (String.plural n2 " argument") ++ str ":"; v 0 (prlist_with_sep cut (fun x -> x) (if List.exists is_status_implicit imps then print_one_impargs_list imps else [str "No implicit arguments"]))])]) l) let need_expansion impl ref = let typ, _ = Typeops.type_of_global_in_context (Global.env ()) ref in let ctx = Term.prod_assum typ in let nprods = List.count is_local_assum ctx in not (List.is_empty impl) && List.length impl >= nprods && let _,lastimpl = List.chop nprods impl in List.exists is_status_implicit lastimpl let print_impargs ref = let ref = Smartlocate.smart_global ref in let impl = implicits_of_global ref in let has_impl = not (List.is_empty impl) in (* Need to reduce since implicits are computed with products flattened *) pr_infos_list ([ print_ref (need_expansion (select_impargs_size 0 impl) ref) ref None; blankline ] @ (if has_impl then print_impargs_list (mt()) impl else [str "No implicit arguments"])) (*********************) (** Printing Opacity *) type opacity = | FullyOpaque | TransparentMaybeOpacified of Conv_oracle.level let opacity env = function | GlobRef.VarRef v when NamedDecl.is_local_def (Environ.lookup_named v env) -> Some(TransparentMaybeOpacified (Conv_oracle.get_strategy (Environ.oracle env) (VarKey v))) | GlobRef.ConstRef cst -> let cb = Environ.lookup_constant cst env in (match cb.const_body with | Undef _ | Primitive _ -> None | OpaqueDef _ -> Some FullyOpaque | Def _ -> Some (TransparentMaybeOpacified (Conv_oracle.get_strategy (Environ.oracle env) (ConstKey cst)))) | _ -> None let print_opacity ref = match opacity (Global.env()) ref with | None -> [] | Some s -> [pr_global ref ++ str " is " ++ match s with | FullyOpaque -> str "opaque" | TransparentMaybeOpacified Conv_oracle.Opaque -> str "basically transparent but considered opaque for reduction" | TransparentMaybeOpacified lev when Conv_oracle.is_transparent lev -> str "transparent" | TransparentMaybeOpacified (Conv_oracle.Level n) -> str "transparent (with expansion weight " ++ int n ++ str ")" | TransparentMaybeOpacified Conv_oracle.Expand -> str "transparent (with minimal expansion weight)"] (*******************) let print_if_is_coercion ref = if 
Coercionops.coercion_exists ref then [pr_global ref ++ str " is a coercion"] else [] (*******************) (* *) let pr_template_variables = function | [] -> mt () | vars -> str " on " ++ prlist_with_sep spc UnivNames.(pr_with_global_universes empty_binders) vars let print_polymorphism ref = let poly = Global.is_polymorphic ref in let template_poly = Global.is_template_polymorphic ref in let template_variables = Global.get_template_polymorphic_variables ref in [ pr_global ref ++ str " is " ++ (if poly then str "universe polymorphic" else if template_poly then str "template universe polymorphic" ++ if !Detyping.print_universes then h (pr_template_variables template_variables) else mt() else str "not universe polymorphic") ] let print_type_in_type ref = let unsafe = Global.is_type_in_type ref in if unsafe then [ pr_global ref ++ str " relies on an unsafe universe hierarchy"] else [] let print_primitive_record recflag mipv = function | PrimRecord _ -> let eta = match recflag with | CoFinite | Finite -> str" without eta conversion" | BiFinite -> str " with eta conversion" in [Id.print mipv.(0).mind_typename ++ str" has primitive projections" ++ eta ++ str"."] | FakeRecord | NotRecord -> [] let print_primitive ref = match ref with | GlobRef.IndRef ind -> let mib,_ = Global.lookup_inductive ind in print_primitive_record mib.mind_finite mib.mind_packets mib.mind_record | _ -> [] let needs_extra_scopes ref scopes = let open Constr in let rec aux env t = function | [] -> false | _::scopes -> match kind (Reduction.whd_all env t) with | Prod (na,dom,codom) -> aux (push_rel (RelDecl.LocalAssum (na,dom)) env) codom scopes | _ -> true in let env = Global.env() in let ty, _ctx = Typeops.type_of_global_in_context env ref in aux env ty scopes let implicit_kind_of_status = function | None -> Anonymous, Glob_term.Explicit | Some ((na,_,_),_,(maximal,_)) -> na, if maximal then Glob_term.MaxImplicit else Glob_term.NonMaxImplicit let extra_implicit_kind_of_status imp = let _,imp = implicit_kind_of_status imp in (Anonymous, imp) let dummy = { Vernacexpr.implicit_status = Glob_term.Explicit; name = Anonymous; recarg_like = false; notation_scope = None; } let is_dummy = function | Vernacexpr.(RealArg {implicit_status; name; recarg_like; notation_scope}) -> name = Anonymous && not recarg_like && notation_scope = None && implicit_status = Glob_term.Explicit | _ -> false let rec main_implicits i renames recargs scopes impls = if renames = [] && recargs = [] && scopes = [] && impls = [] then [] else let recarg_like, recargs = match recargs with | j :: recargs when i = j -> true, recargs | _ -> false, recargs in let (name, implicit_status) = match renames, impls with | _, (Some _ as i) :: _ -> implicit_kind_of_status i | name::_, _ -> (name,Glob_term.Explicit) | [], (None::_ | []) -> (Anonymous, Glob_term.Explicit) in let notation_scope = match scopes with | scope :: _ -> Option.map CAst.make scope | [] -> None in let status = {Vernacexpr.implicit_status; name; recarg_like; notation_scope} in let tl = function [] -> [] | _::tl -> tl in (* recargs is special -> tl handled above *) let rest = main_implicits (i+1) (tl renames) recargs (tl scopes) (tl impls) in status :: rest let rec insert_fake_args volatile bidi impls = let open Vernacexpr in match volatile, bidi with | Some 0, _ -> VolatileArg :: insert_fake_args None bidi impls | _, Some 0 -> BidiArg :: insert_fake_args volatile None impls | None, None -> List.map (fun a -> RealArg a) impls | _, _ -> let hd, tl = match impls with | impl :: impls -> impl, impls | [] -> 
dummy, [] in let f = Option.map pred in RealArg hd :: insert_fake_args (f volatile) (f bidi) tl let print_arguments ref = let qid = Nametab.shortest_qualid_of_global Id.Set.empty ref in let flags, recargs, nargs_for_red = let open Reductionops.ReductionBehaviour in match get ref with | None -> [], [], None | Some NeverUnfold -> [`ReductionNeverUnfold], [], None | Some (UnfoldWhen { nargs; recargs }) -> [], recargs, nargs | Some (UnfoldWhenNoMatch { nargs; recargs }) -> [`ReductionDontExposeCase], recargs, nargs in let names, not_renamed = try Arguments_renaming.arguments_names ref, false with Not_found -> let env = Global.env () in let ty, _ = Typeops.type_of_global_in_context env ref in List.map pi1 (Impargs.compute_implicits_names env (Evd.from_env env) (EConstr.of_constr ty)), true in let scopes = Notation.find_arguments_scope ref in let flags = if needs_extra_scopes ref scopes then `ExtraScopes::flags else flags in let impls = Impargs.extract_impargs_data (Impargs.implicits_of_global ref) in let impls, moreimpls = match impls with | (_, impls) :: rest -> impls, rest | [] -> assert false in let impls = main_implicits 0 names recargs scopes impls in let moreimpls = List.map (fun (_,i) -> List.map extra_implicit_kind_of_status i) moreimpls in let bidi = Pretyping.get_bidirectionality_hint ref in let impls = insert_fake_args nargs_for_red bidi impls in if List.for_all is_dummy impls && moreimpls = [] && flags = [] then [] else let open Constrexpr in let open Vernacexpr in [Ppvernac.pr_vernac_expr (VernacArguments (CAst.make (AN qid), impls, moreimpls, flags)) ++ (if not_renamed then mt () else fnl () ++ str " (where some original arguments have been renamed)")] let print_name_infos ref = let type_info_for_implicit = if need_expansion (select_impargs_size 0 (implicits_of_global ref)) ref then (* Need to reduce since implicits are computed with products flattened *) [str "Expanded type for implicit arguments"; print_ref true ref None; blankline] else [] in print_type_in_type ref @ print_primitive ref @ type_info_for_implicit @ print_arguments ref @ print_if_is_coercion ref let print_inductive_args sp mipv = let flatmapi f v = List.flatten (Array.to_list (Array.mapi f v)) in flatmapi (fun i mip -> print_arguments (GlobRef.IndRef (sp,i)) @ flatmapi (fun j _ -> print_arguments (GlobRef.ConstructRef ((sp,i),j+1))) mip.mind_consnames) mipv let print_bidi_hints gr = match Pretyping.get_bidirectionality_hint gr with | None -> [] | Some nargs -> [str "Using typing information from context after typing the " ++ int nargs ++ str " first arguments"] (*********************) (* "Locate" commands *) type 'a locatable_info = { locate : qualid -> 'a option; locate_all : qualid -> 'a list; shortest_qualid : 'a -> qualid; name : 'a -> Pp.t; print : 'a -> Pp.t; about : 'a -> Pp.t; } type locatable = Locatable : 'a locatable_info -> locatable type logical_name = | Term of GlobRef.t | Dir of Nametab.GlobDirRef.t | Syntactic of KerName.t | Module of ModPath.t | ModuleType of ModPath.t | Other : 'a * 'a locatable_info -> logical_name | Undefined of qualid (** Generic table for objects that are accessible through a name. 
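New classes of named objects are registered with [register_locatable], defined just below. A minimal, purely hypothetical sketch that reuses [qualid] itself as the payload type:

    {[
      let example_info : qualid locatable_info = {
        locate = (fun qid -> Some qid);
        locate_all = (fun qid -> [qid]);
        shortest_qualid = (fun qid -> qid);
        name = pr_qualid;
        print = pr_qualid;
        about = pr_qualid;
      }
      let () = register_locatable "Hypothetical.Example" example_info
    ]}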
*) let locatable_map : locatable String.Map.t ref = ref String.Map.empty let register_locatable name f = locatable_map := String.Map.add name (Locatable f) !locatable_map exception ObjFound of logical_name let locate_any_name qid = try Term (Nametab.locate qid) with Not_found -> try Syntactic (Nametab.locate_syndef qid) with Not_found -> try Dir (Nametab.locate_dir qid) with Not_found -> try Module (Nametab.locate_module qid) with Not_found -> try ModuleType (Nametab.locate_modtype qid) with Not_found -> let iter _ (Locatable info) = match info.locate qid with | None -> () | Some ans -> raise (ObjFound (Other (ans, info))) in try String.Map.iter iter !locatable_map; Undefined qid with ObjFound obj -> obj let pr_located_qualid = function | Term ref -> let ref_str = let open GlobRef in match ref with ConstRef _ -> "Constant" | IndRef _ -> "Inductive" | ConstructRef _ -> "Constructor" | VarRef _ -> "Variable" in str ref_str ++ spc () ++ pr_path (Nametab.path_of_global ref) | Syntactic kn -> str "Notation" ++ spc () ++ pr_path (Nametab.path_of_syndef kn) | Dir dir -> let s,dir = let open Nametab in let open GlobDirRef in match dir with | DirOpenModule { obj_dir ; _ } -> "Open Module", obj_dir | DirOpenModtype { obj_dir ; _ } -> "Open Module Type", obj_dir | DirOpenSection { obj_dir ; _ } -> "Open Section", obj_dir in str s ++ spc () ++ DirPath.print dir | Module mp -> str "Module" ++ spc () ++ DirPath.print (Nametab.dirpath_of_module mp) | ModuleType mp -> str "Module Type" ++ spc () ++ pr_path (Nametab.path_of_modtype mp) | Other (obj, info) -> info.name obj | Undefined qid -> pr_qualid qid ++ spc () ++ str "not a defined object." let canonize_ref = let open GlobRef in function | ConstRef c -> let kn = Constant.canonical c in if KerName.equal (Constant.user c) kn then None else Some (ConstRef (Constant.make1 kn)) | IndRef (ind,i) -> let kn = MutInd.canonical ind in if KerName.equal (MutInd.user ind) kn then None else Some (IndRef (MutInd.make1 kn, i)) | ConstructRef ((ind,i),j) -> let kn = MutInd.canonical ind in if KerName.equal (MutInd.user ind) kn then None else Some (ConstructRef ((MutInd.make1 kn, i),j)) | VarRef _ -> None let display_alias = function | Term r -> begin match canonize_ref r with | None -> mt () | Some r' -> let q' = Nametab.shortest_qualid_of_global Id.Set.empty r' in spc () ++ str "(alias of " ++ pr_qualid q' ++ str ")" end | _ -> mt () let locate_term qid = let expand = function | TrueGlobal ref -> Term ref, Nametab.shortest_qualid_of_global Id.Set.empty ref | SynDef kn -> Syntactic kn, Nametab.shortest_qualid_of_syndef Id.Set.empty kn in List.map expand (Nametab.locate_extended_all qid) let locate_module qid = let all = Nametab.locate_extended_all_module qid in let map mp = Module mp, Nametab.shortest_qualid_of_module mp in let mods = List.map map all in (* Don't forget the opened modules: they are not part of the same name tab. *) let all = Nametab.locate_extended_all_dir qid in let map dir = let open Nametab.GlobDirRef in match dir with | DirOpenModule _ -> Some (Dir dir, qid) | _ -> None in mods @ List.map_filter map all let locate_modtype qid = let all = Nametab.locate_extended_all_modtype qid in let map mp = ModuleType mp, Nametab.shortest_qualid_of_modtype mp in let modtypes = List.map map all in (* Don't forget the opened module types: they are not part of the same name tab. 
*) let all = Nametab.locate_extended_all_dir qid in let map dir = let open Nametab.GlobDirRef in match dir with | DirOpenModtype _ -> Some (Dir dir, qid) | _ -> None in modtypes @ List.map_filter map all let locate_other s qid = let Locatable info = String.Map.find s !locatable_map in let ans = info.locate_all qid in let map obj = (Other (obj, info), info.shortest_qualid obj) in List.map map ans type locatable_kind = | LocTerm | LocModule | LocOther of string | LocAny let print_located_qualid name flags qid = let located = match flags with | LocTerm -> locate_term qid | LocModule -> locate_modtype qid @ locate_module qid | LocOther s -> locate_other s qid | LocAny -> locate_term qid @ locate_modtype qid @ locate_module qid @ String.Map.fold (fun s _ accu -> locate_other s qid @ accu) !locatable_map [] in match located with | [] -> let (dir,id) = repr_qualid qid in if DirPath.is_empty dir then str "No " ++ str name ++ str " of basename" ++ spc () ++ Id.print id else str "No " ++ str name ++ str " of suffix" ++ spc () ++ pr_qualid qid | l -> prlist_with_sep fnl (fun (o,oqid) -> hov 2 (pr_located_qualid o ++ (if not (qualid_eq oqid qid) then spc() ++ str "(shorter name to refer to it in current context is " ++ pr_qualid oqid ++ str")" else mt ()) ++ display_alias o)) l let print_located_term ref = print_located_qualid "term" LocTerm ref let print_located_other s ref = print_located_qualid s (LocOther s) ref let print_located_module ref = print_located_qualid "module" LocModule ref let print_located_qualid ref = print_located_qualid "object" LocAny ref (******************************************) (**** Printing declarations and judgments *) (**** Gallina layer *****) let gallina_print_typed_value_in_env env sigma (trm,typ) = (pr_leconstr_env ~inctx:true env sigma trm ++ fnl () ++ str " : " ++ pr_letype_env env sigma typ) (* To be improved; the type should be used to provide the types in the abstractions. 
This should be done recursively inside pr_lconstr, so that the pretty-print of a proposition (P:(nat->nat)->Prop)(P [u]u) synthesizes the type nat of the abstraction on u *) let print_named_def env sigma name body typ = let pbody = pr_lconstr_env ~inctx:true env sigma body in let ptyp = pr_ltype_env env sigma typ in let pbody = if Constr.isCast body then surround pbody else pbody in (str "*** [" ++ str name ++ str " " ++ hov 0 (str ":=" ++ brk (1,2) ++ pbody ++ spc () ++ str ":" ++ brk (1,2) ++ ptyp) ++ str "]") let print_named_assum env sigma name typ = str "*** [" ++ str name ++ str " : " ++ pr_ltype_env env sigma typ ++ str "]" let gallina_print_named_decl env sigma = let open Context.Named.Declaration in function | LocalAssum (id, typ) -> print_named_assum env sigma (Id.to_string id.Context.binder_name) typ | LocalDef (id, body, typ) -> print_named_def env sigma (Id.to_string id.Context.binder_name) body typ let assumptions_for_print lna = List.fold_right (fun na env -> add_name na env) lna empty_names_context (*********************) (* *) let gallina_print_inductive sp udecl = let env = Global.env() in let mib = Environ.lookup_mind sp env in let mipv = mib.mind_packets in pr_mutual_inductive_body env sp mib udecl ++ with_line_skip (print_primitive_record mib.mind_finite mipv mib.mind_record @ print_inductive_args sp mipv) let print_named_decl env sigma id = gallina_print_named_decl env sigma (Global.lookup_named id) ++ fnl () let gallina_print_section_variable env sigma id = print_named_decl env sigma id ++ with_line_skip (print_name_infos (GlobRef.VarRef id)) let print_body env evd = function | Some c -> pr_lconstr_env ~inctx:true env evd c | None -> (str"") let print_typed_body env evd (val_0,typ) = (print_body env evd val_0 ++ fnl () ++ str " : " ++ pr_ltype_env env evd typ) let print_instance sigma cb = if Declareops.constant_is_polymorphic cb then let univs = Declareops.constant_polymorphic_context cb in let inst = Univ.make_abstract_instance univs in pr_universe_instance sigma inst else mt() let print_constant with_values sep sp udecl = let cb = Global.lookup_constant sp in let val_0 = Global.body_of_constant_body Library.indirect_accessor cb in let typ = cb.const_type in let univs = cb.const_universes in let uctx = UState.of_binders (Printer.universe_binders_with_opt_names (Declareops.constant_polymorphic_context cb) udecl) in let env = Global.env () and sigma = Evd.from_ctx uctx in let pr_ltype = pr_ltype_env env sigma in hov 0 ( match val_0 with | None -> str"*** [ " ++ print_basename sp ++ print_instance sigma cb ++ str " : " ++ cut () ++ pr_ltype typ ++ str" ]" ++ Printer.pr_universes sigma univs | Some (c, priv, ctx) -> let priv = match priv with | Opaqueproof.PrivateMonomorphic () -> None | Opaqueproof.PrivatePolymorphic (_, ctx) -> Some ctx in print_basename sp ++ print_instance sigma cb ++ str sep ++ cut () ++ (if with_values then print_typed_body env sigma (Some c,typ) else pr_ltype typ)++ Printer.pr_universes sigma univs ?priv) let gallina_print_constant_with_infos sp udecl = print_constant true " = " sp udecl ++ with_line_skip (print_name_infos (GlobRef.ConstRef sp)) let gallina_print_syntactic_def env kn = let qid = Nametab.shortest_qualid_of_syndef Id.Set.empty kn and (vars,a) = Syntax_def.search_syntactic_definition kn in let c = Notation_ops.glob_constr_of_notation_constr a in hov 2 (hov 4 (str "Notation " ++ pr_qualid qid ++ prlist (fun id -> spc () ++ Id.print id) (List.map fst vars) ++ spc () ++ str ":=") ++ spc () ++ Constrextern.without_specific_symbols 
[Notation.SynDefRule kn] (pr_glob_constr_env env (Evd.from_env env)) c) module DynHandle = Libobject.Dyn.Map(struct type 'a t = 'a -> Pp.t option end) let handle h (Libobject.Dyn.Dyn (tag, o)) = match DynHandle.find tag h with | f -> f o | exception Not_found -> None (* TODO: this kind of feature should not rely on the Libobject stack. There is no reason that an object in the stack corresponds to a user-facing declaration. It may have been so at the time this was written, but this needs to be done in a more principled way. *) let gallina_print_leaf_entry env sigma with_values ((sp, kn),lobj) = let sep = if with_values then " = " else " : " in match lobj with | AtomicObject o -> let handler = DynHandle.add Declare.Internal.objVariable begin fun _ -> (* Outside sections, VARIABLES still exist but only with universes constraints *) (try Some(print_named_decl env sigma (basename sp)) with Not_found -> None) end @@ DynHandle.add Declare.Internal.Constant.tag begin fun _ -> Some (print_constant with_values sep (Constant.make1 kn) None) end @@ DynHandle.add DeclareInd.Internal.objInductive begin fun _ -> Some (gallina_print_inductive (MutInd.make1 kn) None) end @@ DynHandle.empty in handle handler o | ModuleObject _ -> let (mp,l) = KerName.repr kn in Some (print_module ~with_body:with_values (MPdot (mp,l))) | ModuleTypeObject _ -> let (mp,l) = KerName.repr kn in Some (print_modtype (MPdot (mp,l))) | _ -> None let gallina_print_library_entry env sigma with_values ent = let pr_name (sp,_) = Id.print (basename sp) in match ent with | (oname,Lib.Leaf lobj) -> gallina_print_leaf_entry env sigma with_values (oname,lobj) | (oname,Lib.OpenedSection (dir,_)) -> Some (str " >>>>>>> Section " ++ pr_name oname) | (_,Lib.CompilingLibrary { Nametab.obj_dir; _ }) -> Some (str " >>>>>>> Library " ++ DirPath.print obj_dir) | (oname,Lib.OpenedModule _) -> Some (str " >>>>>>> Module " ++ pr_name oname) let gallina_print_context env sigma with_values = let rec prec n = function | h::rest when Option.is_empty n || Option.get n > 0 -> (match gallina_print_library_entry env sigma with_values h with | None -> prec n rest | Some pp -> prec (Option.map ((+) (-1)) n) rest ++ pp ++ fnl ()) | _ -> mt () in prec let gallina_print_eval red_fun env sigma _ {uj_val=trm;uj_type=typ} = let ntrm = red_fun env sigma trm in (str " = " ++ gallina_print_typed_value_in_env env sigma (ntrm,typ)) (******************************************) (**** Printing abstraction layer *) let default_object_pr = { print_inductive = gallina_print_inductive; print_constant_with_infos = gallina_print_constant_with_infos; print_section_variable = gallina_print_section_variable; print_syntactic_def = gallina_print_syntactic_def; print_module = gallina_print_module; print_modtype = gallina_print_modtype; print_named_decl = gallina_print_named_decl; print_library_entry = gallina_print_library_entry; print_context = gallina_print_context; print_typed_value_in_env = gallina_print_typed_value_in_env; print_eval = gallina_print_eval; } let object_pr = ref default_object_pr let set_object_pr = (:=) object_pr let print_inductive x = !object_pr.print_inductive x let print_constant_with_infos c = !object_pr.print_constant_with_infos c let print_section_variable c = !object_pr.print_section_variable c let print_syntactic_def x = !object_pr.print_syntactic_def x let print_module x = !object_pr.print_module x let print_modtype x = !object_pr.print_modtype x let print_named_decl x = !object_pr.print_named_decl x let print_library_entry x = !object_pr.print_library_entry 
x let print_context x = !object_pr.print_context x let print_typed_value_in_env x = !object_pr.print_typed_value_in_env x let print_eval x = !object_pr.print_eval x (******************************************) (**** Printing declarations and judgments *) (**** Abstract layer *****) let print_judgment env sigma {uj_val=trm;uj_type=typ} = print_typed_value_in_env env sigma (trm, typ) let print_safe_judgment env sigma j = let trm = Safe_typing.j_val j in let typ = Safe_typing.j_type j in let trm = EConstr.of_constr trm in let typ = EConstr.of_constr typ in print_typed_value_in_env env sigma (trm, typ) (*********************) (* *) let print_full_context env sigma = print_context env sigma true None (Lib.contents ()) let print_full_context_typ env sigma = print_context env sigma false None (Lib.contents ()) module DynHandleF = Libobject.Dyn.Map(struct type 'a t = 'a -> Pp.t end) let handleF h (Libobject.Dyn.Dyn (tag, o)) = match DynHandleF.find tag h with | f -> f o | exception Not_found -> mt () (* TODO: see the comment for {!gallina_print_leaf_entry} *) let print_full_pure_context env sigma = let rec prec = function | ((_,kn),Lib.Leaf AtomicObject lobj)::rest -> let handler = DynHandleF.add Declare.Internal.Constant.tag begin fun _ -> let con = Global.constant_of_delta_kn kn in let cb = Global.lookup_constant con in let typ = cb.const_type in hov 0 ( match cb.const_body with | Undef _ -> str "Parameter " ++ print_basename con ++ str " : " ++ cut () ++ pr_ltype_env env sigma typ | OpaqueDef lc -> str "Theorem " ++ print_basename con ++ cut () ++ str " : " ++ pr_ltype_env env sigma typ ++ str "." ++ fnl () ++ str "Proof " ++ pr_lconstr_env env sigma (fst (Global.force_proof Library.indirect_accessor lc)) | Def c -> str "Definition " ++ print_basename con ++ cut () ++ str " : " ++ pr_ltype_env env sigma typ ++ cut () ++ str " := " ++ pr_lconstr_env env sigma c | Primitive _ -> str "Primitive " ++ print_basename con ++ str " : " ++ cut () ++ pr_ltype_env env sigma typ) ++ str "." ++ fnl () ++ fnl () end @@ DynHandleF.add DeclareInd.Internal.objInductive begin fun _ -> let mind = Global.mind_of_delta_kn kn in let mib = Global.lookup_mind mind in pr_mutual_inductive_body (Global.env()) mind mib None ++ str "." ++ fnl () ++ fnl () end @@ DynHandleF.empty in let pp = handleF handler lobj in prec rest ++ pp | ((_,kn),Lib.Leaf ModuleObject _)::rest -> (* TODO: make it reparsable *) let (mp,l) = KerName.repr kn in prec rest ++ print_module (MPdot (mp,l)) ++ str "." ++ fnl () ++ fnl () | ((_,kn),Lib.Leaf ModuleTypeObject _)::rest -> (* TODO: make it reparsable *) let (mp,l) = KerName.repr kn in prec rest ++ print_modtype (MPdot (mp,l)) ++ str "." 
++ fnl () ++ fnl () | _::rest -> prec rest | _ -> mt () in prec (Lib.contents ()) (* For printing an inductive definition with its constructors and elimination, assume that the declaration of constructors and eliminations follows the definition of the inductive type *) (* This is designed to print the contents of an opened section *) let read_sec_context qid = let dir = try Nametab.locate_section qid with Not_found -> user_err ?loc:qid.loc (str "Unknown section.") in let rec get_cxt in_cxt = function | (_,Lib.OpenedSection ({Nametab.obj_dir;_},_) as hd)::rest -> if DirPath.equal dir obj_dir then (hd::in_cxt) else get_cxt (hd::in_cxt) rest | [] -> [] | hd::rest -> get_cxt (hd::in_cxt) rest in let cxt = Lib.contents () in List.rev (get_cxt [] cxt) let print_sec_context env sigma sec = print_context env sigma true None (read_sec_context sec) let print_sec_context_typ env sigma sec = print_context env sigma false None (read_sec_context sec) let maybe_error_reject_univ_decl na udecl = let open GlobRef in match na, udecl with | _, None | Term (ConstRef _ | IndRef _ | ConstructRef _), Some _ -> () | (Term (VarRef _) | Syntactic _ | Dir _ | Module _ | ModuleType _ | Other _ | Undefined _), Some udecl -> (* TODO Print na somehow *) user_err (str "This object does not support universe names.") let print_any_name env sigma na udecl = maybe_error_reject_univ_decl na udecl; let open GlobRef in match na with | Term (ConstRef sp) -> print_constant_with_infos sp udecl | Term (IndRef (sp,_)) -> print_inductive sp udecl | Term (ConstructRef ((sp,_),_)) -> print_inductive sp udecl | Term (VarRef sp) -> print_section_variable env sigma sp | Syntactic kn -> print_syntactic_def env kn | Module mp -> print_module mp | Dir _ -> mt () | ModuleType mp -> print_modtype mp | Other (obj, info) -> info.print obj | Undefined qid -> try (* Var locale de but, pas var de section... 
donc pas d'implicits *) let dir,str = repr_qualid qid in if not (DirPath.is_empty dir) then raise Not_found; str |> Global.lookup_named |> print_named_decl env sigma with Not_found -> user_err ?loc:qid.loc (pr_qualid qid ++ spc () ++ str "not a defined object.") let print_name env sigma na udecl = match na with | {loc; v=Constrexpr.ByNotation (ntn,sc)} -> print_any_name env sigma (Term (Notation.interp_notation_as_global_reference ?loc ~head:false (fun _ -> true) ntn sc)) udecl | {loc; v=Constrexpr.AN ref} -> print_any_name env sigma (locate_any_name ref) udecl let print_opaque_name env sigma qid = let open GlobRef in match Nametab.global qid with | ConstRef cst -> let cb = Global.lookup_constant cst in if Declareops.constant_has_body cb then print_constant_with_infos cst None else user_err Pp.(str "Not a defined constant.") | IndRef (sp,_) -> print_inductive sp None | ConstructRef cstr as gr -> let ty, ctx = Typeops.type_of_global_in_context env gr in let ty = EConstr.of_constr ty in let open EConstr in print_typed_value_in_env env sigma (mkConstruct cstr, ty) | VarRef id -> env |> lookup_named id |> print_named_decl env sigma let print_about_any ?loc env sigma k udecl = maybe_error_reject_univ_decl k udecl; match k with | Term ref -> let rb = Reductionops.ReductionBehaviour.print ref in Dumpglob.add_glob ?loc ref; pr_infos_list (print_ref false ref udecl :: blankline :: print_polymorphism ref @ print_name_infos ref @ (if Pp.ismt rb then [] else [rb]) @ print_opacity ref @ print_bidi_hints ref @ [hov 0 (str "Expands to: " ++ pr_located_qualid k)]) | Syntactic kn -> let () = match Syntax_def.search_syntactic_definition kn with | [],Notation_term.NRef (ref,_) -> Dumpglob.add_glob ?loc ref | _ -> () in v 0 ( print_syntactic_def env kn ++ fnl () ++ hov 0 (str "Expands to: " ++ pr_located_qualid k)) | Dir _ | Module _ | ModuleType _ | Undefined _ -> hov 0 (pr_located_qualid k) | Other (obj, info) -> hov 0 (info.about obj) let print_about env sigma na udecl = match na with | {loc;v=Constrexpr.ByNotation (ntn,sc)} -> print_about_any ?loc env sigma (Term (Notation.interp_notation_as_global_reference ?loc ~head:false (fun _ -> true) ntn sc)) udecl | {loc;v=Constrexpr.AN ref} -> print_about_any ?loc env sigma (locate_any_name ref) udecl (* for debug *) let inspect env sigma depth = print_context env sigma false (Some depth) (Lib.contents ()) (*************************************************************************) (* Pretty-printing functions coming from classops.ml *) open Coercionops let print_coercion_value v = Printer.pr_global v.coe_value let print_path ((i,j),p) = hov 2 ( str"[" ++ hov 0 (prlist_with_sep pr_semicolon print_coercion_value p) ++ str"] : ") ++ pr_class i ++ str" >-> " ++ pr_class j let _ = Coercionops.install_path_printer print_path let print_graph () = prlist_with_sep fnl print_path (inheritance_graph()) let print_classes () = pr_sequence pr_class (classes()) let print_coercions () = pr_sequence print_coercion_value (coercions()) let print_path_between cls clt = let p = try lookup_path_between_class (cls, clt) with Not_found -> user_err (str"No path between " ++ pr_class cls ++ str" and " ++ pr_class clt ++ str ".") in print_path ((cls, clt), p) let print_canonical_projections env sigma grefs = let open Structures in let match_proj_gref { CSTable.projection; value; solution } gr = GlobRef.equal projection gr || begin match value with | ValuePattern.Const_cs y -> GlobRef.equal y gr | _ -> false end || GlobRef.equal solution gr in let projs = List.filter (fun p -> List.for_all 
(match_proj_gref p) grefs) (CSTable.entries ()) in prlist_with_sep fnl (fun { CSTable.projection; value; solution } -> ValuePattern.print value ++ str " <- " ++ pr_global projection ++ str " ( " ++ pr_global solution ++ str " )") projs (*************************************************************************) (*************************************************************************) (* Pretty-printing functions for type classes *) open Typeclasses let pr_typeclass env t = print_ref false t.cl_impl None let print_typeclasses () = let env = Global.env () in prlist_with_sep fnl (pr_typeclass env) (typeclasses ()) let pr_instance env i = (* gallina_print_constant_with_infos i.is_impl *) (* lighter *) print_ref false (instance_impl i) None ++ begin match hint_priority i with | None -> mt () | Some i -> spc () ++ str "|" ++ spc () ++ int i end let print_all_instances () = let env = Global.env () in let inst = all_instances () in prlist_with_sep fnl (pr_instance env) inst let print_instances r = let env = Global.env () in let sigma = Evd.from_env env in let inst = instances env sigma r in prlist_with_sep fnl (pr_instance env) inst coq-8.15.0/vernac/prettyp.mli000066400000000000000000000112551417001151100160520ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Termops.names_context val print_closed_sections : bool ref val print_context : env -> Evd.evar_map -> bool -> int option -> Lib.library_segment -> Pp.t val print_library_entry : env -> Evd.evar_map -> bool -> (Libobject.object_name * Lib.node) -> Pp.t option val print_full_context : env -> Evd.evar_map -> Pp.t val print_full_context_typ : env -> Evd.evar_map -> Pp.t val print_full_pure_context : env -> Evd.evar_map -> Pp.t val print_sec_context : env -> Evd.evar_map -> qualid -> Pp.t val print_sec_context_typ : env -> Evd.evar_map -> qualid -> Pp.t val print_judgment : env -> Evd.evar_map -> EConstr.unsafe_judgment -> Pp.t val print_safe_judgment : env -> Evd.evar_map -> Safe_typing.judgment -> Pp.t val print_eval : reduction_function -> env -> Evd.evar_map -> Constrexpr.constr_expr -> EConstr.unsafe_judgment -> Pp.t val print_name : env -> Evd.evar_map -> qualid Constrexpr.or_by_notation -> UnivNames.univ_name_list option -> Pp.t val print_opaque_name : env -> Evd.evar_map -> qualid -> Pp.t val print_about : env -> Evd.evar_map -> qualid Constrexpr.or_by_notation -> UnivNames.univ_name_list option -> Pp.t val print_impargs : qualid Constrexpr.or_by_notation -> Pp.t (** Pretty-printing functions for classes and coercions *) val print_graph : unit -> Pp.t val print_classes : unit -> Pp.t val print_coercions : unit -> Pp.t val print_path_between : Coercionops.cl_typ -> Coercionops.cl_typ -> Pp.t val print_canonical_projections : env -> Evd.evar_map -> GlobRef.t list -> Pp.t (** Pretty-printing functions for type classes and instances *) val print_typeclasses : unit -> Pp.t val print_instances : GlobRef.t -> Pp.t val print_all_instances : unit -> Pp.t val inspect : env -> Evd.evar_map -> int -> Pp.t (** {5 Locate} *) type 'a locatable_info = { locate : qualid -> 'a option; (** Locate the most precise object with the provided name if any. 
*) locate_all : qualid -> 'a list; (** Locate all objects whose name is a suffix of the provided name *) shortest_qualid : 'a -> qualid; (** Return the shortest name in the current context *) name : 'a -> Pp.t; (** Data as printed by the Locate command *) print : 'a -> Pp.t; (** Data as printed by the Print command *) about : 'a -> Pp.t; (** Data as printed by the About command *) } (** Generic data structure representing locatable objects. *) val register_locatable : string -> 'a locatable_info -> unit (** Define a new type of locatable objects that can be reached via the corresponding generic vernacular commands. The string should be a unique name describing the kind of objects considered and that is added as a grammar command prefix for vernacular commands Locate. *) val print_located_qualid : qualid -> Pp.t val print_located_term : qualid -> Pp.t val print_located_module : qualid -> Pp.t val print_located_other : string -> qualid -> Pp.t type object_pr = { print_inductive : MutInd.t -> UnivNames.univ_name_list option -> Pp.t; print_constant_with_infos : Constant.t -> UnivNames.univ_name_list option -> Pp.t; print_section_variable : env -> Evd.evar_map -> variable -> Pp.t; print_syntactic_def : env -> KerName.t -> Pp.t; print_module : ModPath.t -> Pp.t; print_modtype : ModPath.t -> Pp.t; print_named_decl : env -> Evd.evar_map -> Constr.named_declaration -> Pp.t; print_library_entry : env -> Evd.evar_map -> bool -> (Libobject.object_name * Lib.node) -> Pp.t option; print_context : env -> Evd.evar_map -> bool -> int option -> Lib.library_segment -> Pp.t; print_typed_value_in_env : Environ.env -> Evd.evar_map -> EConstr.constr * EConstr.types -> Pp.t; print_eval : Reductionops.reduction_function -> env -> Evd.evar_map -> Constrexpr.constr_expr -> EConstr.unsafe_judgment -> Pp.t; } val set_object_pr : object_pr -> unit val default_object_pr : object_pr coq-8.15.0/vernac/printmod.ml000066400000000000000000000406651417001151100160350ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* !short) ; optwrite = ((:=) short) } (** Each time we have to print a non-globally visible structure, we place its elements in a fake fresh namespace. 
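   A rough sketch of how such a namespace is consumed (illustrative only;
   [register_fake] is a hypothetical name, the real code is
   [nametab_register_dir] further down):

     let register_fake obj_mp =
       let dir = DirPath.make [ mk_fake_top () ] in  (* FAKETOP1, FAKETOP2, ... *)
       Nametab.push_module (Nametab.Until 1) dir obj_mp

   so the elements of the structure can then be pushed in the nametab under
   this fresh prefix and printed with short, unambiguous names.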
*) let mk_fake_top = let r = ref 0 in fun () -> incr r; Id.of_string ("FAKETOP"^(string_of_int !r)) let def s = tag_definition (str s) let keyword s = tag_keyword (str s) let get_new_id locals id = let rec get_id l id = let dir = DirPath.make [id] in if not (Nametab.exists_module dir || Nametab.exists_dir dir) then id else get_id (Id.Set.add id l) (Namegen.next_ident_away id l) in let avoid = List.fold_left (fun accu (_, id) -> Id.Set.add id accu) Id.Set.empty locals in get_id avoid id (** Inductive declarations *) open Reduction let print_params env sigma params = if List.is_empty params then mt () else Printer.pr_rel_context env sigma params ++ brk(1,2) let print_constructors envpar sigma names types = let pc = prlist_with_sep (fun () -> brk(1,0) ++ str "| ") (fun (id,c) -> Id.print id ++ str " : " ++ Printer.pr_lconstr_env envpar sigma c) (Array.to_list (Array.map2 (fun n t -> (n,t)) names types)) in hv 0 (str " " ++ pc) let build_ind_type mip = Inductive.type_of_inductive mip let print_one_inductive env sigma mib ((_,i) as ind) = let u = Univ.make_abstract_instance (Declareops.inductive_polymorphic_context mib) in let mip = mib.mind_packets.(i) in let params = Inductive.inductive_paramdecls (mib,u) in let nparamdecls = Context.Rel.length params in let args = Context.Rel.instance_list mkRel 0 params in let arity = hnf_prod_applist_assum env nparamdecls (build_ind_type ((mib,mip),u)) args in let cstrtypes = Inductive.type_of_constructors (ind,u) (mib,mip) in let cstrtypes = Array.map (fun c -> hnf_prod_applist_assum env nparamdecls c args) cstrtypes in let envpar = push_rel_context params env in let inst = if Declareops.inductive_is_polymorphic mib then Printer.pr_universe_instance sigma u else mt () in hov 0 ( Id.print mip.mind_typename ++ inst ++ brk(1,4) ++ print_params env sigma params ++ str ": " ++ Printer.pr_lconstr_env envpar sigma arity ++ str " :=") ++ brk(0,2) ++ print_constructors envpar sigma mip.mind_consnames cstrtypes let print_mutual_inductive env mind mib udecl = let inds = List.init (Array.length mib.mind_packets) (fun x -> (mind, x)) in let keyword = let open Declarations in match mib.mind_finite with | Finite -> "Inductive" | BiFinite -> "Variant" | CoFinite -> "CoInductive" in let bl = Printer.universe_binders_with_opt_names (Declareops.inductive_polymorphic_context mib) udecl in let sigma = Evd.from_ctx (UState.of_binders bl) in hov 0 (def keyword ++ spc () ++ prlist_with_sep (fun () -> fnl () ++ str" with ") (print_one_inductive env sigma mib) inds ++ str "." 
++ Printer.pr_universes sigma ?variance:mib.mind_variance mib.mind_universes) let get_fields = let rec prodec_rec l subst c = match kind c with | Prod (na,t,c) -> let id = match na.binder_name with Name id -> id | Anonymous -> Id.of_string "_" in prodec_rec ((id,true,Vars.substl subst t)::l) (mkVar id::subst) c | LetIn (na,b,_,c) -> let id = match na.binder_name with Name id -> id | Anonymous -> Id.of_string "_" in prodec_rec ((id,false,Vars.substl subst b)::l) (mkVar id::subst) c | _ -> List.rev l in prodec_rec [] [] let print_record env mind mib udecl = let u = Univ.make_abstract_instance (Declareops.inductive_polymorphic_context mib) in let mip = mib.mind_packets.(0) in let params = Inductive.inductive_paramdecls (mib,u) in let nparamdecls = Context.Rel.length params in let args = Context.Rel.instance_list mkRel 0 params in let arity = hnf_prod_applist_assum env nparamdecls (build_ind_type ((mib,mip),u)) args in let cstrtypes = Inductive.type_of_constructors ((mind,0),u) (mib,mip) in let cstrtype = hnf_prod_applist_assum env nparamdecls cstrtypes.(0) args in let fields = get_fields cstrtype in let envpar = push_rel_context params env in let bl = Printer.universe_binders_with_opt_names (Declareops.inductive_polymorphic_context mib) udecl in let sigma = Evd.from_ctx (UState.of_binders bl) in let keyword = let open Declarations in match mib.mind_finite with | BiFinite -> "Record" | Finite -> "Inductive" | CoFinite -> "CoInductive" in hov 0 ( hov 0 ( def keyword ++ spc () ++ Id.print mip.mind_typename ++ brk(1,4) ++ print_params env sigma params ++ str ": " ++ Printer.pr_lconstr_env envpar sigma arity ++ brk(1,2) ++ str ":= " ++ Id.print mip.mind_consnames.(0)) ++ brk(1,2) ++ hv 2 (str "{ " ++ prlist_with_sep (fun () -> str ";" ++ brk(2,0)) (fun (id,b,c) -> Id.print id ++ str (if b then " : " else " := ") ++ Printer.pr_lconstr_env envpar sigma c) fields) ++ str" }." ++ Printer.pr_universes sigma ?variance:mib.mind_variance mib.mind_universes ) let pr_mutual_inductive_body env mind mib udecl = if mib.mind_record != NotRecord && not !Flags.raw_print then print_record env mind mib udecl else print_mutual_inductive env mind mib udecl (** Modpaths *) let rec print_local_modpath locals = function | MPbound mbid -> Id.print (Util.List.assoc_f MBId.equal mbid locals) | MPdot(mp,l) -> print_local_modpath locals mp ++ str "." ++ Label.print l | MPfile _ -> raise Not_found let print_modpath locals mp = try (* must be with let because streams are lazy! *) let qid = Nametab.shortest_qualid_of_module mp in pr_qualid qid with | Not_found -> print_local_modpath locals mp let print_kn locals kn = try let qid = Nametab.shortest_qualid_of_modtype kn in pr_qualid qid with Not_found -> try print_local_modpath locals kn with Not_found -> print_modpath locals kn let nametab_register_dir obj_mp = let id = mk_fake_top () in let obj_dir = DirPath.make [id] in Nametab.(push_module (Until 1) obj_dir obj_mp) (** Nota: the [global_reference] we register in the nametab below might differ from internal ones, since we cannot recreate here the canonical part of constant and inductive names, but only the user names. This works nonetheless since we search now [Nametab.the_globrevtab] modulo user name. 
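   For instance (an illustrative reading of [nametab_register_body] below,
   not new behaviour): a constant stored in the structure under label [c]
   at module path [mp] is re-registered as
   [GlobRef.ConstRef (Constant.make2 mp c)], i.e. rebuilt from user-level
   data only; its canonical kernel name, which may differ after functor
   applications, is not reconstructed here.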
*) let nametab_register_body mp dir (l,body) = let push id ref = Nametab.push (Nametab.Until (1+List.length (DirPath.repr dir))) (make_path dir id) ref in match body with | SFBmodule _ -> () (* TODO *) | SFBmodtype _ -> () (* TODO *) | SFBconst _ -> push (Label.to_id l) (GlobRef.ConstRef (Constant.make2 mp l)) | SFBmind mib -> let mind = MutInd.make2 mp l in Array.iteri (fun i mip -> push mip.mind_typename (GlobRef.IndRef (mind,i)); Array.iteri (fun j id -> push id (GlobRef.ConstructRef ((mind,i),j+1))) mip.mind_consnames) mib.mind_packets (* TODO only import printing-relevant objects (or find a way to print without importing) *) let import_module = Declaremods.import_module Libobject.unfiltered let process_module_binding = Declaremods.process_module_binding let nametab_register_module_body mp struc = (* If [mp] is a globally visible module, we simply import it *) try import_module ~export:false mp with Not_found -> (* Otherwise we try to emulate an import by playing with nametab *) nametab_register_dir mp; List.iter (nametab_register_body mp DirPath.empty) struc let get_typ_expr_alg mtb = match mtb.mod_type_alg with | Some (NoFunctor me) -> me | _ -> raise Not_found let nametab_register_modparam mbid mtb = let id = MBId.to_id mbid in match mtb.mod_type with | MoreFunctor _ -> id (* functorial param : nothing to register *) | NoFunctor struc -> (* We first try to use the algebraic type expression if any, via a Declaremods function that converts back to module entries *) try let () = process_module_binding mbid (get_typ_expr_alg mtb) in id with e when CErrors.noncritical e -> (* Otherwise, we try to play with the nametab ourselves *) let mp = MPbound mbid in let check id = Nametab.exists_module (DirPath.make [id]) in let id = Namegen.next_ident_away_from id check in let dir = DirPath.make [id] in nametab_register_dir mp; List.iter (nametab_register_body mp dir) struc; id let print_body is_impl extent env mp (l,body) = let name = Label.print l in hov 2 (match body with | SFBmodule _ -> keyword "Module" ++ spc () ++ name | SFBmodtype _ -> keyword "Module Type" ++ spc () ++ name | SFBconst cb -> let ctx = Declareops.constant_polymorphic_context cb in (match cb.const_body with | Def _ -> def "Definition" ++ spc () | OpaqueDef _ when is_impl -> def "Theorem" ++ spc () | _ -> def "Parameter" ++ spc ()) ++ name ++ (match extent with | OnlyNames -> mt () | WithContents -> let bl = Printer.universe_binders_with_opt_names ctx None in let sigma = Evd.from_ctx (UState.of_binders bl) in str " :" ++ spc () ++ hov 0 (Printer.pr_ltype_env env sigma cb.const_type) ++ (match cb.const_body with | Def l when is_impl -> spc () ++ hov 2 (str ":= " ++ Printer.pr_lconstr_env env sigma l) | _ -> mt ()) ++ str "." 
++ Printer.pr_abstract_universe_ctx sigma ctx) | SFBmind mib -> match extent with | WithContents -> pr_mutual_inductive_body env (MutInd.make2 mp l) mib None | OnlyNames -> let keyword = let open Declarations in match mib.mind_finite with | Finite -> def "Inductive" | BiFinite -> def "Variant" | CoFinite -> def "CoInductive" in keyword ++ spc () ++ name) let print_struct is_impl extent env mp struc = prlist_with_sep spc (print_body is_impl extent env mp) struc let print_structure is_type extent env mp locals struc = let env' = Modops.add_structure mp struc Mod_subst.empty_delta_resolver env in nametab_register_module_body mp struc; let kwd = if is_type then "Sig" else "Struct" in hv 2 (keyword kwd ++ spc () ++ print_struct false extent env' mp struc ++ brk (1,-2) ++ keyword "End") let rec flatten_app mexpr l = match mexpr with | MEapply (mexpr, arg) -> flatten_app mexpr (arg::l) | MEident mp -> mp::l | MEwith _ -> assert false let rec print_typ_expr extent env mp locals mty = match mty with | MEident kn -> print_kn locals kn | MEapply _ -> let lapp = flatten_app mty [] in let fapp = List.hd lapp in let mapp = List.tl lapp in hov 3 (str"(" ++ (print_kn locals fapp) ++ spc () ++ prlist_with_sep spc (print_modpath locals) mapp ++ str")") | MEwith(me,WithDef(idl,(c, _)))-> let s = String.concat "." (List.map Id.to_string idl) in let body = match extent with | WithContents -> let sigma = Evd.from_env env in spc() ++ str ":=" ++ spc() ++ Printer.pr_lconstr_env env sigma c | OnlyNames -> mt() in hov 2 (print_typ_expr extent env mp locals me ++ spc() ++ str "with" ++ spc() ++ def "Definition"++ spc() ++ str s ++ body) | MEwith(me,WithMod(idl,mp'))-> let s = String.concat "." (List.map Id.to_string idl) in let body = match extent with | WithContents -> spc() ++ str ":="++ spc() ++ print_modpath locals mp' | OnlyNames -> mt () in hov 2 (print_typ_expr extent env mp locals me ++ spc() ++ str "with" ++ spc() ++ keyword "Module"++ spc() ++ str s ++ body) let print_mod_expr env mp locals = function | MEident mp -> print_modpath locals mp | MEapply _ as me -> let lapp = flatten_app me [] in hov 3 (str"(" ++ prlist_with_sep spc (print_modpath locals) lapp ++ str")") | MEwith _ -> assert false (* No 'with' syntax for modules *) let rec print_functor fty fatom is_type extent env mp locals = function | NoFunctor me -> fatom is_type extent env mp locals me | MoreFunctor (mbid,mtb1,me2) -> let id = nametab_register_modparam mbid mtb1 in let mp1 = MPbound mbid in let pr_mtb1 = fty extent env mp1 locals mtb1 in let env' = Modops.add_module_type mp1 mtb1 env in let locals' = (mbid, get_new_id locals (MBId.to_id mbid))::locals in let kwd = if is_type then "Funsig" else "Functor" in hov 2 (keyword kwd ++ spc () ++ str "(" ++ Id.print id ++ str ":" ++ pr_mtb1 ++ str ")" ++ spc() ++ print_functor fty fatom is_type extent env' mp locals' me2) let rec print_expression x = print_functor print_modtype (function true -> print_typ_expr | false -> fun _ -> print_mod_expr) x and print_signature x = print_functor print_modtype print_structure x and print_modtype extent env mp locals mtb = match mtb.mod_type_alg with | Some me -> print_expression true extent env mp locals me | None -> print_signature true extent env mp locals mtb.mod_type (** Since we might play with nametab above, we should reset to prior state after the printing *) let print_expression' is_type extent env mp me = Vernacstate.System.protect (fun e -> print_expression is_type extent env mp [] e) me let print_signature' is_type extent env mp me = 
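 (* As the comment above explains, [Vernacstate.System.protect] runs the
    printing function and then rolls the global system state back, so the
    temporary nametab registrations performed while printing do not
    persist. *)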
Vernacstate.System.protect (fun e -> print_signature is_type extent env mp [] e) me let unsafe_print_module extent env mp with_body mb = let name = print_modpath [] mp in let pr_equals = spc () ++ str ":= " in let body = match with_body, mb.mod_expr with | false, _ | true, Abstract -> mt() | _, Algebraic me -> pr_equals ++ print_expression' false extent env mp me | _, Struct sign -> pr_equals ++ print_signature' false extent env mp sign | _, FullStruct -> pr_equals ++ print_signature' false extent env mp mb.mod_type in let modtype = match mb.mod_expr, mb.mod_type_alg with | FullStruct, _ -> mt () | _, Some ty -> brk (1,1) ++ str": " ++ print_expression' true extent env mp ty | _, _ -> brk (1,1) ++ str": " ++ print_signature' true extent env mp mb.mod_type in hv 0 (keyword "Module" ++ spc () ++ name ++ modtype ++ body) exception ShortPrinting let print_module ~with_body mp = let me = Global.lookup_module mp in try if !short then raise ShortPrinting; unsafe_print_module WithContents (Global.env ()) mp with_body me ++ fnl () with e when CErrors.noncritical e -> unsafe_print_module OnlyNames (Global.env ()) mp with_body me ++ fnl () let print_modtype kn = let mtb = Global.lookup_modtype kn in let name = print_kn [] kn in hv 1 (keyword "Module Type" ++ spc () ++ name ++ str " =" ++ spc () ++ try if !short then raise ShortPrinting; print_signature' true WithContents (Global.env ()) kn mtb.mod_type with e when CErrors.noncritical e -> print_signature' true OnlyNames (Global.env ()) kn mtb.mod_type) coq-8.15.0/vernac/printmod.mli000066400000000000000000000016321417001151100161750ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* MutInd.t -> Declarations.mutual_inductive_body -> UnivNames.univ_name_list option -> Pp.t val print_module : with_body:bool -> ModPath.t -> Pp.t val print_modtype : ModPath.t -> Pp.t coq-8.15.0/vernac/proof_using.ml000066400000000000000000000207711417001151100165270ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* let vb = match decl with | LocalAssum _ -> Id.Set.empty | LocalDef (_,b,_) -> Termops.global_vars_set env sigma b in let vty = Termops.global_vars_set env sigma (NamedDecl.get_type decl) in let vbty = Id.Set.union vb vty in if Id.Set.exists (fun v -> Id.Set.mem v s) vbty then Id.Set.add (NamedDecl.get_id decl) (Id.Set.union s vbty) else s) s (EConstr.named_context env) in if Id.Set.equal s s' then s else close_fwd env sigma s' let set_of_type env sigma ty = List.fold_left (fun acc ty -> Id.Set.union (Termops.global_vars_set env sigma ty) acc) Id.Set.empty ty let full_set env = List.fold_right Id.Set.add (List.map NamedDecl.get_id (named_context env)) Id.Set.empty let warn_all_collection_precedence = CWarnings.create ~name:"all-collection-precedence" ~category:"deprecated" Pp.(fun () -> str "Variable " ++ Id.print all_collection_id ++ str " is shadowed by Collection named " ++ Id.print all_collection_id ++ str " containing all variables.") let warn_collection_precedence = CWarnings.create ~name:"collection-precedence" ~category:"deprecated" Pp.(fun id -> Id.print id ++ str " is both name of a Collection and Variable, Collection " ++ Id.print id ++ str " takes precedence over Variable.") let warn_redefine_collection = CWarnings.create 
~name:"collection-redefinition" ~category:"deprecated" Pp.(fun id -> str "New Collection definition of " ++ Id.print id ++ str " shadows the previous one.") let warn_variable_shadowing = CWarnings.create ~name:"variable-shadowing" ~category:"deprecated" Pp.(fun id -> Id.print id ++ str " was already a defined Variable, the name " ++ Id.print id ++ str " will refer to Collection when executing \"Proof using\" command.") let err_redefine_all_collection () = CErrors.user_err Pp.(str "\"" ++ Id.print all_collection_id ++ str "\" is a predefined collection containing all variables. It can't be redefined.") let process_expr env sigma e v_ty = let variable_exists id = try ignore (lookup_named id env); true with | Not_found -> false in let rec aux = function | SsEmpty -> Id.Set.empty | SsType -> v_ty | SsSingl { CAst.v = id } -> set_of_id id | SsUnion(e1,e2) -> Id.Set.union (aux e1) (aux e2) | SsSubstr(e1,e2) -> Id.Set.diff (aux e1) (aux e2) | SsCompl e -> Id.Set.diff (full_set env) (aux e) | SsFwdClose e -> close_fwd env sigma (aux e) and set_of_id id = if Id.equal id all_collection_id then begin if variable_exists all_collection_id then warn_all_collection_precedence (); full_set env end else if is_known_name id then begin if variable_exists id then warn_collection_precedence id; aux (CList.assoc_f Id.equal id !known_names) end else Id.Set.singleton id in aux e let process_expr env sigma e ty = let v_ty = set_of_type env sigma ty in let s = Id.Set.union v_ty (process_expr env sigma e v_ty) in Id.Set.elements s type t = Names.Id.Set.t let definition_using env evd ~using ~terms = let l = process_expr env evd using terms in Names.Id.Set.(List.fold_right add l empty) let name_set id expr = if Id.equal id all_collection_id then err_redefine_all_collection (); if is_known_name id then warn_redefine_collection id; if Termops.is_section_variable (Global.env ()) id then warn_variable_shadowing id; known_names := (id,expr) :: !known_names let minimize_hyps env ids = let rec aux ids = let ids' = Id.Set.fold (fun id alive -> let impl_by_id = Id.Set.remove id (really_needed env (Id.Set.singleton id)) in if Id.Set.is_empty impl_by_id then alive else Id.Set.diff alive impl_by_id) ids ids in if Id.Set.equal ids ids' then ids else aux ids' in aux ids let remove_ids_and_lets env s ids = let not_ids id = not (Id.Set.mem id ids) in let no_body id = named_body id env = None in let deps id = really_needed env (Id.Set.singleton id) in (Id.Set.filter (fun id -> not_ids id && (no_body id || Id.Set.exists not_ids (Id.Set.filter no_body (deps id)))) s) let record_proof_using expr = Aux_file.record_in_aux "suggest_proof_using" expr let debug_proof_using = CDebug.create ~name:"proof-using" () (* Variables in [skip] come from after the definition, so don't count for "All". Used in the variable case since the env contains the variable itself. 
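   For example, [suggest_variable] below calls this with
   [(Id.Set.singleton id)] so that a section variable is not suggested as
   part of its own "Proof using" clause, while [suggest_constant] passes
   [Id.Set.empty].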
*) let suggest_common env ppid used ids_typ skip = let module S = Id.Set in let open Pp in let pr_set parens s = let wrap ppcmds = if parens && S.cardinal s > 1 then str "(" ++ ppcmds ++ str ")" else ppcmds in wrap (prlist_with_sep (fun _ -> str" ") Id.print (S.elements s)) in let needed = minimize_hyps env (remove_ids_and_lets env used ids_typ) in let all_needed = really_needed env needed in let all = List.fold_left (fun all d -> S.add (NamedDecl.get_id d) all) S.empty (named_context env) in let all = S.diff all skip in let fwd_typ = close_fwd env (Evd.from_env env) ids_typ in let () = debug_proof_using (fun () -> str "All " ++ pr_set false all ++ fnl() ++ str "Type " ++ pr_set false ids_typ ++ fnl() ++ str "needed " ++ pr_set false needed ++ fnl() ++ str "all_needed " ++ pr_set false all_needed ++ fnl() ++ str "Type* " ++ pr_set false fwd_typ) in let valid_exprs = ref [] in let valid e = valid_exprs := e :: !valid_exprs in if S.is_empty needed then valid (str "Type"); if S.equal all_needed fwd_typ then valid (str "Type*"); if S.equal all all_needed then valid(str "All"); valid (pr_set false needed); Feedback.msg_info ( str"The proof of "++ ppid ++ spc() ++ str "should start with one of the following commands:"++spc()++ v 0 ( prlist_with_sep cut (fun x->str"Proof using " ++x++ str". ") !valid_exprs)); if Aux_file.recording () then let s = string_of_ppcmds (prlist_with_sep (fun _ -> str";") (fun x->x) !valid_exprs) in record_proof_using s let suggest_proof_using = ref false let () = Goptions.(declare_bool_option { optdepr = false; optkey = ["Suggest";"Proof";"Using"]; optread = (fun () -> !suggest_proof_using); optwrite = ((:=) suggest_proof_using) }) let suggest_constant env kn = if !suggest_proof_using then begin let open Declarations in let body = lookup_constant kn env in let used = Id.Set.of_list @@ List.map NamedDecl.get_id body.const_hyps in let ids_typ = global_vars_set env body.const_type in suggest_common env (Printer.pr_constant env kn) used ids_typ Id.Set.empty end let suggest_variable env id = if !suggest_proof_using then begin match lookup_named id env with | LocalDef (_,body,typ) -> let ids_typ = global_vars_set env typ in let ids_body = global_vars_set env body in let used = Id.Set.union ids_body ids_typ in suggest_common env (Id.print id) used ids_typ (Id.Set.singleton id) | LocalAssum _ -> assert false end let value = ref None let using_to_string us = Pp.string_of_ppcmds (Ppvernac.pr_using us) let using_from_string us = Pcoq.Entry.parse G_vernac.section_subset_expr (Pcoq.Parsable.make (Stream.of_string us)) let proof_using_opt_name = ["Default";"Proof";"Using"] let () = Goptions.(declare_stringopt_option { optdepr = false; optkey = proof_using_opt_name; optread = (fun () -> Option.map using_to_string !value); optwrite = (fun b -> value := Option.map using_from_string b); }) let get_default_proof_using () = !value coq-8.15.0/vernac/proof_using.mli000066400000000000000000000026501417001151100166740ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Evd.evar_map -> using:Vernacexpr.section_subset_expr -> terms:EConstr.constr list -> t val name_set : Names.Id.t -> Vernacexpr.section_subset_expr -> unit val suggest_constant : Environ.env -> Names.Constant.t -> unit val suggest_variable : Environ.env -> Names.Id.t -> unit val get_default_proof_using : unit -> Vernacexpr.section_subset_expr option val 
proof_using_opt_name : string list (** For the stm *) val using_from_string : string -> Vernacexpr.section_subset_expr coq-8.15.0/vernac/pvernac.ml000066400000000000000000000056611417001151100156340ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* CErrors.anomaly Pp.(str "proof mode not found: " ++ str ename) in let lookup_proof_mode name = if Hashtbl.mem proof_mode name then Some name else None in register_proof_mode, find_proof_mode, lookup_proof_mode let proof_mode_to_string name = name let command_entry_ref = ref None module Vernac_ = struct (* The different kinds of vernacular commands *) let gallina = Entry.create "gallina" let gallina_ext = Entry.create "gallina_ext" let command = Entry.create "command" let syntax = Entry.create "syntax_command" let vernac_control = Entry.create "Vernac.vernac_control" let inductive_definition = Entry.create "Vernac.inductive_definition" let fix_definition = Entry.create "Vernac.fix_definition" let red_expr = Entry.create "red_expr" let hint_info = Entry.create "hint_info" (* Main vernac entry *) let main_entry = Entry.create "vernac" let noedit_mode = Entry.create "noedit_command" let () = let act_vernac v loc = Some v in let act_eoi _ loc = None in let rule = [ Pcoq.(Production.make (Rule.next Rule.stop (Symbol.token Tok.PEOI)) act_eoi); Pcoq.(Production.make (Rule.next Rule.stop (Symbol.nterm vernac_control)) act_vernac); ] in Pcoq.(grammar_extend main_entry (Fresh (Gramlib.Gramext.First, [None, None, rule]))) let select_tactic_entry spec = match spec with | None -> noedit_mode | Some ename -> find_proof_mode ename let command_entry = Pcoq.Entry.(of_parser "command_entry" { parser_fun = (fun strm -> Pcoq.Entry.parse_token_stream (select_tactic_entry !command_entry_ref) strm) }) end module Unsafe = struct let set_tactic_entry oname = command_entry_ref := oname end let main_entry proof_mode = Unsafe.set_tactic_entry proof_mode; Vernac_.main_entry let () = register_grammar Genredexpr.wit_red_expr (Vernac_.red_expr); coq-8.15.0/vernac/pvernac.mli000066400000000000000000000042121417001151100157740ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* unit end (** The main entry: reads an optional vernac command *) val main_entry : proof_mode option -> vernac_control option Entry.t (** Grammar entry for tactics: proof mode(s). By default Coq's grammar has an empty entry (non-terminal) for tactics. A plugin can register its non-terminal by providing a name and a grammar entry. For example the Ltac plugin register the "Classic" grammar entry for parsing its tactics. 
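    A minimal sketch of such a registration (hypothetical plugin code;
    [my_tactic_entry] and the "MyTacticMode" string are made up for the
    example):

      let my_tactic_entry : Vernacexpr.vernac_expr Pcoq.Entry.t =
        Pcoq.Entry.create "my_plugin:tactic_command"

      let my_proof_mode : Pvernac.proof_mode =
        Pvernac.register_proof_mode "MyTacticMode" my_tactic_entry

    The returned [proof_mode] can then be passed (as [Some my_proof_mode])
    to [main_entry] so that tactic sentences are parsed with this entry.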
*) val register_proof_mode : string -> Vernacexpr.vernac_expr Entry.t -> proof_mode val lookup_proof_mode : string -> proof_mode option val proof_mode_to_string : proof_mode -> string coq-8.15.0/vernac/recLemmas.ml000066400000000000000000000117131417001151100161010ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* let typ = Declare.CInfo.get_typ x in let (hyps,ccl) = EConstr.decompose_prod_assum sigma typ in let whnf_hyp_hds = EConstr.map_rel_context_in_env (fun env c -> fst (Reductionops.whd_all_stack env sigma c)) (Global.env()) hyps in let ind_hyps = List.flatten (List.map_i (fun i decl -> let t = RelDecl.get_type decl in match EConstr.kind sigma t with | Ind ((kn,_ as ind),u) when let mind = Global.lookup_mind kn in mind.mind_finite <> Declarations.CoFinite -> [ind,x,i] | _ -> []) 0 (List.rev (List.filter Context.Rel.Declaration.is_local_assum whnf_hyp_hds))) in let ind_ccl = let cclenv = EConstr.push_rel_context hyps (Global.env()) in let whnf_ccl,_ = Reductionops.whd_all_stack cclenv Evd.empty ccl in match EConstr.kind sigma whnf_ccl with | Ind ((kn,_ as ind),u) when let mind = Global.lookup_mind kn in Int.equal mind.mind_ntypes n && mind.mind_finite == Declarations.CoFinite -> [ind,x,0] | _ -> [] in ind_hyps,ind_ccl) thms in let inds_hyps,ind_ccls = List.split inds in let of_same_mutind ((kn,_),_,_) = function ((kn',_),_,_) -> Environ.QMutInd.equal (Global.env ()) kn kn' in (* Check if all conclusions are coinductive in the same type *) (* (degenerated cartesian product since there is at most one coind ccl) *) let same_indccl = List.cartesians_filter (fun hyp oks -> if List.for_all (of_same_mutind hyp) oks then Some (hyp::oks) else None) [] ind_ccls in let ordered_same_indccl = List.filter (List.for_all_i (fun i ((kn,j),_,_) -> Int.equal i j) 0) same_indccl in (* Check if some hypotheses are inductive in the same type *) let common_same_indhyp = List.cartesians_filter (fun hyp oks -> if List.for_all (of_same_mutind hyp) oks then Some (hyp::oks) else None) [] inds_hyps in let ordered_inds,finite,guard = match ordered_same_indccl, common_same_indhyp with | indccl::rest, _ -> assert (List.is_empty rest); (* One occ. 
of common coind ccls and no common inductive hyps *) if not (List.is_empty common_same_indhyp) then Flags.if_verbose Feedback.msg_info (Pp.str "Assuming mutual coinductive statements."); flush_all (); indccl, true, [] | [], _::_ -> let () = match same_indccl with | ind :: _ -> if List.distinct_f Names.Ind.CanOrd.compare (List.map pi1 ind) then Flags.if_verbose Feedback.msg_info (Pp.strbrk ("Coinductive statements do not follow the order of "^ "definition, assuming the proof to be by induction.")); flush_all () | _ -> () in let possible_guards = List.map (List.map pi3) inds_hyps in (* assume the largest indices as possible *) List.last common_same_indhyp, false, possible_guards | _, [] -> CErrors.user_err Pp.(str ("Cannot find common (mutual) inductive premises or coinductive" ^ " conclusions in the statements.")) in (finite,guard,None), ordered_inds type mutual_info = | NonMutual of EConstr.t Declare.CInfo.t | Mutual of { mutual_info : Declare.Proof.mutual_info ; cinfo : EConstr.t Declare.CInfo.t list ; possible_guards : int list } let look_for_possibly_mutual_statements sigma thms : mutual_info = match thms with | [thm] -> (* One non recursively proved theorem *) NonMutual thm | _::_ as thms -> (* More than one statement and/or an explicit decreasing mark: *) (* we look for a common inductive hyp or a common coinductive conclusion *) let recguard,ordered_inds = find_mutually_recursive_statements sigma thms in let cinfo = List.map pi2 ordered_inds in Mutual { mutual_info = recguard; cinfo; possible_guards = List.map (fun (_,_,i) -> succ i) ordered_inds } | [] -> CErrors.anomaly (Pp.str "Empty list of theorems.") coq-8.15.0/vernac/recLemmas.mli000066400000000000000000000017521417001151100162540ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* EConstr.t Declare.CInfo.t list -> mutual_info coq-8.15.0/vernac/record.ml000066400000000000000000001117231417001151100154510ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* let sigma, (i, b, t), impl = match d with | Vernacexpr.AssumExpr({CAst.loc;v=id},bl,t) -> (* Temporary compatibility with the type-classes heuristics *) (* which are applied after the interpretation of bl and *) (* before the one of t otherwise (see #13166) *) let t = if bl = [] then t else mkCProdN bl t in let sigma, t, impl = ComAssumption.interp_assumption ~program_mode:false env sigma impls_env [] t in sigma, (id, None, t), impl | Vernacexpr.DefExpr({CAst.loc;v=id},bl,b,t) -> let sigma, (b, t), impl = ComDefinition.interp_definition ~program_mode:false env sigma impls_env bl None b t in let t = match t with Some t -> t | None -> Retyping.get_type_of env sigma b in sigma, (id, Some b, t), impl in let r = Retyping.relevance_of_type env sigma t in let impls_env = match i with | Anonymous -> impls_env | Name id -> Id.Map.add id (Constrintern.compute_internalization_data env sigma id Constrintern.Method t impl) impls_env in let d = match b with | None -> LocalAssum (make_annot i r,t) | Some b -> LocalDef (make_annot i r,b,t) in List.iter (Metasyntax.set_notation_for_interpretation env impls_env) no; (EConstr.push_rel d env, sigma, impl :: uimpls, d::params, impls_env)) (env, sigma, [], [], impls_env) nots l in let _, _, sigma = Context.Rel.fold_outside 
~init:(env,0,sigma) (fun f (env,k,sigma) -> let sigma = RelDecl.fold_constr (fun c sigma -> ComInductive.maybe_unify_params_in env sigma ~ninds ~nparams ~binders:k c) f sigma in EConstr.push_rel f env, k+1, sigma) newfs in sigma, (impls, newfs) let compute_constructor_level evars env l = List.fold_right (fun d (env, univ) -> let univ = if is_local_assum d then let s = Retyping.get_sort_of env evars (RelDecl.get_type d) in Univ.sup (Sorts.univ_of_sort s) univ else univ in (EConstr.push_rel d env, univ)) l (env, Univ.Universe.sprop) let check_anonymous_type ind = match ind with | { CAst.v = CSort (Glob_term.UAnonymous {rigid=true}) } -> true | _ -> false let error_parameters_must_be_named bk {CAst.loc; v=name} = match bk, name with | Default _, Anonymous -> CErrors.user_err ?loc (str "Record parameters must be named.") | _ -> () let check_parameters_must_be_named = function | CLocalDef (b, _, _) -> error_parameters_must_be_named default_binder_kind b | CLocalAssum (ls, bk, ce) -> List.iter (error_parameters_must_be_named bk) ls | CLocalPattern {CAst.loc} -> Loc.raise ?loc (Stream.Error "pattern with quote not allowed in record parameters") (** [DataI.t] contains the information used in record interpretation, it is a strict subset of [Ast.t] thus this should be eventually removed or merged with [Ast.t] *) module DataI = struct type t = { name : Id.t ; arity : Constrexpr.constr_expr option (** declared sort for the record *) ; nots : Metasyntax.where_decl_notation list list (** notations for fields *) ; fs : Vernacexpr.local_decl_expr list } end type projection_flags = { pf_subclass: bool; pf_canonical: bool; } (** [DataR.t] contains record data after interpretation / type-inference *) module DataR = struct type t = { min_univ : Univ.Universe.t ; arity : Constr.t ; implfs : Impargs.manual_implicits list ; fields : Constr.rel_declaration list } end module Data = struct type t = { id : Id.t ; idbuild : Id.t ; is_coercion : bool ; coers : projection_flags list ; rdata : DataR.t } end let build_type_telescope newps env0 (sigma, template) { DataI.arity; _ } = match arity with | None -> let uvarkind = Evd.univ_flexible_alg in let sigma, s = Evd.new_sort_variable uvarkind sigma in (sigma, template), (EConstr.mkSort s, s) | Some t -> let env = EConstr.push_rel_context newps env0 in let poly = match t with | { CAst.v = CSort (Glob_term.UAnonymous {rigid=true}) } -> true | _ -> false in let impls = Constrintern.empty_internalization_env in let sigma, s = Constrintern.interp_type_evars ~program_mode:false env sigma ~impls t in let sred = Reductionops.whd_allnolet env sigma s in (match EConstr.kind sigma sred with | Sort s' -> let s' = EConstr.ESorts.kind sigma s' in (if poly then match Evd.is_sort_variable sigma s' with | Some l -> let sigma = Evd.make_flexible_variable sigma ~algebraic:true l in (sigma, template), (s, s') | None -> (sigma, false), (s, s') else (sigma, false), (s, s')) | _ -> user_err ?loc:(constr_loc t) (str"Sort expected.")) type tc_result = bool * Impargs.manual_implicits (* Part relative to closing the definitions *) * UState.named_universes_entry * Entries.variance_entry * Constr.rel_context * DataR.t list (* ps = parameter list *) let typecheck_params_and_fields def poly udecl ps (records : DataI.t list) : tc_result = let env0 = Global.env () in (* Special case elaboration for template-polymorphic inductives, lower bound on introduced universes is Prop so that we do not miss any Set <= i constraint for universes that might actually be instantiated with Prop. 
*) let is_template = List.exists (fun { DataI.arity; _} -> Option.cata check_anonymous_type true arity) records in let env0 = if not poly && is_template then Environ.set_universes_lbound env0 UGraph.Bound.Prop else env0 in let sigma, decl, variances = Constrintern.interp_cumul_univ_decl_opt env0 udecl in let () = List.iter check_parameters_must_be_named ps in let sigma, (impls_env, ((env1,newps), imps)) = Constrintern.interp_context_evars ~program_mode:false env0 sigma ps in let (sigma, template), typs = List.fold_left_map (build_type_telescope newps env0) (sigma, true) records in let arities = List.map (fun (typ, _) -> EConstr.it_mkProd_or_LetIn typ newps) typs in let relevances = List.map (fun (_,s) -> Sorts.relevance_of_sort s) typs in let fold accu { DataI.name; _ } arity r = EConstr.push_rel (LocalAssum (make_annot (Name name) r,arity)) accu in let env_ar = EConstr.push_rel_context newps (List.fold_left3 fold env0 records arities relevances) in let impls_env = let ids = List.map (fun { DataI.name; _ } -> name) records in let imps = List.map (fun _ -> imps) arities in Constrintern.compute_internalization_env env0 sigma ~impls:impls_env Constrintern.Inductive ids arities imps in let ninds = List.length arities in let nparams = List.length newps in let fold sigma { DataI.nots; fs; _ } arity = interp_fields_evars env_ar sigma ~ninds ~nparams impls_env nots fs in let (sigma, data) = List.fold_left2_map fold sigma records arities in let sigma = Pretyping.solve_remaining_evars Pretyping.all_and_fail_flags env_ar sigma in let fold sigma (typ, sort) (_, newfs) = let _, univ = compute_constructor_level sigma env_ar newfs in let univ = if Sorts.is_sprop sort then univ else Univ.Universe.sup univ Univ.type0m_univ in if not def && is_impredicative_sort env0 sort then sigma, (univ, typ) else let sigma = Evd.set_leq_sort env_ar sigma (Sorts.sort_of_univ univ) sort in if Univ.is_small_univ univ && Option.cata (Evd.is_flexible_level sigma) false (Evd.is_sort_variable sigma sort) then (* We can assume that the level in aritysort is not constrained and clear it, if it is flexible *) Evd.set_eq_sort env_ar sigma Sorts.set sort, (univ, EConstr.mkSort (Sorts.sort_of_univ univ)) else sigma, (univ, typ) in let (sigma, typs) = List.fold_left2_map fold sigma typs data in (* TODO: Have this use Declaredef.prepare_definition *) let sigma, (newps, ans) = Evarutil.finalize sigma (fun nf -> let newps = List.map (RelDecl.map_constr_het nf) newps in let map (implfs, fields) (min_univ, typ) = let fields = List.map (RelDecl.map_constr_het nf) fields in let arity = nf typ in { DataR.min_univ; arity; implfs; fields } in let ans = List.map2 map data typs in newps, ans) in let univs = Evd.check_univ_decl ~poly sigma decl in let ce t = Pretyping.check_evars env0 sigma (EConstr.of_constr t) in let () = List.iter (iter_constr ce) (List.rev newps) in template, imps, univs, variances, newps, ans type record_error = | MissingProj of Id.t * Id.t list | BadTypedProj of Id.t * env * Type_errors.type_error let warn_cannot_define_projection = CWarnings.create ~name:"cannot-define-projection" ~category:"records" (fun msg -> hov 0 msg) (* If a projection is not definable, we throw an error if the user asked it to be a coercion. Otherwise, we just print an info message. The user might still want to name the field of the record. 
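   A typical instance (illustrative): a record living in [Prop] with an
   informative field, e.g. something like
   [Record R : Prop := mk { n : nat }.]; defining [n : R -> nat] would need
   an elimination from [Prop] into an informative sort, so no projection is
   declared and the field only keeps its name. The message below is then a
   warning, or an error if the field was flagged as a coercion with [:>].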
*) let warning_or_error ~info coe indsp err = let st = match err with | MissingProj (fi,projs) -> let s,have = if List.length projs > 1 then "s","were" else "","was" in (Id.print fi ++ strbrk" cannot be defined because the projection" ++ str s ++ spc () ++ prlist_with_sep pr_comma Id.print projs ++ spc () ++ str have ++ strbrk " not defined.") | BadTypedProj (fi,ctx,te) -> match te with | ElimArity (_,_,_,Some (_,_,_,NonInformativeToInformative)) -> (Id.print fi ++ strbrk" cannot be defined because it is informative and " ++ Printer.pr_inductive (Global.env()) indsp ++ strbrk " is not.") | ElimArity (_,_,_,Some (_,_,_,StrongEliminationOnNonSmallType)) -> (Id.print fi ++ strbrk" cannot be defined because it is large and " ++ Printer.pr_inductive (Global.env()) indsp ++ strbrk " is not.") | _ -> (Id.print fi ++ strbrk " cannot be defined because it is not typable.") in if coe then user_err ~info st; warn_cannot_define_projection (hov 0 st) type field_status = | NoProjection of Name.t | Projection of constr exception NotDefinable of record_error (* This replaces previous projection bodies in current projection *) (* Undefined projs are collected and, at least one undefined proj occurs *) (* in the body of current projection then the latter can not be defined *) (* [c] is defined in ctxt [[params;fields]] and [l] is an instance of *) (* [[fields]] defined in ctxt [[params;x:ind]] *) let subst_projection fid l c = let lv = List.length l in let bad_projs = ref [] in let rec substrec depth c = match Constr.kind c with | Rel k -> (* We are in context [[params;fields;x:ind;...depth...]] *) if k <= depth+1 then c else if k-depth-1 <= lv then match List.nth l (k-depth-2) with | Projection t -> lift depth t | NoProjection (Name id) -> bad_projs := id :: !bad_projs; mkRel k | NoProjection Anonymous -> user_err (str "Field " ++ Id.print fid ++ str " depends on the " ++ pr_nth (k-depth-1) ++ str " field which has no name.") else mkRel (k-lv) | _ -> Constr.map_with_binders succ substrec depth c in let c' = lift 1 c in (* to get [c] defined in ctxt [[params;fields;x:ind]] *) let c'' = substrec 0 c' in if not (List.is_empty !bad_projs) then raise (NotDefinable (MissingProj (fid,List.rev !bad_projs))); c'' let instantiate_possibly_recursive_type ind u ntypes paramdecls fields = let subst = List.map_i (fun i _ -> mkRel i) 1 paramdecls in let subst' = List.init ntypes (fun i -> mkIndU ((ind, ntypes - i - 1), u)) in Vars.substl_rel_context (subst @ subst') fields (* We build projections *) (* TODO: refactor the declaration part here; this requires some surgery as Evarutil.finalize is called too early in the path *) (** This builds and _declares_ a named projection, the code looks tricky due to the term manipulation. It also handles declaring the implicits parameters, coercion status, etc... 
of the projection; this could be refactored as noted above by moving to the higher-level declare constant API *) let build_named_proj ~primitive ~flags ~poly ~univs ~uinstance ~kind env paramdecls paramargs decl impls fid subst nfi ti i indsp mib lifted_fields x rp = let ccl = subst_projection fid subst ti in let body, p_opt = match decl with | LocalDef (_,ci,_) -> subst_projection fid subst ci, None | LocalAssum ({binder_relevance=rci},_) -> (* [ccl] is defined in context [params;x:rp] *) (* [ccl'] is defined in context [params;x:rp;x:rp] *) if primitive then let p = Projection.Repr.make indsp ~proj_npars:mib.mind_nparams ~proj_arg:i (Label.of_id fid) in mkProj (Projection.make p true, mkRel 1), Some p else let ccl' = liftn 1 2 ccl in let p = mkLambda (x, lift 1 rp, ccl') in let branch = it_mkLambda_or_LetIn (mkRel nfi) lifted_fields in let ci = Inductiveops.make_case_info env indsp rci LetStyle in (* Record projections are always NoInvert because they're at constant relevance *) mkCase (Inductive.contract_case env (ci, p, NoInvert, mkRel 1, [|branch|])), None in let proj = it_mkLambda_or_LetIn (mkLambda (x,rp,body)) paramdecls in let projtyp = it_mkProd_or_LetIn (mkProd (x,rp,ccl)) paramdecls in let univs = match fst univs with | Entries.Monomorphic_entry -> UState.Monomorphic_entry Univ.ContextSet.empty, snd univs | Entries.Polymorphic_entry uctx -> UState.Polymorphic_entry uctx, snd univs in let entry = Declare.definition_entry ~univs ~types:projtyp proj in let kind = Decls.IsDefinition kind in let kn = try Declare.declare_constant ~name:fid ~kind (Declare.DefinitionEntry entry) with Type_errors.TypeError (ctx,te) as exn when not primitive -> let _, info = Exninfo.capture exn in Exninfo.iraise (NotDefinable (BadTypedProj (fid,ctx,te)),info) in Declare.definition_message fid; let term = match p_opt with | Some p -> let _ = DeclareInd.declare_primitive_projection p kn in mkProj (Projection.make p false,mkRel 1) | None -> let proj_args = (*Rel 1 refers to "x"*) paramargs@[mkRel 1] in match decl with | LocalDef (_,ci,_) when primitive -> body | _ -> applist (mkConstU (kn,uinstance),proj_args) in let refi = GlobRef.ConstRef kn in Impargs.maybe_declare_manual_implicits false refi impls; if flags.pf_subclass then begin let cl = ComCoercion.class_of_global (GlobRef.IndRef indsp) in ComCoercion.try_add_new_coercion_with_source refi ~local:false ~poly ~source:cl end; let i = if is_local_assum decl then i+1 else i in (Some kn, i, Projection term::subst) (** [build_proj] will build a projection for each field, or skip if the field is anonymous, i.e. [_ : t] *) let build_proj env mib indsp primitive x rp lifted_fields ~poly paramdecls paramargs ~uinstance ~kind ~univs (nfi,i,kinds,subst) flags decl impls = let fi = RelDecl.get_name decl in let ti = RelDecl.get_type decl in let (sp_proj,i,subst) = match fi with | Anonymous -> (None,i,NoProjection fi::subst) | Name fid -> try build_named_proj ~primitive ~flags ~poly ~univs ~uinstance ~kind env paramdecls paramargs decl impls fid subst nfi ti i indsp mib lifted_fields x rp with NotDefinable why as exn -> let _, info = Exninfo.capture exn in warning_or_error ~info flags.pf_subclass indsp why; (None,i,NoProjection fi::subst) in (nfi - 1, i, { Structure.proj_name = fi ; proj_true = is_local_assum decl ; proj_canonical = flags.pf_canonical ; proj_body = sp_proj } :: kinds , subst) (** [declare_projections] prepares the common context for all record projections and then calls [build_proj] for each one. 
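    As a rough picture (illustrative, not literal output): for a
    declaration such as [Record point := Build_point { x : nat; y : nat }],
    the returned list has one [Structure.projection] entry per field,
    roughly of the shape

      { proj_name = Name x; proj_true = true;
        proj_canonical = true; proj_body = Some c_x }

    where [c_x] stands for the constant declared for that projection
    (a hypothetical name here), [proj_true] records that the field is a
    real field rather than a let-in, and [proj_body] is [None] when the
    projection could not be defined.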
*) let declare_projections indsp univs ?(kind=Decls.StructureComponent) binder_name flags fieldimpls fields = let env = Global.env() in let (mib,mip) = Global.lookup_inductive indsp in let poly = Declareops.inductive_is_polymorphic mib in let uinstance = match fst univs with | Polymorphic_entry uctx -> Univ.UContext.instance uctx | Monomorphic_entry -> Univ.Instance.empty in let paramdecls = Inductive.inductive_paramdecls (mib, uinstance) in let r = mkIndU (indsp,uinstance) in let rp = applist (r, Context.Rel.instance_list mkRel 0 paramdecls) in let paramargs = Context.Rel.instance_list mkRel 1 paramdecls in (*def in [[params;x:rp]]*) let x = make_annot (Name binder_name) mip.mind_relevance in let fields = instantiate_possibly_recursive_type (fst indsp) uinstance mib.mind_ntypes paramdecls fields in let lifted_fields = Vars.lift_rel_context 1 fields in let primitive = match mib.mind_record with | PrimRecord _ -> true | FakeRecord | NotRecord -> false in let (_,_,canonical_projections,_) = List.fold_left3 (build_proj env mib indsp primitive x rp lifted_fields ~poly paramdecls paramargs ~uinstance ~kind ~univs) (List.length fields,0,[],[]) flags (List.rev fields) (List.rev fieldimpls) in List.rev canonical_projections open Typeclasses let check_template ~template ~poly ~univs ~params { Data.id; rdata = { DataR.min_univ; fields; _ }; _ } = let template_candidate () = (* we use some dummy values for the arities in the rel_context as univs_of_constr doesn't care about localassums and getting the real values is too annoying *) let add_levels c levels = Univ.Level.Set.union levels (Vars.universes_of_constr c) in let param_levels = List.fold_left (fun levels d -> match d with | LocalAssum _ -> levels | LocalDef (_,b,t) -> add_levels b (add_levels t levels)) Univ.Level.Set.empty params in let ctor_levels = List.fold_left (fun univs d -> let univs = RelDecl.fold_constr (fun c univs -> add_levels c univs) d univs in univs) param_levels fields in ComInductive.template_polymorphism_candidate ~ctor_levels univs params (Some (Sorts.sort_of_univ min_univ)) in match template with | Some template, _ -> (* templateness explicitly requested *) if poly && template then user_err Pp.(strbrk "template and polymorphism not compatible"); template | None, template -> (* auto detect template *) ComInductive.should_auto_template id (template && template_candidate ()) let load_structure i (_, structure) = Structure.register structure let cache_structure o = load_structure 1 o let subst_structure (subst, obj) = Structure.subst subst obj let discharge_structure (_, x) = Some x let rebuild_structure s = Structure.rebuild (Global.env()) s let inStruc : Structure.t -> Libobject.obj = let open Libobject in declare_object {(default_object "STRUCTURE") with cache_function = cache_structure; load_function = load_structure; subst_function = subst_structure; classify_function = (fun x -> Substitute x); discharge_function = discharge_structure; rebuild_function = rebuild_structure; } let declare_structure_entry o = Lib.add_anonymous_leaf (inStruc o) (** In the type of every projection, the record is bound to a variable named using the first character of the record type. We rename it to avoid collisions with names already used in the field types. *) (** Get all names bound at the head of [t]. 
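    For example (illustrative): on a type of the shape
    [forall (n : nat) (v : Vec n), P n v] this collects the set {n, v};
    anonymous binders contribute nothing, and the traversal stops at the
    first head that is not a product (in particular it does not look
    through let-ins).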
*) let rec add_bound_names_constr (names : Id.Set.t) (t : constr) : Id.Set.t = match destProd t with | (b, _, t) -> let names = match b.binder_name with | Name.Anonymous -> names | Name.Name n -> Id.Set.add n names in add_bound_names_constr names t | exception DestKO -> names (** Get all names bound in any record field. *) let bound_names_rdata { DataR.fields; _ } : Id.Set.t = let add_names names field = add_bound_names_constr names (RelDecl.get_type field) in List.fold_left add_names Id.Set.empty fields (** Pick a variable name for a record, avoiding names bound in its fields. *) let data_name { Data.id; Data.rdata; _ } = let name = Id.of_string (Unicode.lowercase_first_char (Id.to_string id)) in Namegen.next_ident_away name (bound_names_rdata rdata) (** Main record declaration part: The entry point is [definition_structure], which will match on the declared [kind] and then either follow the regular record declaration path to [declare_structure] or handle the record as a class declaration with [declare_class]. *) (** [declare_structure] does two principal things: - prepares and declares the low-level (mutual) inductive corresponding to [record_data] - prepares and declares the corresponding record projections, mainly taken care of by [declare_projections] *) let declare_structure ~cumulative finite ~univs ~variances ~primitive_proj paramimpls params template ?(kind=Decls.StructureComponent) ?name (record_data : Data.t list) = let nparams = List.length params in let (univs, ubinders) = univs in let poly, projunivs = match univs with | UState.Monomorphic_entry _ -> false, Entries.Monomorphic_entry | UState.Polymorphic_entry uctx -> true, Entries.Polymorphic_entry uctx in let binder_name = match name with | None -> Array.map_of_list data_name record_data | Some n -> n in let ntypes = List.length record_data in let mk_block i { Data.id; idbuild; rdata = { DataR.min_univ; arity; fields; _ }; _ } = let nfields = List.length fields in let args = Context.Rel.instance_list mkRel nfields params in let ind = applist (mkRel (ntypes - i + nparams + nfields), args) in let type_constructor = it_mkProd_or_LetIn ind fields in { mind_entry_typename = id; mind_entry_arity = arity; mind_entry_consnames = [idbuild]; mind_entry_lc = [type_constructor] } in let blocks = List.mapi mk_block record_data in let template = List.for_all (check_template ~template ~univs ~poly ~params) record_data in let primitive = primitive_proj && List.for_all (fun { Data.rdata = { DataR.fields; _ }; _ } -> List.exists is_local_assum fields) record_data in let globnames, univs = match univs with | UState.Monomorphic_entry ctx -> if template then (univs, ubinders), Template_ind_entry ctx else let () = DeclareUctx.declare_universe_context ~poly:false ctx in (univs, ubinders), Monomorphic_ind_entry | UState.Polymorphic_entry ctx -> (univs, UnivNames.empty_binders), Polymorphic_ind_entry ctx in let variance = ComInductive.variance_of_entry ~cumulative ~variances univs in let mie = { mind_entry_params = params; mind_entry_record = Some (if primitive then Some binder_name else None); mind_entry_finite = finite; mind_entry_inds = blocks; mind_entry_private = None; mind_entry_universes = univs; mind_entry_variance = variance; } in let impls = List.map (fun _ -> paramimpls, []) record_data in let kn = DeclareInd.declare_mutual_inductive_with_eliminations mie globnames impls ~primitive_expected:primitive_proj in let map i { Data.is_coercion; coers; rdata = { DataR.implfs; fields; _}; _ } = let rsp = (kn, i) in (* This is ind path of idstruc 
*) let cstr = (rsp, 1) in let projections = declare_projections rsp (projunivs,ubinders) ~kind binder_name.(i) coers implfs fields in let build = GlobRef.ConstructRef cstr in let () = if is_coercion then ComCoercion.try_add_new_coercion build ~local:false ~poly in let struc = Structure.make (Global.env ()) rsp projections in let () = declare_structure_entry struc in rsp in List.mapi map record_data let implicits_of_context ctx = List.map (fun name -> CAst.make (Some (name,true))) (List.rev (Anonymous :: (List.map RelDecl.get_name ctx))) let build_class_constant ~univs ~rdata ~primitive_proj field implfs params paramimpls coers binder id proj_name = let class_body = it_mkLambda_or_LetIn field params in let class_type = it_mkProd_or_LetIn rdata.DataR.arity params in let class_entry = Declare.definition_entry ~types:class_type ~univs class_body in let cst = Declare.declare_constant ~name:id (Declare.DefinitionEntry class_entry) ~kind:Decls.(IsDefinition Definition) in let inst, univs = match univs with | UState.Monomorphic_entry _, ubinders -> Univ.Instance.empty, (UState.Monomorphic_entry Univ.ContextSet.empty, ubinders) | UState.Polymorphic_entry uctx, _ -> Univ.UContext.instance uctx, univs in let cstu = (cst, inst) in let inst_type = appvectc (mkConstU cstu) (Termops.rel_vect 0 (List.length params)) in let proj_type = it_mkProd_or_LetIn (mkProd(binder, inst_type, lift 1 field)) params in let proj_body = it_mkLambda_or_LetIn (mkLambda (binder, inst_type, mkRel 1)) params in let proj_entry = Declare.definition_entry ~types:proj_type ~univs proj_body in let proj_cst = Declare.declare_constant ~name:proj_name (Declare.DefinitionEntry proj_entry) ~kind:Decls.(IsDefinition Definition) in let cref = GlobRef.ConstRef cst in Impargs.declare_manual_implicits false cref paramimpls; Impargs.declare_manual_implicits false (GlobRef.ConstRef proj_cst) (List.hd implfs); Classes.set_typeclass_transparency ~locality:Hints.SuperGlobal [Tacred.EvalConstRef cst] false; let sub = List.hd coers in let m = { meth_name = Name proj_name; meth_info = sub; meth_const = Some proj_cst; } in [cref, [m]] let build_record_constant ~rdata ~univs ~variances ~cumulative ~template ~primitive_proj fields params paramimpls coers id idbuild binder_name = let record_data = { Data.id ; idbuild ; is_coercion = false ; coers = List.map (fun _ -> { pf_subclass = false ; pf_canonical = true }) fields ; rdata } in let inds = declare_structure ~cumulative Declarations.BiFinite ~univs ~variances ~primitive_proj paramimpls params template ~kind:Decls.Method ~name:[|binder_name|] [record_data] in let map ind = let map decl b y = { meth_name = RelDecl.get_name decl; meth_info = b; meth_const = y; } in let l = List.map3 map (List.rev fields) coers (Structure.find_projections ind) in GlobRef.IndRef ind, l in List.map map inds (** [declare_class] will prepare and declare a [Class]. This is done in 2 steps: 1. two markely different paths are followed depending on whether the class declaration refers to a constant "definitional classes" or to a record, that is to say: Class foo := bar : T. which is equivalent to Definition foo := T. Definition bar (x:foo) : T := x. Existing Class foo. vs Class foo := { ... }. 2. declare the class, using the information from 1. in the form of [Classes.typeclass] *) let declare_class def ~cumulative ~univs ~variances ~primitive_proj id idbuild paramimpls params rdata template ?(kind=Decls.StructureComponent) coers = let implfs = (* Make the class implicit in the projections, and the params if applicable. 
*) let impls = implicits_of_context params in List.map (fun x -> impls @ x) rdata.DataR.implfs in let rdata = { rdata with DataR.implfs } in let binder_name = Namegen.next_ident_away id (Termops.vars_of_env (Global.env())) in let fields = rdata.DataR.fields in let data = match fields with | [ LocalAssum ({binder_name=Name proj_name} as binder, field) | LocalDef ({binder_name=Name proj_name} as binder, _, field) ] when def -> let binder = {binder with binder_name=Name binder_name} in build_class_constant ~rdata ~univs ~primitive_proj field implfs params paramimpls coers binder id proj_name | _ -> build_record_constant ~rdata ~univs ~variances ~cumulative ~template ~primitive_proj fields params paramimpls coers id idbuild binder_name in let univs, params, fields = match fst univs with | UState.Polymorphic_entry uctx -> let usubst, auctx = Univ.abstract_universes uctx in let usubst = Univ.make_instance_subst usubst in let map c = Vars.subst_univs_level_constr usubst c in let fields = Context.Rel.map map fields in let params = Context.Rel.map map params in auctx, params, fields | UState.Monomorphic_entry _ -> Univ.AbstractContext.empty, params, fields in let map (impl, projs) = let k = { cl_univs = univs; cl_impl = impl; cl_strict = typeclasses_strict (); cl_unique = typeclasses_unique (); cl_context = params; cl_props = fields; cl_projs = projs } in let env = Global.env () in let sigma = Evd.from_env env in Classes.add_class env sigma k; impl in List.map map data let add_constant_class env sigma cst = let ty, univs = Typeops.type_of_global_in_context env (GlobRef.ConstRef cst) in let r = (Environ.lookup_constant cst env).const_relevance in let ctx, _ = decompose_prod_assum ty in let args = Context.Rel.instance Constr.mkRel 0 ctx in let t = mkApp (mkConstU (cst, Univ.make_abstract_instance univs), args) in let tc = { cl_univs = univs; cl_impl = GlobRef.ConstRef cst; cl_context = ctx; cl_props = [LocalAssum (make_annot Anonymous r, t)]; cl_projs = []; cl_strict = typeclasses_strict (); cl_unique = typeclasses_unique () } in Classes.add_class env sigma tc; Classes.set_typeclass_transparency ~locality:Hints.SuperGlobal [Tacred.EvalConstRef cst] false let add_inductive_class env sigma ind = let mind, oneind = Inductive.lookup_mind_specif env ind in let k = let ctx = oneind.mind_arity_ctxt in let univs = Declareops.inductive_polymorphic_context mind in let inst = Univ.make_abstract_instance univs in let ty = Inductive.type_of_inductive ((mind, oneind), inst) in let r = oneind.mind_relevance in { cl_univs = univs; cl_impl = GlobRef.IndRef ind; cl_context = ctx; cl_props = [LocalAssum (make_annot Anonymous r, ty)]; cl_projs = []; cl_strict = typeclasses_strict (); cl_unique = typeclasses_unique () } in Classes.add_class env sigma k let warn_already_existing_class = CWarnings.create ~name:"already-existing-class" ~category:"automation" Pp.(fun g -> Printer.pr_global g ++ str " is already declared as a typeclass.") let declare_existing_class g = let env = Global.env () in let sigma = Evd.from_env env in if Typeclasses.is_class g then warn_already_existing_class g else match g with | GlobRef.ConstRef x -> add_constant_class env sigma x | GlobRef.IndRef x -> add_inductive_class env sigma x | _ -> user_err (Pp.str"Unsupported class type, only constants and inductives are allowed.") open Vernacexpr module Ast = struct type t = { name : Names.lident ; is_coercion : coercion_flag ; binders: local_binder_expr list ; cfs : (local_decl_expr * record_field_attr) list ; idbuild : Id.t ; sort : constr_expr option 
} let to_datai { name; is_coercion; cfs; idbuild; sort } = let fs = List.map fst cfs in { DataI.name = name.CAst.v ; arity = sort ; nots = List.map (fun (_, { rf_notation }) -> List.map Metasyntax.prepare_where_notation rf_notation) cfs ; fs } end let check_unique_names records = let extract_name acc (rf_decl, _) = match rf_decl with Vernacexpr.AssumExpr({CAst.v=Name id},_,_) -> id::acc | Vernacexpr.DefExpr ({CAst.v=Name id},_,_,_) -> id::acc | _ -> acc in let allnames = List.fold_left (fun acc { Ast.name; cfs; _ } -> name.CAst.v :: (List.fold_left extract_name acc cfs)) [] records in match List.duplicates Id.equal allnames with | [] -> () | id :: _ -> user_err (str "Two objects have the same name" ++ spc () ++ quote (Id.print id)) let check_priorities kind records = let isnot_class = match kind with Class false -> false | _ -> true in let has_priority { Ast.cfs; _ } = List.exists (fun (_, { rf_priority }) -> not (Option.is_empty rf_priority)) cfs in if isnot_class && List.exists has_priority records then user_err Pp.(str "Priorities only allowed for type class substructures") let extract_record_data records = let data = List.map Ast.to_datai records in let pss = List.map (fun { Ast.binders; _ } -> binders) records in let ps = match pss with | [] -> CErrors.anomaly (str "Empty record block") | ps :: rem -> let eq_local_binders bl1 bl2 = List.equal local_binder_eq bl1 bl2 in let () = if not (List.for_all (eq_local_binders ps) rem) then user_err (str "Parameters should be syntactically the \ same for each inductive type.") in ps in ps, data (* declaring structures, common data to refactor *) let class_struture ~cumulative ~template ~impargs ~univs ~params ~primitive_proj def records data = let { Ast.name; cfs; idbuild; _ }, rdata = match records, data with | [r], [d] -> r, d | _, _ -> CErrors.user_err (str "Mutual definitional classes are not handled") in let coers = List.map (fun (_, { rf_subclass; rf_priority }) -> match rf_subclass with | Vernacexpr.BackInstance -> Some {hint_priority = rf_priority; hint_pattern = None} | Vernacexpr.NoInstance -> None) cfs in declare_class def ~cumulative ~univs ~primitive_proj name.CAst.v idbuild impargs params rdata template coers let regular_structure ~cumulative ~template ~impargs ~univs ~variances ~params ~finite ~primitive_proj records data = let adjust_impls impls = impargs @ [CAst.make None] @ impls in let data = List.map (fun ({ DataR.implfs; _ } as d) -> { d with DataR.implfs = List.map adjust_impls implfs }) data in (* let map (min_univ, arity, fieldimpls, fields) { Ast.name; is_coercion; cfs; idbuild; _ } = *) let map rdata { Ast.name; is_coercion; cfs; idbuild; _ } = let coers = List.map (fun (_, { rf_subclass ; rf_canonical }) -> { pf_subclass = (match rf_subclass with Vernacexpr.BackInstance -> true | Vernacexpr.NoInstance -> false); pf_canonical = rf_canonical }) cfs in { Data.id = name.CAst.v; idbuild; rdata; is_coercion; coers } in let data = List.map2 map data records in let inds = declare_structure ~cumulative finite ~univs ~variances ~primitive_proj impargs params template data in List.map (fun ind -> GlobRef.IndRef ind) inds (** [fs] corresponds to fields and [ps] to parameters; [coers] is a list telling if the corresponding fields must me declared as coercions or subinstances. 
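(* Illustrative sketch, not part of the Coq sources: [coers] above is a
   per-field list of flags aligned with the fields, and a declaration pass
   simply walks the two lists in lockstep. The names below are invented for
   the example; List.combine assumes both lists have the same length. *)
let coercion_fields (fields : string list) (coers : bool list) : string list =
  List.combine fields coers
  |> List.filter_map (fun (f, is_coercion) -> if is_coercion then Some f else None)

let _ = coercion_fields ["carrier"; "op"; "unit"] [true; false; false]
(* = ["carrier"] *)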
*) let definition_structure udecl kind ~template ~cumulative ~poly ~primitive_proj finite (records : Ast.t list) : GlobRef.t list = let () = check_unique_names records in let () = check_priorities kind records in let ps, data = extract_record_data records in let auto_template, impargs, univs, variances, params, data = (* In theory we should be able to use [Notation.with_notation_protection], due to the call to Metasyntax.set_notation_for_interpretation, however something is messing state beyond that. *) Vernacstate.System.protect (fun () -> typecheck_params_and_fields (kind = Class true) poly udecl ps data) () in let template = template, auto_template in match kind with | Class def -> class_struture ~template ~impargs ~cumulative ~params ~univs ~variances ~primitive_proj def records data | Inductive_kw | CoInductive | Variant | Record | Structure -> regular_structure ~cumulative ~template ~impargs ~univs ~variances ~params ~finite ~primitive_proj records data module Internal = struct type nonrec projection_flags = projection_flags = { pf_subclass: bool; pf_canonical: bool; } let declare_projections = declare_projections let declare_structure_entry = declare_structure_entry end coq-8.15.0/vernac/record.mli000066400000000000000000000035011417001151100156140ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* inductive_kind -> template:bool option -> cumulative:bool -> poly:bool -> primitive_proj:bool -> Declarations.recursivity_kind -> Ast.t list -> GlobRef.t list val declare_existing_class : GlobRef.t -> unit (* Implementation internals, consult Coq developers before using; current user Elpi, see https://github.com/LPCIC/coq-elpi/pull/151 *) module Internal : sig type projection_flags = { pf_subclass: bool; pf_canonical: bool; } val declare_projections : Names.inductive -> Entries.universes_entry * UnivNames.universe_binders -> ?kind:Decls.definition_object_kind -> Names.Id.t -> projection_flags list -> Impargs.manual_implicits list -> Constr.rel_context -> Structure.projection list val declare_structure_entry : Structure.t -> unit end coq-8.15.0/vernac/retrieveObl.ml000066400000000000000000000237561417001151100164650ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * INRIA, CNRS and contributors - Copyright 1999-2018 *) (* if Evd.is_obligation_evar evm key then () else let loc, k = Evd.evar_source key evm in Pretype_errors.error_unsolvable_implicit ?loc env evm key None) (Evd.undefined_map evm) type obligation_info = ( Names.Id.t * Constr.types * Evar_kinds.t Loc.located * (bool * Evar_kinds.obligation_definition_status) * Int.Set.t * unit Proofview.tactic option ) array type oblinfo = { ev_name : int * Id.t ; ev_hyps : EConstr.named_context ; ev_status : bool * Evar_kinds.obligation_definition_status ; ev_chop : int option ; ev_src : Evar_kinds.t Loc.located ; ev_typ : Constr.types ; ev_tac : unit Proofview.tactic option ; ev_deps : Int.Set.t } (** Substitute evar references in t using de Bruijn indices, where n binders were passed through. 
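(* Illustrative sketch, not part of the Coq sources: the point of threading
   the number of binders passed through is that de Bruijn indices pointing
   outside the current scope must be shifted, while locally bound ones are
   left alone. The toy type [term] and [lift] below are a simplified version
   of what the lifting done on real constrs looks like. *)
type term =
  | Rel of int          (* de Bruijn index, 1-based as in the kernel *)
  | Lam of term
  | App of term * term

let lift n t =
  let rec go cutoff = function
    | Rel i -> if i >= cutoff then Rel (i + n) else Rel i
    | Lam b -> Lam (go (cutoff + 1) b)      (* one more binder was passed *)
    | App (f, a) -> App (go cutoff f, go cutoff a)
  in
  go 1 t

let _ = lift 1 (Lam (App (Rel 1, Rel 2)))
(* = Lam (App (Rel 1, Rel 3)): the bound Rel 1 is untouched, the free Rel 2 is shifted *)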
*) let succfix (depth, fixrels) = (succ depth, List.map succ fixrels) let subst_evar_constr evm evs n idf t = let seen = ref Int.Set.empty in let transparent = ref Id.Set.empty in let evar_info id = CList.assoc_f Evar.equal id evs in let rec substrec (depth, fixrels) c = match EConstr.kind evm c with | Constr.Evar (k, args) -> let {ev_name = id, idstr; ev_hyps = hyps; ev_chop = chop} = try evar_info k with Not_found -> CErrors.anomaly ~label:"eterm" Pp.( str "existential variable " ++ int (Evar.repr k) ++ str " not found.") in seen := Int.Set.add id !seen; (* Evar arguments are created in inverse order, and we must not apply to defined ones (i.e. LetIn's) *) let args = let n = match chop with None -> 0 | Some c -> c in let l, r = CList.chop n (List.rev args) in List.rev r in let args = let rec aux hyps args acc = let open Context.Named.Declaration in match (hyps, args) with | LocalAssum _ :: tlh, c :: tla -> aux tlh tla (substrec (depth, fixrels) c :: acc) | LocalDef _ :: tlh, _ :: tla -> aux tlh tla acc | [], [] -> acc | _, _ -> acc (*failwith "subst_evars: invalid argument"*) in aux hyps args [] in if List.exists (fun x -> match EConstr.kind evm x with | Constr.Rel n -> Int.List.mem n fixrels | _ -> false) args then transparent := Id.Set.add idstr !transparent; EConstr.mkApp (idf idstr, Array.of_list args) | Constr.Fix _ -> EConstr.map_with_binders evm succfix substrec (depth, 1 :: fixrels) c | _ -> EConstr.map_with_binders evm succfix substrec (depth, fixrels) c in let t' = substrec (0, []) t in (EConstr.to_constr evm t', !seen, !transparent) (** Substitute variable references in t using de Bruijn indices, where n binders were passed through. *) let subst_vars acc n t = let var_index id = Util.List.index Id.equal id acc in let rec substrec depth c = match Constr.kind c with | Constr.Var v -> ( try Constr.mkRel (depth + var_index v) with Not_found -> c ) | _ -> Constr.map_with_binders succ substrec depth c in substrec 0 t (** Rewrite type of an evar ([ H1 : t1, ... Hn : tn |- concl ]) to a product : forall H1 : t1, ..., forall Hn : tn, concl. Changes evars and hypothesis references to variable references. 
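(* Illustrative sketch, not part of the Coq sources: generalizing a
   conclusion over its hypotheses is a right fold that wraps one product per
   hypothesis. The type [ty] and [generalize] are invented for the example;
   the real code additionally substitutes evars and named hypotheses. *)
type ty =
  | Atom of string
  | Forall of string * ty * ty   (* forall (x : t), u *)

let generalize (hyps : (string * ty) list) (concl : ty) : ty =
  List.fold_right (fun (h, t) acc -> Forall (h, t, acc)) hyps concl

let _ = generalize ["H1", Atom "nat"; "H2", Atom "bool"] (Atom "P")
(* = Forall ("H1", Atom "nat", Forall ("H2", Atom "bool", Atom "P")) *)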
*) let etype_of_evar evm evs hyps concl = let open Context.Named.Declaration in let rec aux acc n = function | decl :: tl -> ( let t', s, trans = subst_evar_constr evm evs n EConstr.mkVar (Context.Named.Declaration.get_type decl) in let t'' = subst_vars acc 0 t' in let rest, s', trans' = aux (Context.Named.Declaration.get_id decl :: acc) (succ n) tl in let s' = Int.Set.union s s' in let trans' = Id.Set.union trans trans' in match decl with | LocalDef (id, c, _) -> let c', s'', trans'' = subst_evar_constr evm evs n EConstr.mkVar c in let c' = subst_vars acc 0 c' in ( Term.mkNamedProd_or_LetIn (LocalDef (id, c', t'')) rest , Int.Set.union s'' s' , Id.Set.union trans'' trans' ) | LocalAssum (id, _) -> (Term.mkNamedProd_or_LetIn (LocalAssum (id, t'')) rest, s', trans') ) | [] -> let t', s, trans = subst_evar_constr evm evs n EConstr.mkVar concl in (subst_vars acc 0 t', s, trans) in aux [] 0 (List.rev hyps) let trunc_named_context n ctx = let len = List.length ctx in CList.firstn (len - n) ctx let rec chop_product n t = let pop t = Vars.lift (-1) t in if Int.equal n 0 then Some t else match Constr.kind t with | Constr.Prod (_, _, b) -> if Vars.noccurn 1 b then chop_product (pred n) (pop b) else None | _ -> None let evar_dependencies evm oev = let one_step deps = Evar.Set.fold (fun ev s -> let evi = Evd.find evm ev in let deps' = Evd.evars_of_filtered_evar_info evm evi in if Evar.Set.mem oev deps' then invalid_arg ( "Ill-formed evar map: cycle detected for evar " ^ Pp.string_of_ppcmds @@ Evar.print oev ) else Evar.Set.union deps' s) deps deps in let rec aux deps = let deps' = one_step deps in if Evar.Set.equal deps deps' then deps else aux deps' in aux (Evar.Set.singleton oev) let move_after ((id, ev, deps) as obl) l = let rec aux restdeps = function | ((id', _, _) as obl') :: tl -> let restdeps' = Evar.Set.remove id' restdeps in if Evar.Set.is_empty restdeps' then obl' :: obl :: tl else obl' :: aux restdeps' tl | [] -> [obl] in aux (Evar.Set.remove id deps) l let sort_dependencies evl = let rec aux l found list = match l with | ((id, ev, deps) as obl) :: tl -> let found' = Evar.Set.union found (Evar.Set.singleton id) in if Evar.Set.subset deps found' then aux tl found' (obl :: list) else aux (move_after obl tl) found list | [] -> List.rev list in aux evl Evar.Set.empty [] let retrieve_obligations env name evm fs ?status t ty = (* 'Serialize' the evars *) let nc = Environ.named_context env in let nc_len = Context.Named.length nc in let evm = Evarutil.nf_evar_map_undefined evm in let evl = Evarutil.non_instantiated evm in let evl = Evar.Map.bindings evl in let evl = List.map (fun (id, ev) -> (id, ev, evar_dependencies evm id)) evl in let sevl = sort_dependencies evl in let evl = List.map (fun (id, ev, _) -> (id, ev)) sevl in let evn = let i = ref (-1) in List.rev_map (fun (id, ev) -> incr i; ( id , ( !i , Id.of_string (Id.to_string name ^ "_obligation_" ^ string_of_int (succ !i)) ) , ev )) evl in let evts = (* Remove existential variables in types and build the corresponding products *) List.fold_right (fun (id, (n, nstr), ev) l -> let hyps = Evd.evar_filtered_context ev in let hyps = trunc_named_context nc_len hyps in let evtyp, deps, transp = etype_of_evar evm l hyps ev.Evd.evar_concl in let evtyp, hyps, chop = match chop_product fs evtyp with | Some t -> (t, trunc_named_context fs hyps, fs) | None -> (evtyp, hyps, 0) in let loc, k = Evd.evar_source id evm in let status = match k with | Evar_kinds.QuestionMark {Evar_kinds.qm_obligation = o} -> o | _ -> ( match status with | Some o -> o | None 
-> Evar_kinds.Define (not (Program.get_proofs_transparency ())) ) in let force_status, status, chop = match status with | Evar_kinds.Define true as stat -> if not (Int.equal chop fs) then (true, Evar_kinds.Define false, None) else (false, stat, Some chop) | s -> (false, s, None) in let info = { ev_name = (n, nstr) ; ev_hyps = hyps ; ev_status = (force_status, status) ; ev_chop = chop ; ev_src = (loc, k) ; ev_typ = evtyp ; ev_deps = deps ; ev_tac = None } in (id, info) :: l) evn [] in let t', _, transparent = (* Substitute evar refs in the term by variables *) subst_evar_constr evm evts 0 EConstr.mkVar t in let ty, _, _ = subst_evar_constr evm evts 0 EConstr.mkVar ty in let evars = List.map (fun (ev, info) -> let { ev_name = _, name ; ev_status = force_status, status ; ev_src = src ; ev_typ = typ ; ev_deps = deps ; ev_tac = tac } = info in let force_status, status = match status with | Evar_kinds.Define true when Id.Set.mem name transparent -> (true, Evar_kinds.Define false) | _ -> (force_status, status) in (name, typ, src, (force_status, status), deps, tac)) evts in let evnames = List.map (fun (ev, info) -> (ev, snd info.ev_name)) evts in let evmap f c = Util.pi1 (subst_evar_constr evm evts 0 f c) in (Array.of_list (List.rev evars), (evnames, evmap), t', ty) coq-8.15.0/vernac/retrieveObl.mli000066400000000000000000000035531417001151100166270ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * INRIA, CNRS and contributors - Copyright 1999-2018 *) (* Evd.evar_map -> unit type obligation_info = ( Names.Id.t * Constr.types * Evar_kinds.t Loc.located * (bool * Evar_kinds.obligation_definition_status) * Int.Set.t * unit Proofview.tactic option ) array (** ident, type, location of the original evar, (opaque or transparent, expand or define), dependencies as indexes into the array, tactic to solve it *) val retrieve_obligations : Environ.env -> Names.Id.t -> Evd.evar_map -> int -> ?status:Evar_kinds.obligation_definition_status -> EConstr.t -> EConstr.types -> obligation_info * ( (Evar.t * Names.Id.t) list * ((Names.Id.t -> EConstr.t) -> EConstr.t -> Constr.t) ) * Constr.t * Constr.t (** [retrieve_obligations env id sigma fs ?status body type] returns [obls, (evnames, evmap), nbody, ntype] a list of obligations built from evars in [body, type]. [fs] is the number of function prototypes to try to clear from evars contexts. [evnames, evmap) is the list of names / substitution functions used to program with holes. This is not used in Coq, but in the equations plugin; [evnames] is actually redundant with the information contained in [obls] *) coq-8.15.0/vernac/search.ml000066400000000000000000000325461417001151100154450ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Decls.logical_kind option -> env -> Evd.evar_map -> constr -> bool type display_function = GlobRef.t -> Decls.logical_kind option -> env -> constr -> unit (* This option restricts the output of [SearchPattern ...], etc. to the names of the symbols matching the query, separated by a newline. This type of output is useful for editors (like emacs), to generate a list of completion candidates without having to parse through the types of all symbols. 
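(* Illustrative sketch, not part of the Coq sources: the names-only mode
   described above boils down to choosing a printer per result. The record
   type and function names are invented for the example. *)
type result = { name : string; typ : string }

let print_results ~names_only results =
  if names_only then
    List.iter (fun r -> print_endline r.name) results
  else
    List.iter (fun r -> Printf.printf "%s : %s\n" r.name r.typ) results

let () =
  print_results ~names_only:true
    [ { name = "Nat.add_comm"; typ = "forall n m, n + m = m + n" } ]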
*) type glob_search_item = | GlobSearchSubPattern of glob_search_where * bool * constr_pattern | GlobSearchString of string | GlobSearchKind of Decls.logical_kind | GlobSearchFilter of (GlobRef.t -> bool) type glob_search_request = | GlobSearchLiteral of glob_search_item | GlobSearchDisjConj of (bool * glob_search_request) list list module SearchBlacklist = Goptions.MakeStringTable (struct let key = ["Search";"Blacklist"] let title = "Current search blacklist : " let member_message s b = str "Search blacklist does " ++ (if b then mt () else str "not ") ++ str "include " ++ str s end) (* The functions iter_constructors and iter_declarations implement the behavior needed for the Coq searching commands. These functions take as first argument the procedure that will be called to treat each entry. This procedure receives the name of the object, the assumptions that will make it possible to print its type, and the constr term that represent its type. *) let iter_constructors indsp u fn env nconstr = for i = 1 to nconstr do let typ = Inductiveops.type_of_constructor env ((indsp, i), u) in fn (GlobRef.ConstructRef (indsp, i)) None env typ done (* FIXME: this is a Libobject hack that should be replaced with a proper registration mechanism. *) module DynHandle = Libobject.Dyn.Map(struct type 'a t = 'a -> unit end) let handle h (Libobject.Dyn.Dyn (tag, o)) = match DynHandle.find tag h with | f -> f o | exception Not_found -> () (* General search over declarations *) let generic_search env (fn : GlobRef.t -> Decls.logical_kind option -> env -> constr -> unit) = List.iter (fun d -> fn (GlobRef.VarRef (NamedDecl.get_id d)) None env (NamedDecl.get_type d)) (Environ.named_context env); let iter_obj (sp, kn) lobj = match lobj with | AtomicObject o -> let handler = DynHandle.add Declare.Internal.Constant.tag begin fun obj -> let cst = Global.constant_of_delta_kn kn in let gr = GlobRef.ConstRef cst in let (typ, _) = Typeops.type_of_global_in_context (Global.env ()) gr in let kind = Declare.Internal.Constant.kind obj in fn gr (Some kind) env typ end @@ DynHandle.add DeclareInd.Internal.objInductive begin fun _ -> let mind = Global.mind_of_delta_kn kn in let mib = Global.lookup_mind mind in let iter_packet i mip = let ind = (mind, i) in let u = Univ.make_abstract_instance (Declareops.inductive_polymorphic_context mib) in let i = (ind, u) in let typ = Inductiveops.type_of_inductive env i in let () = fn (GlobRef.IndRef ind) None env typ in let len = Array.length mip.mind_user_lc in iter_constructors ind u fn env len in Array.iteri iter_packet mib.mind_packets end @@ DynHandle.empty in handle handler o | _ -> () in try Declaremods.iter_all_segments iter_obj with Not_found -> () (** This module defines a preference on constrs in the form of a [compare] function (preferred constr must be big for this functions, so preferences such as small constr must use a reversed order). This priority will be used to order search results and propose first results which are more likely to be relevant to the query, this is why the type [t] contains the other elements required of a search. *) module ConstrPriority = struct (* The priority is memoised here. Because of the very localised use of this module, it is not worth it making a convenient interface. 
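(* Illustrative sketch, not part of the Coq sources: the ranking defined
   below scores a term by minus (3 * number of distinct symbols + size), so
   that sorting by decreasing score lists the smallest, most focused results
   first. The toy term type here is a stand-in for real constrs. *)
module StrSet = Set.Make (String)

type t = Sym of string | App of t * t

let rec size = function
  | Sym _ -> 1
  | App (f, a) -> 1 + size f + size a

let rec symbols acc = function
  | Sym s -> StrSet.add s acc
  | App (f, a) -> symbols (symbols acc f) a

let priority t = -(3 * StrSet.cardinal (symbols StrSet.empty t) + size t)

(* Highest score, i.e. smallest term, comes first. *)
let rank results = List.sort (fun a b -> compare (priority b) (priority a)) results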
*) type t = GlobRef.t * Decls.logical_kind option * Environ.env * Constr.t * priority and priority = int module ConstrSet = CSet.Make(Constr) (** A measure of the size of a term *) let rec size t = Constr.fold (fun s t -> 1 + s + size t) 0 t (** Set of the "symbols" (definitions, inductives, constructors) which appear in a term. *) let rec symbols acc t = let open Constr in match kind t with | Const _ | Ind _ | Construct _ -> ConstrSet.add t acc | _ -> Constr.fold symbols acc t (** The number of distinct "symbols" (see {!symbols}) which appear in a term. *) let num_symbols t = ConstrSet.(cardinal (symbols empty t)) let priority gref t : priority = -(3*(num_symbols t) + size t) let compare (_,_,_,_,p1) (_,_,_,_,p2) = pervasives_compare p1 p2 end module PriorityQueue = Heap.Functional(ConstrPriority) let rec iter_priority_queue q fn = (* use an option to make the function tail recursive. Will be obsoleted with Ocaml 4.02 with the [match … with | exception …] syntax. *) let next = begin try Some (PriorityQueue.maximum q) with Heap.EmptyHeap -> None end in match next with | Some (gref,kind,env,t,_) -> fn gref kind env t; iter_priority_queue (PriorityQueue.remove q) fn | None -> () let prioritize_search seq fn = let acc = ref PriorityQueue.empty in let iter gref kind env t = let p = ConstrPriority.priority gref t in acc := PriorityQueue.add (gref,kind,env,t,p) !acc in let () = seq iter in iter_priority_queue !acc fn (** Filters *) (** This function tries to see whether the conclusion matches a pattern. FIXME: this is quite dummy, we may find a more efficient algorithm. *) let rec pattern_filter pat ref env sigma typ = let typ = Termops.strip_outer_cast sigma typ in if Constr_matching.is_matching env sigma pat typ then true else match EConstr.kind sigma typ with | Prod (_, _, typ) | LetIn (_, _, _, typ) -> pattern_filter pat ref env sigma typ | _ -> false let full_name_of_reference ref = let (dir,id) = repr_path (Nametab.path_of_global ref) in DirPath.to_string dir ^ "." 
^ Id.to_string id (** Whether a reference is blacklisted *) let blacklist_filter ref kind env sigma typ = let name = full_name_of_reference ref in let is_not_bl str = not (String.string_contains ~where:name ~what:str) in CString.Set.for_all is_not_bl (SearchBlacklist.v ()) let module_filter (mods, outside) ref kind env sigma typ = let sp = Nametab.path_of_global ref in let sl = dirpath sp in let is_outside md = not (is_dirpath_prefix_of md sl) in let is_inside md = is_dirpath_prefix_of md sl in if outside then List.for_all is_outside mods else List.is_empty mods || List.exists is_inside mods let name_of_reference ref = Id.to_string (Nametab.basename_of_global ref) let search_filter query gr kind env sigma typ = match query with | GlobSearchSubPattern (where,head,pat) -> let open Context.Rel.Declaration in let rec collect env hyps typ = match Constr.kind typ with | LetIn (na,b,t,c) -> collect (push_rel (LocalDef (na,b,t)) env) ((env,b) :: (env,t) :: hyps) c | Prod (na,t,c) -> collect (push_rel (LocalAssum (na,t)) env) ((env,t) :: hyps) c | _ -> (hyps,(env,typ)) in let typl= match where with | InHyp -> fst (collect env [] typ) | InConcl -> [snd (collect env [] typ)] | Anywhere -> if head then let hyps, ccl = collect env [] typ in ccl :: hyps else [env,typ] in List.exists (fun (env,typ) -> let f = if head then Constr_matching.is_matching_head else Constr_matching.is_matching_appsubterm ~closed:false in f env sigma pat (EConstr.of_constr typ)) typl | GlobSearchString s -> String.string_contains ~where:(name_of_reference gr) ~what:s | GlobSearchKind k -> (match kind with None -> false | Some k' -> k = k') | GlobSearchFilter f -> f gr (** SearchPattern *) let search_pattern env sigma pat mods pr_search = let filter ref kind env typ = module_filter mods ref kind env sigma typ && pattern_filter pat ref env sigma (EConstr.of_constr typ) && blacklist_filter ref kind env sigma typ in let iter ref kind env typ = if filter ref kind env typ then pr_search ref kind env typ in generic_search env iter (** SearchRewrite *) let eq () = Coqlib.(lib_ref "core.eq.type") let rewrite_pat1 pat = PApp (PRef (eq ()), [| PMeta None; pat; PMeta None |]) let rewrite_pat2 pat = PApp (PRef (eq ()), [| PMeta None; PMeta None; pat |]) let search_rewrite env sigma pat mods pr_search = let pat1 = rewrite_pat1 pat in let pat2 = rewrite_pat2 pat in let filter ref kind env typ = module_filter mods ref kind env sigma typ && (pattern_filter pat1 ref env sigma (EConstr.of_constr typ) || pattern_filter pat2 ref env sigma (EConstr.of_constr typ)) && blacklist_filter ref kind env sigma typ in let iter ref kind env typ = if filter ref kind env typ then pr_search ref kind env typ in generic_search env iter (** Search *) let search env sigma items mods pr_search = let filter ref kind env typ = let eqb b1 b2 = if b1 then b2 else not b2 in module_filter mods ref kind env sigma typ && let rec aux = function | GlobSearchLiteral i -> search_filter i ref kind env sigma typ | GlobSearchDisjConj l -> List.exists (List.for_all aux') l and aux' (b,s) = eqb b (aux s) in List.for_all aux' items && blacklist_filter ref kind env sigma typ in let iter ref kind env typ = if filter ref kind env typ then pr_search ref kind env typ in generic_search env iter type search_constraint = | Name_Pattern of Str.regexp | Type_Pattern of Pattern.constr_pattern | SubType_Pattern of Pattern.constr_pattern | In_Module of Names.DirPath.t | Include_Blacklist type 'a coq_object = { coq_object_prefix : string list; coq_object_qualid : string list; coq_object_object : 'a; 
} let interface_search env sigma = let rec extract_flags name tpe subtpe mods blacklist = function | [] -> (name, tpe, subtpe, mods, blacklist) | (Name_Pattern regexp, b) :: l -> extract_flags ((regexp, b) :: name) tpe subtpe mods blacklist l | (Type_Pattern pat, b) :: l -> extract_flags name ((pat, b) :: tpe) subtpe mods blacklist l | (SubType_Pattern pat, b) :: l -> extract_flags name tpe ((pat, b) :: subtpe) mods blacklist l | (In_Module id, b) :: l -> extract_flags name tpe subtpe ((id, b) :: mods) blacklist l | (Include_Blacklist, b) :: l -> extract_flags name tpe subtpe mods b l in fun flags -> let (name, tpe, subtpe, mods, blacklist) = extract_flags [] [] [] [] false flags in let filter_function ref env constr = let id = Names.Id.to_string (Nametab.basename_of_global ref) in let path = Libnames.dirpath (Nametab.path_of_global ref) in let toggle x b = if x then b else not b in let match_name (regexp, flag) = toggle (Str.string_match regexp id 0) flag in let match_type (pat, flag) = toggle (Constr_matching.is_matching env sigma pat (EConstr.of_constr constr)) flag in let match_subtype (pat, flag) = toggle (Constr_matching.is_matching_appsubterm ~closed:false env sigma pat (EConstr.of_constr constr)) flag in let match_module (mdl, flag) = toggle (Libnames.is_dirpath_prefix_of mdl path) flag in List.for_all match_name name && List.for_all match_type tpe && List.for_all match_subtype subtpe && List.for_all match_module mods && (blacklist || blacklist_filter ref kind env sigma constr) in let ans = ref [] in let print_function ref env constr = let fullpath = DirPath.repr (Nametab.dirpath_of_global ref) in let qualid = Nametab.shortest_qualid_of_global Id.Set.empty ref in let (shortpath, basename) = Libnames.repr_qualid qualid in let shortpath = DirPath.repr shortpath in (* [shortpath] is a suffix of [fullpath] and we're looking for the missing prefix *) let rec prefix full short accu = match full, short with | _, [] -> let full = List.rev_map Id.to_string full in (full, accu) | _ :: full, m :: short -> prefix full short (Id.to_string m :: accu) | _ -> assert false in let (prefix, qualid) = prefix fullpath shortpath [Id.to_string basename] in let answer = { coq_object_prefix = prefix; coq_object_qualid = qualid; coq_object_object = constr; } in ans := answer :: !ans; in let iter ref kind env typ = if filter_function ref env typ then print_function ref env typ in let () = generic_search env iter in !ans coq-8.15.0/vernac/search.mli000066400000000000000000000070171417001151100156110ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* bool) type glob_search_request = | GlobSearchLiteral of glob_search_item | GlobSearchDisjConj of (bool * glob_search_request) list list type filter_function = GlobRef.t -> Decls.logical_kind option -> env -> Evd.evar_map -> constr -> bool type display_function = GlobRef.t -> Decls.logical_kind option -> env -> constr -> unit (** {6 Generic filter functions} *) val blacklist_filter : filter_function (** Check whether a reference is blacklisted. 
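(* Illustrative sketch, not part of the Coq sources: a reference passes the
   blacklist when its fully qualified name contains none of the blacklisted
   substrings. The substring test below is hand-rolled because the OCaml
   standard library has no such function; names are invented for the example. *)
let contains ~where ~what =
  let lw = String.length where and lt = String.length what in
  let rec at i = i + lt <= lw && (String.sub where i lt = what || at (i + 1)) in
  lt = 0 || at 0

let not_blacklisted blacklist fullname =
  List.for_all (fun s -> not (contains ~where:fullname ~what:s)) blacklist

let _ = not_blacklisted ["_subproof"; "Private_"] "Coq.Init.Nat.add"  (* true *)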
*) val module_filter : DirPath.t list * bool -> filter_function (** Check whether a reference pertains or not to a set of modules *) val search_filter : glob_search_item -> filter_function (** {6 Specialized search functions} [search_xxx gl pattern modinout] searches the hypothesis of the [gl]th goal and the global environment for things matching [pattern] and satisfying module exclude/include clauses of [modinout]. *) val search_rewrite : env -> Evd.evar_map -> constr_pattern -> DirPath.t list * bool -> display_function -> unit val search_pattern : env -> Evd.evar_map -> constr_pattern -> DirPath.t list * bool -> display_function -> unit val search : env -> Evd.evar_map -> (bool * glob_search_request) list -> DirPath.t list * bool -> display_function -> unit type search_constraint = | Name_Pattern of Str.regexp (** Whether the name satisfies a regexp (uses Ocaml Str syntax) *) | Type_Pattern of Pattern.constr_pattern (** Whether the object type satisfies a pattern *) | SubType_Pattern of Pattern.constr_pattern (** Whether some subtype of object type satisfies a pattern *) | In_Module of Names.DirPath.t (** Whether the object pertains to a module *) | Include_Blacklist (** Bypass the Search blacklist *) type 'a coq_object = { coq_object_prefix : string list; coq_object_qualid : string list; coq_object_object : 'a; } val interface_search : env -> Evd.evar_map -> (search_constraint * bool) list -> constr coq_object list (** {6 Generic search function} *) val generic_search : env -> display_function -> unit (** This function iterates over all hypothesis of the goal numbered [glnum] (if present) and all known declarations. *) (** {6 Search function modifiers} *) val prioritize_search : (display_function -> unit) -> display_function -> unit (** [prioritize_search iter] iterates over the values of [iter] (seen as a sequence of declarations), in a relevance order. This requires to perform the entire iteration of [iter] before starting streaming. So [prioritize_search] should not be used for low-latency streaming. 
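(* Illustrative sketch, not part of the Coq sources: the buffering behaviour
   documented above can be pictured as collecting every result first, then
   sorting, then displaying, so nothing is shown before the traversal ends.
   Results are modelled as name/priority pairs for the example. *)
let prioritize (iter : (string * int -> unit) -> unit) (display : string -> unit) =
  let acc = ref [] in
  iter (fun item -> acc := item :: !acc);               (* full traversal first *)
  let sorted = List.sort (fun (_, p1) (_, p2) -> compare p2 p1) !acc in
  List.iter (fun (name, _) -> display name) sorted      (* then display in order *)

let () =
  let iter f = List.iter f ["foo", 1; "bar", 5; "baz", 3] in
  prioritize iter print_endline   (* prints bar, baz, foo *)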
*) coq-8.15.0/vernac/topfmt.ml000066400000000000000000000350501417001151100155020ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* pp_global_params -> unit * set the parameters of a formatter *) let set_gp ft gp = Format.pp_set_margin ft gp.margin ; Format.pp_set_max_indent ft gp.max_indent ; Format.pp_set_max_boxes ft gp.max_depth ; Format.pp_set_ellipsis_text ft gp.ellipsis let set_dflt_gp ft = set_gp ft dflt_gp let get_gp ft = { margin = Format.pp_get_margin ft (); max_indent = Format.pp_get_max_indent ft (); max_depth = Format.pp_get_max_boxes ft (); ellipsis = Format.pp_get_ellipsis_text ft () } (* with_fp : 'a pp_formatter_params -> Format.formatter * returns of formatter for given formatter functions *) let with_fp chan out_function flush_function = let ft = Format.make_formatter out_function flush_function in Format.pp_set_formatter_out_channel ft chan; ft (* Output on a channel ch *) let with_output_to ch = let ft = with_fp ch (output_substring ch) (fun () -> flush ch) in set_gp ft deep_gp; ft let std_ft = ref Format.std_formatter let _ = set_dflt_gp !std_ft let err_ft = ref Format.err_formatter let _ = set_gp !err_ft deep_gp let deep_ft = ref (with_output_to stdout) let _ = set_gp !deep_ft deep_gp (* For parametrization through vernacular *) let default = Format.pp_get_max_boxes !std_ft () let default_margin = Format.pp_get_margin !std_ft () let get_depth_boxes () = Some (Format.pp_get_max_boxes !std_ft ()) let set_depth_boxes v = Format.pp_set_max_boxes !std_ft (match v with None -> default | Some v -> v) let get_margin () = Some (Format.pp_get_margin !std_ft ()) let set_margin v = let v = match v with None -> default_margin | Some v -> v in Format.pp_set_margin Format.str_formatter v; Format.pp_set_margin !std_ft v; Format.pp_set_margin !deep_ft v; Format.pp_set_margin !err_ft v; (* Heuristic, based on usage: the column on the right of max_indent column is 20% of width, capped to 30 characters *) let m = max (64 * v / 100) (v-30) in Format.pp_set_max_indent Format.str_formatter m; Format.pp_set_max_indent !std_ft m; Format.pp_set_max_indent !deep_ft m; Format.pp_set_max_indent !err_ft m (** Console display of feedback *) (** Default tags *) module Tag = struct let error = "message.error" let warning = "message.warning" let debug = "message.debug" end let msgnl_with fmt strm = pp_with fmt (strm ++ fnl ()); Format.pp_print_flush fmt () module Emacs = struct (* Special chars for emacs, to detect warnings inside goal output *) let quote_warning_start = "" let quote_warning_end = "" let quote_info_start = "" let quote_info_end = "" let quote_emacs q_start q_end msg = hov 0 (seq [str q_start; brk(0,0); msg; brk(0,0); str q_end]) let quote_warning = quote_emacs quote_warning_start quote_warning_end let quote_info = quote_emacs quote_info_start quote_info_end end let dbg_hdr = tag Tag.debug (str "Debug:") ++ spc () let info_hdr = mt () let warn_hdr = tag Tag.warning (str "Warning:") ++ spc () let err_hdr = tag Tag.error (str "Error:") ++ spc () let make_body quoter info ?pre_hdr s = pr_opt_no_spc (fun x -> x ++ fnl ()) pre_hdr ++ quoter (hov 0 (info ++ s)) (* The empty quoter *) let noq x = x (* Generic logger *) let gen_logger dbg warn ?pre_hdr level msg = let open Feedback in match level with | Debug -> msgnl_with !std_ft (make_body dbg dbg_hdr ?pre_hdr msg) | Info -> msgnl_with !std_ft (make_body dbg info_hdr ?pre_hdr 
msg) | Notice -> msgnl_with !std_ft (make_body noq info_hdr ?pre_hdr msg) | Warning -> Flags.if_warn (fun () -> msgnl_with !err_ft (make_body warn warn_hdr ?pre_hdr msg)) () | Error -> msgnl_with !err_ft (make_body noq err_hdr ?pre_hdr msg) (** Standard loggers *) (* We provide a generic clear_log_backend callback for backends wanting to do cleanup after the print. *) let std_logger_cleanup = ref (fun () -> ()) let std_logger ?pre_hdr level msg = gen_logger (fun x -> x) (fun x -> x) ?pre_hdr level msg; !std_logger_cleanup () (** Color logging. Moved from Ppstyle, it may need some more refactoring *) (* Tag map for terminal style *) let default_tag_map () = let open Terminal in [ (* Local to console toplevel *) "message.error" , make ~bold:true ~fg_color:`WHITE ~bg_color:`RED () ; "message.warning" , make ~bold:true ~fg_color:`WHITE ~bg_color:`YELLOW () ; "message.debug" , make ~bold:true ~fg_color:`WHITE ~bg_color:`MAGENTA () ; "message.prompt" , make ~fg_color:`GREEN () (* Coming from the printer *) ; "constr.evar" , make ~fg_color:`LIGHT_BLUE () ; "constr.keyword" , make ~bold:true () ; "constr.type" , make ~bold:true ~fg_color:`YELLOW () ; "constr.notation" , make ~fg_color:`WHITE () (* ["constr"; "variable"] is not assigned *) ; "constr.reference" , make ~fg_color:`LIGHT_GREEN () ; "constr.path" , make ~fg_color:`LIGHT_MAGENTA () ; "module.definition", make ~bold:true ~fg_color:`LIGHT_RED () ; "module.keyword" , make ~bold:true () ; "tactic.keyword" , make ~bold:true () ; "tactic.primitive" , make ~fg_color:`LIGHT_GREEN () ; "tactic.string" , make ~fg_color:`LIGHT_RED () ; "diff.added" , make ~bg_color:(`RGB(0,141,0)) ~underline:true () ; "diff.removed" , make ~bg_color:(`RGB(170,0,0)) ~underline:true () ; "diff.added.bg" , make ~bg_color:(`RGB(0,91,0)) () ; "diff.removed.bg" , make ~bg_color:(`RGB(91,0,0)) () ] let tag_map = ref CString.Map.empty let init_tag_map styles = let set accu (name, st) = CString.Map.add name st accu in tag_map := List.fold_left set !tag_map styles let default_styles () = init_tag_map (default_tag_map ()) let set_emacs_print_strings () = let open Terminal in let diff = "diff." in List.iter (fun b -> let (name, attrs) = b in if CString.is_sub diff name 0 then tag_map := CString.Map.add name { attrs with prefix = Some (Printf.sprintf "<%s>" name); suffix = Some (Printf.sprintf "" name) } !tag_map) (CString.Map.bindings !tag_map) let parse_color_config str = let styles = Terminal.parse str in init_tag_map styles let dump_tags () = CString.Map.bindings !tag_map let empty = Terminal.make () let default_style = Terminal.reset_style let get_style tag = try CString.Map.find tag !tag_map with Not_found -> empty;; let get_open_seq tags = let style = List.fold_left (fun a b -> Terminal.merge a (get_style b)) default_style tags in Terminal.eval (Terminal.diff default_style style);; let get_close_seq tags = let style = List.fold_left (fun a b -> Terminal.merge a (get_style b)) default_style tags in Terminal.eval (Terminal.diff style default_style);; let diff_tag_stack = ref [] (* global, just like std_ft *) (** Not thread-safe. We should put a lock somewhere if we print from different threads. Do we? 
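(* Illustrative sketch, not part of the Coq sources: the kind of lock the
   comment above contemplates. It assumes linking with OCaml's threads
   library for Mutex; the stack and helper names are invented for the
   example, and the actual code keeps the stack unsynchronized. *)
let stack_lock = Mutex.create ()
let tag_stack : string list ref = ref []

let with_lock f =
  Mutex.lock stack_lock;
  Fun.protect ~finally:(fun () -> Mutex.unlock stack_lock) f

let push_tag t = with_lock (fun () -> tag_stack := t :: !tag_stack)

let pop_tag () =
  with_lock (fun () ->
      match !tag_stack with
      | [] -> None
      | t :: rest -> tag_stack := rest; Some t)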
*) let make_style_stack () = (* Default tag is to reset everything *) let style_stack = ref [] in let peek () = match !style_stack with | [] -> default_style (* Anomalous case, but for robustness *) | st :: _ -> st in let open_tag tag = let (tpfx, ttag) = split_tag tag in if tpfx = end_pfx then "" else let style = get_style ttag in (* Merge the current settings and the style being pushed. This allows restoring the previous settings correctly in a pop when both set the same attribute. Example: current settings have red FG, the pushed style has green FG. When popping the style, we should set red FG, not default FG. *) let style = Terminal.merge (peek ()) style in let diff = Terminal.diff (peek ()) style in style_stack := style :: !style_stack; if tpfx = start_pfx then diff_tag_stack := ttag :: !diff_tag_stack; Terminal.eval diff in let close_tag tag = let (tpfx, _) = split_tag tag in if tpfx = start_pfx then "" else begin if tpfx = end_pfx then diff_tag_stack := (try List.tl !diff_tag_stack with tl -> []); match !style_stack with | [] -> (* Something went wrong, we fallback *) Terminal.eval default_style | cur :: rem -> style_stack := rem; if cur = (peek ()) then "" else if rem = [] then Terminal.reset else Terminal.eval (Terminal.diff cur (peek ())) end in let clear () = style_stack := [] in open_tag, close_tag, clear let make_printing_functions () = let print_prefix ft tag = let (tpfx, ttag) = split_tag tag in if tpfx <> end_pfx then let style = get_style ttag in match style.Terminal.prefix with Some s -> Format.pp_print_as ft 0 s | None -> () in let print_suffix ft tag = let (tpfx, ttag) = split_tag tag in if tpfx <> start_pfx then let style = get_style ttag in match style.Terminal.suffix with Some s -> Format.pp_print_as ft 0 s | None -> () in print_prefix, print_suffix let init_output_fns () = let reopen_highlight = ref "" in let open Format in let fns = Format.pp_get_formatter_out_functions !std_ft () in let newline () = if !diff_tag_stack <> [] then begin let close = get_close_seq !diff_tag_stack in fns.out_string close 0 (String.length close); reopen_highlight := get_open_seq (List.rev !diff_tag_stack); end; fns.out_string "\n" 0 1 in let string s off n = if !reopen_highlight <> "" && String.trim (String.sub s off n) <> "" then begin fns.out_string !reopen_highlight 0 (String.length !reopen_highlight); reopen_highlight := "" end; fns.out_string s off n in let new_fns = { fns with out_string = string; out_newline = newline } in Format.pp_set_formatter_out_functions !std_ft new_fns;; let init_terminal_output ~color = let open_tag, close_tag, clear_tag = make_style_stack () in let print_prefix, print_suffix = make_printing_functions () in let tag_handler ft = { Format.mark_open_tag = open_tag; Format.mark_close_tag = close_tag; Format.print_open_tag = print_prefix ft; Format.print_close_tag = print_suffix ft; } in if color then (* Use 0-length markers *) begin std_logger_cleanup := clear_tag; init_output_fns (); Format.pp_set_mark_tags !std_ft true; Format.pp_set_mark_tags !err_ft true end else (* Use textual markers *) begin Format.pp_set_print_tags !std_ft true; Format.pp_set_print_tags !err_ft true end; Format.pp_set_formatter_tag_functions !std_ft (tag_handler !std_ft) [@warning "-3"]; Format.pp_set_formatter_tag_functions !err_ft (tag_handler !err_ft) [@warning "-3"] (* Rules for emacs: - Debug/info: emacs_quote_info - Warning/Error: emacs_quote_err - Notice: unquoted *) let emacs_logger = gen_logger Emacs.quote_info Emacs.quote_warning (* This is specific to the toplevel *) type 
execution_phase = | ParsingCommandLine | Initialization | LoadingPrelude | LoadingRcFile | InteractiveLoop | CompilationPhase let default_phase = ref InteractiveLoop let in_phase ~phase f x = let op = !default_phase in default_phase := phase; try let res = f x in default_phase := op; res with exn -> let iexn = Exninfo.capture exn in default_phase := op; Exninfo.iraise iexn let pr_loc loc = Loc.pr loc ++ str ":" let pr_phase ?loc () = match !default_phase, loc with | LoadingRcFile, loc -> (* For when all errors go through feedback: str "While loading rcfile:" ++ Option.cata (fun loc -> fnl () ++ pr_loc loc) (mt ()) loc *) Option.map pr_loc loc | LoadingPrelude, loc -> Some (str "While loading initial state:" ++ Option.cata (fun loc -> fnl () ++ pr_loc loc) (mt ()) loc) | _, Some loc -> Some (pr_loc loc) | ParsingCommandLine, _ | Initialization, _ | CompilationPhase, _ -> None | InteractiveLoop, _ -> (* Note: interactive messages such as "foo is defined" are not located *) None let print_err_exn any = let (e, info) = Exninfo.capture any in let loc = Loc.get_loc info in let pre_hdr = pr_phase ?loc () in let msg = CErrors.iprint (e, info) ++ fnl () in std_logger ?pre_hdr Feedback.Error msg let with_output_to_file fname func input = let channel = open_out (String.concat "." [fname; "out"]) in let old_fmt = !std_ft, !err_ft, !deep_ft in let new_ft = Format.formatter_of_out_channel channel in set_gp new_ft (get_gp !std_ft); std_ft := new_ft; err_ft := new_ft; deep_ft := new_ft; try let output = func input in std_ft := Util.pi1 old_fmt; err_ft := Util.pi2 old_fmt; deep_ft := Util.pi3 old_fmt; Format.pp_print_flush new_ft (); close_out channel; output with reraise -> let reraise = Exninfo.capture reraise in std_ft := Util.pi1 old_fmt; err_ft := Util.pi2 old_fmt; deep_ft := Util.pi3 old_fmt; Format.pp_print_flush new_ft (); close_out channel; Exninfo.iraise reraise (* For coqtop -time, we display the position in the file, and a glimpse of the executed command *) let pr_cmd_header com = let shorten s = if Unicode.utf8_length s > 33 then (Unicode.utf8_sub s 0 30) ^ "..." else s in let noblank s = String.map (fun c -> match c with | ' ' | '\n' | '\t' | '\r' -> '~' | x -> x ) s in let (start,stop) = Option.cata Loc.unloc (0,0) com.CAst.loc in let safe_pr_vernac x = try Ppvernac.pr_vernac x with e -> str (Printexc.to_string e) in let cmd = noblank (shorten (string_of_ppcmds (safe_pr_vernac com))) in str "Chars " ++ int start ++ str " - " ++ int stop ++ str " [" ++ str cmd ++ str "] " coq-8.15.0/vernac/topfmt.mli000066400000000000000000000047661417001151100156650ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* pp_global_params -> unit val set_dflt_gp : Format.formatter -> unit val get_gp : Format.formatter -> pp_global_params (** {6 Output functions of pretty-printing. } *) val with_output_to : out_channel -> Format.formatter val std_ft : Format.formatter ref val err_ft : Format.formatter ref val deep_ft : Format.formatter ref (** {6 For parametrization through vernacular. 
} *) val set_depth_boxes : int option -> unit val get_depth_boxes : unit -> int option val set_margin : int option -> unit val get_margin : unit -> int option (** Console display of feedback, we may add some location information *) val std_logger : ?pre_hdr:Pp.t -> Feedback.level -> Pp.t -> unit val emacs_logger : ?pre_hdr:Pp.t -> Feedback.level -> Pp.t -> unit (** Color output *) val default_styles : unit -> unit val parse_color_config : string -> unit val dump_tags : unit -> (string * Terminal.style) list val set_emacs_print_strings : unit -> unit (** Initialization of interpretation of tags *) val init_terminal_output : color:bool -> unit (** Error printing *) (* To be deprecated when we can fully move to feedback-based error printing. *) type execution_phase = | ParsingCommandLine | Initialization | LoadingPrelude | LoadingRcFile | InteractiveLoop | CompilationPhase val in_phase : phase:execution_phase -> ('a -> 'b) -> 'a -> 'b val pr_phase : ?loc:Loc.t -> unit -> Pp.t option val print_err_exn : exn -> unit (** [with_output_to_file file f x] executes [f x] with logging redirected to a file [file] *) val with_output_to_file : string -> ('a -> 'b) -> 'a -> 'b val pr_cmd_header : Vernacexpr.vernac_control -> Pp.t coq-8.15.0/vernac/vernac.mllib000066400000000000000000000000001417001151100161210ustar00rootroot00000000000000coq-8.15.0/vernac/vernac_classifier.ml000066400000000000000000000215471417001151100176610ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* "Later" | VtNow -> "Now" let string_of_vernac_classification = function | VtStartProof _ -> "StartProof" | VtSideff (_,w) -> "Sideff"^" "^(string_of_vernac_when w) | VtQed (VtKeep VtKeepAxiom) -> "Qed(admitted)" | VtQed (VtKeep (VtKeepOpaque | VtKeepDefined)) -> "Qed(keep)" | VtQed VtDrop -> "Qed(drop)" | VtProofStep { proof_block_detection } -> "ProofStep " ^ Option.default "" proof_block_detection | VtQuery -> "Query" | VtMeta -> "Meta " | VtProofMode _ -> "Proof Mode" let vtkeep_of_opaque = function | Opaque -> VtKeepOpaque | Transparent -> VtKeepDefined let idents_of_name : Names.Name.t -> Names.Id.t list = function | Names.Anonymous -> [] | Names.Name n -> [n] let stm_allow_nested_proofs_option_name = ["Nested";"Proofs";"Allowed"] let options_affecting_stm_scheduling = [ Attributes.universe_polymorphism_option_name; stm_allow_nested_proofs_option_name; Vernacinterp.proof_mode_opt_name; Attributes.program_mode_option_name; Proof_using.proof_using_opt_name; ] let classify_vernac e = let static_classifier ~atts e = match e with (* Univ poly compatibility: we run it now, so that we can just * look at Flags in stm.ml. Would be nicer to have the stm * look at the entire dag to detect this option. 
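(* Illustrative sketch, not part of the Coq sources: the comment above is
   about tagging side effects as immediate or delayable so that option
   changes affecting scheduling take effect right away. The variants and the
   option names below are stand-ins for the real classification datatypes. *)
type when_ = Now | Later
type classification = Query | SideEffect of when_ | StartProof | EndProof

let scheduling_options = ["Universe Polymorphism"; "Nested Proofs Allowed"]

let classify = function
  | `SetOption name when List.mem name scheduling_options -> SideEffect Now
  | `SetOption _ -> SideEffect Later
  | `Check -> Query
  | `Lemma -> StartProof
  | `Qed -> EndProof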
*) | VernacSetOption (_, l,_) when CList.exists (CList.equal String.equal l) options_affecting_stm_scheduling -> VtSideff ([], VtNow) (* Qed *) | VernacAbort _ -> VtQed VtDrop | VernacEndProof Admitted -> VtQed (VtKeep VtKeepAxiom) | VernacEndProof (Proved (opaque,_)) -> VtQed (VtKeep (vtkeep_of_opaque opaque)) | VernacExactProof _ -> VtQed (VtKeep VtKeepOpaque) (* Query *) | VernacShow _ | VernacPrint _ | VernacSearch _ | VernacLocate _ | VernacGlobalCheck _ | VernacCheckMayEval _ -> VtQuery (* ProofStep *) | VernacProof _ | VernacFocus _ | VernacUnfocus | VernacSubproof _ | VernacCheckGuard | VernacUnfocused | VernacBullet _ -> VtProofStep { proof_block_detection = Some "bullet" } | VernacEndSubproof -> VtProofStep { proof_block_detection = Some "curly" } (* StartProof *) | VernacDefinition ((DoDischarge,_),({v=i},_),ProveBody _) -> VtStartProof(Doesn'tGuaranteeOpacity, idents_of_name i) | VernacDefinition (_,({v=i},_),ProveBody _) -> let polymorphic = Attributes.(parse_drop_extra polymorphic atts) in let guarantee = if polymorphic then Doesn'tGuaranteeOpacity else GuaranteesOpacity in VtStartProof(guarantee, idents_of_name i) | VernacStartTheoremProof (_,l) -> let polymorphic = Attributes.(parse_drop_extra polymorphic atts) in let ids = List.map (fun (({v=i}, _), _) -> i) l in let guarantee = if polymorphic then Doesn'tGuaranteeOpacity else GuaranteesOpacity in VtStartProof (guarantee,ids) | VernacFixpoint (discharge,l) -> let polymorphic = Attributes.(parse_drop_extra polymorphic atts) in let guarantee = if discharge = DoDischarge || polymorphic then Doesn'tGuaranteeOpacity else GuaranteesOpacity in let ids, open_proof = List.fold_left (fun (l,b) {Vernacexpr.fname={CAst.v=id}; body_def} -> id::l, b || body_def = None) ([],false) l in if open_proof then VtStartProof (guarantee,ids) else VtSideff (ids, VtLater) | VernacCoFixpoint (discharge,l) -> let polymorphic = Attributes.(parse_drop_extra polymorphic atts) in let guarantee = if discharge = DoDischarge || polymorphic then Doesn'tGuaranteeOpacity else GuaranteesOpacity in let ids, open_proof = List.fold_left (fun (l,b) { Vernacexpr.fname={CAst.v=id}; body_def } -> id::l, b || body_def = None) ([],false) l in if open_proof then VtStartProof (guarantee,ids) else VtSideff (ids, VtLater) (* Sideff: apply to all open branches. 
usually run on master only *) | VernacAssumption (_,_,l) -> let ids = List.flatten (List.map (fun (_,(l,_)) -> List.map (fun (id, _) -> id.v) l) l) in VtSideff (ids, VtLater) | VernacPrimitive ((id,_),_,_) -> VtSideff ([id.CAst.v], VtLater) | VernacDefinition (_,({v=id},_),DefineBody _) -> VtSideff (idents_of_name id, VtLater) | VernacInductive (_,l) -> let ids = List.map (fun (((_,({v=id},_)),_,_,cl),_) -> id :: match cl with | Constructors l -> List.map (fun (_,({v=id},_)) -> id) l | RecordDecl (oid,l) -> (match oid with Some {v=x} -> [x] | _ -> []) @ CList.map_filter (function | AssumExpr({v=Names.Name n},_,_), _ -> Some n | _ -> None) l) l in VtSideff (List.flatten ids, VtLater) | VernacScheme l -> let ids = List.map (fun {v}->v) (CList.map_filter (fun (x,_) -> x) l) in VtSideff (ids, VtLater) | VernacCombinedScheme ({v=id},_) -> VtSideff ([id], VtLater) | VernacBeginSection {v=id} -> VtSideff ([id], VtLater) | VernacUniverse _ | VernacConstraint _ | VernacCanonical _ | VernacCoercion _ | VernacIdentityCoercion _ | VernacAddLoadPath _ | VernacRemoveLoadPath _ | VernacAddMLPath _ | VernacChdir _ | VernacCreateHintDb _ | VernacRemoveHints _ | VernacHints _ | VernacArguments _ | VernacReserve _ | VernacGeneralizable _ | VernacSetOpacity _ | VernacSetStrategy _ | VernacSetOption _ | VernacAddOption _ | VernacRemoveOption _ | VernacMemOption _ | VernacPrintOption _ | VernacDeclareReduction _ | VernacExistingClass _ | VernacExistingInstance _ | VernacRegister _ | VernacNameSectionHypSet _ | VernacComments _ | VernacDeclareInstance _ -> VtSideff ([], VtLater) (* Who knows *) | VernacLoad _ -> VtSideff ([], VtNow) (* (Local) Notations have to disappear *) | VernacEndSegment _ -> VtSideff ([], VtNow) (* Modules with parameters have to be executed: can import notations *) | VernacDeclareModule (exp,{v=id},bl,_) | VernacDefineModule (exp,{v=id},bl,_,_) -> VtSideff ([id], if bl = [] && exp = None then VtLater else VtNow) | VernacDeclareModuleType ({v=id},bl,_,_) -> VtSideff ([id], if bl = [] then VtLater else VtNow) (* These commands alter the parser *) | VernacDeclareCustomEntry _ | VernacOpenCloseScope _ | VernacDeclareScope _ | VernacDelimiters _ | VernacBindScope _ | VernacNotation _ | VernacNotationAddFormat _ | VernacReservedNotation _ | VernacSyntacticDefinition _ | VernacRequire _ | VernacImport _ | VernacInclude _ | VernacDeclareMLModule _ | VernacContext _ (* TASSI: unsure *) -> VtSideff ([], VtNow) | VernacProofMode pm -> VtProofMode pm | VernacInstance ((name,_),_,_,props,_) -> let program, refine = Attributes.(parse_drop_extra Notations.(program ++ Classes.refine_att) atts) in if program || (props <> None && not refine) then VtSideff (idents_of_name name.CAst.v, VtLater) else let polymorphic = Attributes.(parse_drop_extra polymorphic atts) in let guarantee = if polymorphic then Doesn'tGuaranteeOpacity else GuaranteesOpacity in VtStartProof (guarantee, idents_of_name name.CAst.v) (* Stm will install a new classifier to handle these *) | VernacBack _ | VernacAbortAll | VernacUndoTo _ | VernacUndo _ | VernacResetName _ | VernacResetInitial | VernacRestart -> VtMeta (* Plugins should classify their commands *) | VernacExtend (s,l) -> try Vernacextend.get_vernac_classifier s l with Not_found -> anomaly(str"No classifier for"++spc()++str (fst s)++str".") in let static_control_classifier ({ CAst.v ; _ } as cmd) = (* Fail Qed or Fail Lemma must not join/fork the DAG *) (* XXX why is Fail not always Query? 
*) if Vernacprop.has_query_control cmd then (match static_classifier ~atts:v.attrs v.expr with | VtQuery | VtProofStep _ | VtSideff _ | VtMeta as x -> x | VtQed _ -> VtProofStep { proof_block_detection = None } | VtStartProof _ | VtProofMode _ -> VtQuery) else static_classifier ~atts:v.attrs v.expr in static_control_classifier e coq-8.15.0/vernac/vernac_classifier.mli000066400000000000000000000016471417001151100200310ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* string (** What does a vernacular do *) val classify_vernac : Vernacexpr.vernac_control -> vernac_classification (** *) val stm_allow_nested_proofs_option_name : string list coq-8.15.0/vernac/vernacentries.ml000066400000000000000000002660561417001151100170550ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* let env = Global.env () in Evd.(from_env env, env) | Some p -> Declare.Proof.get_current_context p let get_goal_or_global_context ~pstate glnum = match pstate with | None -> let env = Global.env () in Evd.(from_env env, env) | Some p -> Declare.Proof.get_goal_context p glnum let cl_of_qualid = function | FunClass -> Coercionops.CL_FUN | SortClass -> Coercionops.CL_SORT | RefClass r -> ComCoercion.class_of_global (Smartlocate.smart_global ~head:true r) let scope_class_of_qualid qid = Notation.scope_class_of_class (cl_of_qualid qid) (** Standard attributes for definition-like commands. *) module DefAttributes = struct type t = { locality : bool option; polymorphic : bool; program : bool; deprecated : Deprecation.t option; canonical_instance : bool; typing_flags : Declarations.typing_flags option; using : Vernacexpr.section_subset_expr option; } let parse f = let open Attributes in let (((((locality, deprecated), polymorphic), program), canonical_instance), typing_flags), using = parse Notations.(locality ++ deprecation ++ polymorphic ++ program ++ canonical_instance ++ typing_flags ++ using) f in let using = Option.map Proof_using.using_from_string using in { polymorphic; program; locality; deprecated; canonical_instance; typing_flags; using } end let module_locality = Attributes.Notations.(locality >>= fun l -> return (make_module_locality l)) let with_locality ~atts f = let local = Attributes.(parse locality atts) in f ~local let with_section_locality ~atts f = let local = Attributes.(parse locality atts) in let section_local = make_section_locality local in f ~section_local let with_module_locality ~atts f = let module_local = Attributes.(parse module_locality atts) in f ~module_local let with_def_attributes ~atts f = let atts = DefAttributes.parse atts in if atts.DefAttributes.program then Declare.Obls.check_program_libraries (); f ~atts (*******************) (* "Show" commands *) let show_proof ~pstate = (* spiwack: this would probably be cooler with a bit of polishing. 
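In short, this implements Show Proof: it prints the partial proof terms of the current proof, using the global environment as an approximation of the proof environment (see the remark below), and reports an error when there is no open proof or no goals to show.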
*) try let pstate = Option.get pstate in let p = Declare.Proof.get pstate in let sigma, _ = Declare.Proof.get_current_context pstate in let pprf = Proof.partial_proof p in (* In the absence of an environment explicitly attached to the proof and on top of which side effects of the proof would be pushed, , we take the global environment which in practise should be a superset of the initial environment in which the proof was started *) let env = Global.env() in Pp.prlist_with_sep Pp.fnl (Printer.pr_econstr_env env sigma) pprf (* We print nothing if there are no goals left *) with | Proof.NoSuchGoal _ | Option.IsNone -> user_err (str "No goals to show.") let show_top_evars ~proof = (* spiwack: new as of Feb. 2010: shows goal evars in addition to non-goal evars. *) let Proof.{goals; sigma} = Proof.data proof in let shelf = Evd.shelf sigma in let given_up = Evar.Set.elements @@ Evd.given_up sigma in pr_evars_int sigma ~shelf ~given_up 1 (Evd.undefined_map sigma) let show_universes ~proof = let Proof.{goals;sigma} = Proof.data proof in let ctx = Evd.universe_context_set (Evd.minimize_universes sigma) in Termops.pr_evar_universe_context (Evd.evar_universe_context sigma) ++ fnl () ++ str "Normalized constraints:" ++ brk(1,1) ++ Univ.pr_universe_context_set (Termops.pr_evd_level sigma) ctx (* Simulate the Intro(s) tactic *) let show_intro ~proof all = let open EConstr in let Proof.{goals;sigma} = Proof.data proof in if not (List.is_empty goals) then begin let evi = Evd.find sigma (List.hd goals) in let env = Evd.evar_filtered_env (Global.env ()) evi in let l,_= decompose_prod_assum sigma (Termops.strip_outer_cast sigma (Evd.evar_concl evi)) in if all then let lid = Tactics.find_intro_names env sigma l in hov 0 (prlist_with_sep spc Id.print lid) else if not (List.is_empty l) then let n = List.last l in Id.print (List.hd (Tactics.find_intro_names env sigma [n])) else mt () end else mt () (** Textual display of a generic "match" template *) let show_match id = let patterns = try ComInductive.make_cases (Nametab.global_inductive id) with Not_found -> user_err Pp.(str "Unknown inductive type.") in let pr_branch l = str "| " ++ hov 1 (prlist_with_sep spc str l) ++ str " =>" in v 1 (str "match # with" ++ fnl () ++ prlist_with_sep fnl pr_branch patterns ++ fnl () ++ str "end" ++ fnl ()) (* "Print" commands *) let print_loadpath dir = let l = Loadpath.get_load_paths () in let l = match dir with | None -> l | Some dir -> let filter p = is_dirpath_prefix_of dir (Loadpath.logical p) in List.filter filter l in str "Logical Path / Physical path:" ++ fnl () ++ prlist_with_sep fnl Loadpath.pp l let print_libraries () = let loaded = Library.loaded_libraries () in str"Loaded library files: " ++ pr_vertical_list DirPath.print loaded let print_module qid = match Nametab.locate_module qid with | mp -> Printmod.print_module ~with_body:true mp | exception Not_found -> user_err (str"Unknown Module " ++ pr_qualid qid) let print_modtype qid = try let kn = Nametab.locate_modtype qid in Printmod.print_modtype kn with Not_found -> (* Is there a module of this name ? If yes we display its type *) try let mp = Nametab.locate_module qid in Printmod.print_module ~with_body:false mp with Not_found -> user_err (str"Unknown Module Type or Module " ++ pr_qualid qid) let print_namespace ~pstate ns = let ns = List.rev (Names.DirPath.repr ns) in (* [match_dirpath], [match_modulpath] are helpers for [matches] which checks whether a constant is in the namespace [ns]. 
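As a worked illustration (hedged, but directly following the code below): if the requested namespace is Coq.Init, then a constant such as Coq.Init.Nat.add matches, because match_modulepath walks the module path down to its root directory and match_dirpath then consumes the Coq and Init components, ending on Some []; qualified_minus subsequently drops the two matched qualifiers, so the constant is displayed as Nat.add.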
*) let rec match_dirpath ns = function | [] -> Some ns | id::dir -> begin match match_dirpath ns dir with | Some [] as y -> y | Some (a::ns') -> if Names.Id.equal a id then Some ns' else None | None -> None end in let rec match_modulepath ns = function | MPbound _ -> None (* Not a proper namespace. *) | MPfile dir -> match_dirpath ns (Names.DirPath.repr dir) | MPdot (mp,lbl) -> let id = Names.Label.to_id lbl in begin match match_modulepath ns mp with | Some [] as y -> y | Some (a::ns') -> if Names.Id.equal a id then Some ns' else None | None -> None end in (* [qualified_minus n mp] returns a list of qualifiers representing [mp] except the [n] first (in the concrete syntax order). The idea is that if [mp] matches [ns], then [qualified_minus mp (length ns)] will be the correct representation of [mp] assuming [ns] is imported. *) (* precondition: [mp] matches some namespace of length [n] *) let qualified_minus n mp = let rec list_of_modulepath = function | MPbound _ -> assert false (* MPbound never matches *) | MPfile dir -> Names.DirPath.repr dir | MPdot (mp,lbl) -> (Names.Label.to_id lbl)::(list_of_modulepath mp) in snd (Util.List.chop n (List.rev (list_of_modulepath mp))) in let print_list pr l = prlist_with_sep (fun () -> str".") pr l in let print_kn kn = let (mp,lbl) = Names.KerName.repr kn in let qn = (qualified_minus (List.length ns) mp)@[Names.Label.to_id lbl] in print_list Id.print qn in let print_constant ~pstate k body = (* FIXME: universes *) let t = body.Declarations.const_type in let sigma, env = get_current_or_global_context ~pstate in print_kn k ++ str":" ++ spc() ++ Printer.pr_type_env env sigma t in let matches mp = match match_modulepath ns mp with | Some [] -> true | _ -> false in let constants_in_namespace = Environ.fold_constants (fun c body acc -> let kn = Constant.user c in if matches (KerName.modpath kn) then acc++fnl()++hov 2 (print_constant ~pstate kn body) else acc) (Global.env ()) (str"") in (print_list Id.print ns)++str":"++fnl()++constants_in_namespace let print_strategy r = let open Conv_oracle in let pr_level = function | Expand -> str "expand" | Level 0 -> str "transparent" | Level n -> str "level" ++ spc() ++ int n | Opaque -> str "opaque" in let pr_strategy (ref, lvl) = pr_global ref ++ str " : " ++ pr_level lvl in let oracle = Environ.oracle (Global.env ()) in match r with | None -> let fold key lvl (vacc, cacc) = match key with | VarKey id -> ((GlobRef.VarRef id, lvl) :: vacc, cacc) | ConstKey cst -> (vacc, (GlobRef.ConstRef cst, lvl) :: cacc) | RelKey _ -> (vacc, cacc) in let var_lvl, cst_lvl = fold_strategy fold oracle ([], []) in let var_msg = if List.is_empty var_lvl then mt () else str "Variable strategies" ++ fnl () ++ hov 0 (prlist_with_sep fnl pr_strategy var_lvl) ++ fnl () in let cst_msg = if List.is_empty cst_lvl then mt () else str "Constant strategies" ++ fnl () ++ hov 0 (prlist_with_sep fnl pr_strategy cst_lvl) in var_msg ++ cst_msg | Some r -> let r = Smartlocate.smart_global r in let key = let open GlobRef in match r with | VarRef id -> VarKey id | ConstRef cst -> ConstKey cst | IndRef _ | ConstructRef _ -> user_err Pp.(str "The reference is not unfoldable") in let lvl = get_strategy oracle key in pr_strategy (r, lvl) let print_registered () = let pr_lib_ref (s,r) = pr_global r ++ str " registered as " ++ str s in hov 0 (prlist_with_sep fnl pr_lib_ref @@ Coqlib.get_lib_refs ()) let dump_universes output g = let open Univ in let dump_arc u = function | UGraph.Node ltle -> Univ.Level.Map.iter (fun v strict -> let typ = if strict then Lt else 
Le in output typ u v) ltle; | UGraph.Alias v -> output Eq u v in Univ.Level.Map.iter dump_arc g let dump_universes_gen prl g s = let output = open_out s in let output_constraint, close = if Filename.check_suffix s ".dot" || Filename.check_suffix s ".gv" then begin (* the lazy unit is to handle errors while printing the first line *) let init = lazy (Printf.fprintf output "digraph universes {\n") in begin fun kind left right -> let () = Lazy.force init in match kind with | Univ.Lt -> Printf.fprintf output " \"%s\" -> \"%s\" [style=bold];\n" right left | Univ.Le -> Printf.fprintf output " \"%s\" -> \"%s\" [style=solid];\n" right left | Univ.Eq -> Printf.fprintf output " \"%s\" -> \"%s\" [style=dashed];\n" left right end, begin fun () -> if Lazy.is_val init then Printf.fprintf output "}\n"; close_out output end end else begin begin fun kind left right -> let kind = match kind with | Univ.Lt -> "<" | Univ.Le -> "<=" | Univ.Eq -> "=" in Printf.fprintf output "%s %s %s ;\n" left kind right end, (fun () -> close_out output) end in let output_constraint k l r = output_constraint k (prl l) (prl r) in try dump_universes output_constraint g; close (); str "Universes written to file \"" ++ str s ++ str "\"." with reraise -> let reraise = Exninfo.capture reraise in close (); Exninfo.iraise reraise let universe_subgraph ?loc kept univ = let open Univ in let sigma = Evd.from_env (Global.env()) in let parse q = let q = Constrexpr.CType q in (* this function has a nice error message for not found univs *) Constrintern.interp_known_level sigma q in let kept = List.fold_left (fun kept q -> Level.Set.add (parse q) kept) Level.Set.empty kept in let csts = UGraph.constraints_for ~kept univ in let add u newgraph = let strict = UGraph.check_constraint univ (Level.set,Lt,u) in UGraph.add_universe u ~lbound:UGraph.Bound.Set ~strict newgraph in let univ = Level.Set.fold add kept UGraph.initial_universes in UGraph.merge_constraints csts univ let sort_universes g = let open Univ in let rec normalize u = match Level.Map.find u g with | UGraph.Alias u -> normalize u | UGraph.Node _ -> u in let get_next u = match Level.Map.find u g with | UGraph.Alias u -> assert false (* nodes are normalized *) | UGraph.Node ltle -> ltle in (* Compute the longest chain of Lt constraints from Set to any universe *) let rec traverse accu todo = match todo with | [] -> accu | (u, n) :: todo -> let () = assert (Level.equal (normalize u) u) in let n = match Level.Map.find u accu with | m -> if m < n then Some n else None | exception Not_found -> Some n in match n with | None -> traverse accu todo | Some n -> let accu = Level.Map.add u n accu in let next = get_next u in let fold v lt todo = let v = normalize v in if lt then (v, n + 1) :: todo else (v, n) :: todo in let todo = Level.Map.fold fold next todo in traverse accu todo in (* Only contains normalized nodes *) let levels = traverse Level.Map.empty [normalize Level.set, 0] in let max_level = Level.Map.fold (fun _ n accu -> max n accu) levels 0 in let dummy_mp = Names.DirPath.make [Names.Id.of_string "Type"] in let ulevels = Array.init max_level (fun i -> Level.(make (UGlobal.make dummy_mp "" i))) in let ulevels = Array.cons Level.set ulevels in (* Add the normal universes *) let fold (cur, ans) u = let ans = Level.Map.add cur (UGraph.Node (Level.Map.singleton u true)) ans in (u, ans) in let _, ans = Array.fold_left fold (Level.prop, Level.Map.empty) ulevels in (* Add alias pointers *) let fold u _ ans = if Level.is_small u then ans else let n = Level.Map.find (normalize u) levels in 
Level.Map.add u (UGraph.Alias ulevels.(n)) ans in Level.Map.fold fold g ans let print_universes ?loc ~sort ~subgraph dst = let univ = Global.universes () in let univ = match subgraph with | None -> univ | Some g -> universe_subgraph ?loc g univ in let univ = UGraph.repr univ in let univ = if sort then sort_universes univ else univ in let pr_remaining = if Global.is_joined_environment () then mt () else str"There may remain asynchronous universe constraints" in let prl = UnivNames.(pr_with_global_universes empty_binders) in begin match dst with | None -> UGraph.pr_universes prl univ ++ pr_remaining | Some s -> dump_universes_gen (fun u -> Pp.string_of_ppcmds (prl u)) univ s end (*********************) (* "Locate" commands *) let locate_file f = let file = Flags.silently Loadpath.locate_file f in str file let msg_found_library = function | Loadpath.LibLoaded, fulldir, file -> hov 0 (DirPath.print fulldir ++ strbrk " has been loaded from file " ++ str file) | Loadpath.LibInPath, fulldir, file -> hov 0 (DirPath.print fulldir ++ strbrk " is bound to file " ++ str file) let err_unmapped_library ?from qid = let prefix = match from with | None -> mt () | Some from -> str " with prefix " ++ DirPath.print from in strbrk "Cannot find a physical path bound to logical path " ++ pr_qualid qid ++ prefix ++ str "." let err_notfound_library ?from qid = let prefix = match from with | None -> mt () | Some from -> str " with prefix " ++ DirPath.print from in let bonus = if !Flags.load_vos_libraries then mt () else str " (while searching for a .vos file)" in strbrk "Unable to locate library " ++ pr_qualid qid ++ prefix ++ bonus ++ str "." exception UnmappedLibrary of Names.DirPath.t option * Libnames.qualid exception NotFoundLibrary of Names.DirPath.t option * Libnames.qualid let _ = CErrors.register_handler begin function | UnmappedLibrary (from, qid) -> Some (err_unmapped_library ?from qid) | NotFoundLibrary (from, qid) -> Some (err_notfound_library ?from qid) | _ -> None end let print_located_library qid = let open Loadpath in match locate_qualified_library qid with | Ok lib -> msg_found_library lib | Error LibUnmappedDir -> raise (UnmappedLibrary (None, qid)) | Error LibNotFound -> raise (NotFoundLibrary (None, qid)) let smart_global r = let gr = Smartlocate.smart_global r in Dumpglob.add_glob ?loc:r.loc gr; gr let dump_global r = try let gr = Smartlocate.smart_global r in Dumpglob.add_glob ?loc:r.loc gr with e when CErrors.noncritical e -> () (**********) (* Syntax *) let vernac_reserved_notation ~module_local ~infix l = Metasyntax.add_reserved_notation ~local:module_local ~infix l let vernac_declare_scope ~module_local sc = Metasyntax.declare_scope module_local sc let vernac_delimiters ~module_local sc action = match action with | Some lr -> Metasyntax.add_delimiters module_local sc lr | None -> Metasyntax.remove_delimiters module_local sc let vernac_bind_scope ~module_local sc cll = Metasyntax.add_class_scope module_local sc (List.map scope_class_of_qualid cll) let vernac_open_close_scope ~section_local (b,s) = Notation.open_close_scope (section_local,b,s) let vernac_notation ~atts ~infix = let module_local, deprecation = Attributes.(parse Notations.(module_locality ++ deprecation) atts) in Metasyntax.add_notation ~local:module_local ~infix deprecation (Global.env()) let vernac_custom_entry ~module_local s = Metasyntax.declare_custom_entry module_local s (***********) (* Gallina *) let check_name_freshness locality {CAst.loc;v=id} : unit = (* We check existence here: it's a bit late at Qed time *) if 
Nametab.exists_cci (Lib.make_path id) || Termops.is_section_variable (Global.env ()) id || locality <> Locality.Discharge && Nametab.exists_cci (Lib.make_path_except_section id) then user_err ?loc (Id.print id ++ str " already exists.") let program_inference_hook env sigma ev = let tac = !Declare.Obls.default_tactic in let evi = Evd.find sigma ev in let evi = Evarutil.nf_evar_info sigma evi in let env = Evd.evar_filtered_env env evi in try let concl = evi.Evd.evar_concl in if not (Evarutil.is_ground_env sigma env && Evarutil.is_ground_term sigma concl) then None else let c, _, _, _, ctx = Declare.build_by_tactic ~poly:false env ~uctx:(Evd.evar_universe_context sigma) ~typ:concl tac in Some (Evd.set_universe_context sigma ctx, EConstr.of_constr c) with | Logic_monad.TacticFailure e when noncritical e -> user_err Pp.(str "The statement obligations could not be resolved \ automatically, write a statement definition first.") let vernac_set_used_variables ~pstate using : Declare.Proof.t = let env = Global.env () in let sigma, _ = Declare.Proof.get_current_context pstate in let initial_goals pf = Proofview.initial_goals Proof.((data pf).entry) in let terms = List.map snd (initial_goals (Declare.Proof.get pstate)) in let using = Proof_using.definition_using env sigma ~using ~terms in let vars = Environ.named_context env in Names.Id.Set.iter (fun id -> if not (List.exists (NamedDecl.get_id %> Id.equal id) vars) then user_err (str "Unknown variable: " ++ Id.print id)) using; let _, pstate = Declare.Proof.set_used_variables pstate ~using in pstate let vernac_set_used_variables_opt ?using pstate = match using with | None -> pstate | Some expr -> vernac_set_used_variables ~pstate expr (* XXX: Interpretation of lemma command, duplication with ComFixpoint / ComDefinition ? 
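Roughly, for each statement interp_lemma below interprets the binders as a local context, type-checks the stated conclusion in that context, solves the remaining evars (with the program inference hook in program mode), checks that the chosen name is fresh, and packages the result as a Declare.CInfo.t carrying the name, the generalized type and the implicit arguments.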
*) let interp_lemma ~program_mode ~flags ~scope env0 evd thms = let inference_hook = if program_mode then Some program_inference_hook else None in List.fold_left_map (fun evd ((id, _), (bl, t)) -> let evd, (impls, ((env, ctx), imps)) = Constrintern.interp_context_evars ~program_mode env0 evd bl in let evd, (t', imps') = Constrintern.interp_type_evars_impls ~flags ~impls env evd t in let flags = Pretyping.{ all_and_fail_flags with program_mode } in let evd = Pretyping.solve_remaining_evars ?hook:inference_hook flags env evd in let ids = List.map Context.Rel.Declaration.get_name ctx in check_name_freshness scope id; let thm = Declare.CInfo.make ~name:id.CAst.v ~typ:(EConstr.it_mkProd_or_LetIn t' ctx) ~args:ids ~impargs:(imps @ imps') () in evd, thm) evd thms (* Checks done in start_lemma_com *) let post_check_evd ~udecl ~poly evd = let () = if not UState.(udecl.univdecl_extensible_instance && udecl.univdecl_extensible_constraints) then ignore (Evd.check_univ_decl ~poly evd udecl) in if poly then evd else (* We fix the variables to ensure they won't be lowered to Set *) Evd.fix_undefined_variables evd let start_lemma_com ~typing_flags ~program_mode ~poly ~scope ~kind ?using ?hook thms = let env0 = Global.env () in let env0 = Environ.update_typing_flags ?typing_flags env0 in let flags = Pretyping.{ all_no_fail_flags with program_mode } in let decl = fst (List.hd thms) in let evd, udecl = Constrintern.interp_univ_decl_opt env0 (snd decl) in let evd, thms = interp_lemma ~program_mode ~flags ~scope env0 evd thms in let mut_analysis = RecLemmas.look_for_possibly_mutual_statements evd thms in let evd = Evd.minimize_universes evd in let info = Declare.Info.make ?hook ~poly ~scope ~kind ~udecl ?typing_flags () in begin match mut_analysis with | RecLemmas.NonMutual thm -> let thm = Declare.CInfo.to_constr evd thm in let evd = post_check_evd ~udecl ~poly evd in Declare.Proof.start_with_initialization ~info ~cinfo:thm evd | RecLemmas.Mutual { mutual_info; cinfo ; possible_guards } -> let cinfo = List.map (Declare.CInfo.to_constr evd) cinfo in let evd = post_check_evd ~udecl ~poly evd in Declare.Proof.start_mutual_with_initialization ~info ~cinfo evd ~mutual_info (Some possible_guards) end (* XXX: This should be handled in start_with_initialization, see duplicate using in declare.ml *) |> vernac_set_used_variables_opt ?using let vernac_definition_hook ~canonical_instance ~local ~poly = let open Decls in function | Coercion -> Some (ComCoercion.add_coercion_hook ~poly) | CanonicalStructure -> Some (Declare.Hook.(make (fun { S.dref } -> Canonical.declare_canonical_structure ?local dref))) | SubClass -> Some (ComCoercion.add_subclass_hook ~poly) | Definition when canonical_instance -> Some (Declare.Hook.(make (fun { S.dref } -> Canonical.declare_canonical_structure ?local dref))) | Let when canonical_instance -> Some (Declare.Hook.(make (fun { S.dref } -> Canonical.declare_canonical_structure dref))) | _ -> None let default_thm_id = Id.of_string "Unnamed_thm" let fresh_name_for_anonymous_theorem () = Namegen.next_global_ident_away default_thm_id Id.Set.empty let vernac_definition_name lid local = let lid = match lid with | { v = Name.Anonymous; loc } -> CAst.make ?loc (fresh_name_for_anonymous_theorem ()) | { v = Name.Name n; loc } -> CAst.make ?loc n in let () = match local with | Discharge -> Dumpglob.dump_definition lid true "var" | Global _ -> Dumpglob.dump_definition lid false "def" in lid let vernac_definition_interactive ~atts (discharge, kind) (lid, pl) bl t = let open DefAttributes in let local = 
enforce_locality_exp atts.locality discharge in let hook = vernac_definition_hook ~canonical_instance:atts.canonical_instance ~local:atts.locality ~poly:atts.polymorphic kind in let program_mode = atts.program in let poly = atts.polymorphic in let typing_flags = atts.typing_flags in let name = vernac_definition_name lid local in start_lemma_com ~typing_flags ~program_mode ~poly ~scope:local ~kind:(Decls.IsDefinition kind) ?using:atts.using ?hook [(name, pl), (bl, t)] let vernac_definition ~atts ~pm (discharge, kind) (lid, pl) bl red_option c typ_opt = let open DefAttributes in let scope = enforce_locality_exp atts.locality discharge in let hook = vernac_definition_hook ~canonical_instance:atts.canonical_instance ~local:atts.locality ~poly:atts.polymorphic kind in let program_mode = atts.program in let typing_flags = atts.typing_flags in let name = vernac_definition_name lid scope in let red_option = match red_option with | None -> None | Some r -> let env = Global.env () in let sigma = Evd.from_env env in Some (snd (Hook.get f_interp_redexp env sigma r)) in if program_mode then let kind = Decls.IsDefinition kind in ComDefinition.do_definition_program ~pm ~name:name.v ~poly:atts.polymorphic ?typing_flags ~scope ~kind pl bl red_option c typ_opt ?hook else let () = ComDefinition.do_definition ~name:name.v ~poly:atts.polymorphic ?typing_flags ~scope ~kind ?using:atts.using pl bl red_option c typ_opt ?hook in pm (* NB: pstate argument to use combinators easily *) let vernac_start_proof ~atts kind l = let open DefAttributes in let scope = enforce_locality_exp atts.locality NoDischarge in if Dumpglob.dump () then List.iter (fun ((id, _), _) -> Dumpglob.dump_definition id false "prf") l; start_lemma_com ~typing_flags:atts.typing_flags ~program_mode:atts.program ~poly:atts.polymorphic ~scope ~kind:(Decls.IsProof kind) ?using:atts.using l let vernac_end_proof ~lemma ~pm = let open Vernacexpr in function | Admitted -> Declare.Proof.save_admitted ~pm ~proof:lemma | Proved (opaque,idopt) -> let pm, _ = Declare.Proof.save ~pm ~proof:lemma ~opaque ~idopt in pm let vernac_exact_proof ~lemma ~pm c = (* spiwack: for simplicity I do not enforce that "Proof proof_term" is called only at the beginning of a proof. 
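Concretely, the code below feeds the given term to Tactics.exact_proof, saves the resulting proof opaquely without renaming it, and signals an AddedAxiom feedback when the returned status flag is false.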
*) let lemma, status = Declare.Proof.by (Tactics.exact_proof c) lemma in let pm, _ = Declare.Proof.save ~pm ~proof:lemma ~opaque:Opaque ~idopt:None in if not status then Feedback.feedback Feedback.AddedAxiom; pm let vernac_assumption ~atts discharge kind l nl = let open DefAttributes in let scope = enforce_locality_exp atts.locality discharge in List.iter (fun (is_coe,(idl,c)) -> if Dumpglob.dump () then List.iter (fun (lid, _) -> match scope with | Global _ -> Dumpglob.dump_definition lid false "ax" | Discharge -> Dumpglob.dump_definition lid true "var") idl) l; if Option.has_some atts.using then Attributes.unsupported_attributes [CAst.make ("using",VernacFlagEmpty)]; ComAssumption.do_assumptions ~poly:atts.polymorphic ~program_mode:atts.program ~scope ~kind nl l let is_polymorphic_inductive_cumulativity = declare_bool_option_and_ref ~depr:false ~value:false ~key:["Polymorphic";"Inductive";"Cumulativity"] let polymorphic_cumulative ~is_defclass = let error_poly_context () = user_err Pp.(str "The cumulative attribute can only be used in a polymorphic context."); in let open Attributes in let open Notations in (* EJGA: this seems redudant with code in attributes.ml *) qualify_attribute "universes" (bool_attribute ~name:"polymorphic" ++ bool_attribute ~name:"cumulative") >>= fun (poly,cumul) -> if is_defclass && Option.has_some cumul then user_err Pp.(str "Definitional classes do not support the inductive cumulativity attribute."); match poly, cumul with | Some poly, Some cumul -> (* Case of Polymorphic|Monomorphic Cumulative|NonCumulative Inductive and #[ universes(polymorphic|monomorphic,cumulative|noncumulative) ] Inductive *) if poly then return (true, cumul) else error_poly_context () | Some poly, None -> (* Case of Polymorphic|Monomorphic Inductive and #[ universes(polymorphic|monomorphic) ] Inductive *) if poly then return (true, is_polymorphic_inductive_cumulativity ()) else return (false, false) | None, Some cumul -> (* Case of Cumulative|NonCumulative Inductive *) if is_universe_polymorphism () then return (true, cumul) else error_poly_context () | None, None -> (* Case of Inductive *) if is_universe_polymorphism () then return (true, is_polymorphic_inductive_cumulativity ()) else return (false, false) let get_uniform_inductive_parameters = Goptions.declare_bool_option_and_ref ~depr:false ~key:["Uniform"; "Inductive"; "Parameters"] ~value:false let should_treat_as_uniform () = if get_uniform_inductive_parameters () then ComInductive.UniformParameters else ComInductive.NonUniformParameters let vernac_record ~template udecl ~cumulative k ~poly ?typing_flags ~primitive_proj finite records = let map ((is_coercion, name), binders, sort, nameopt, cfs) = let idbuild = match nameopt with | None -> Nameops.add_prefix "Build_" name.v | Some lid -> let () = Dumpglob.dump_definition lid false "constr" in lid.v in let () = if Dumpglob.dump () then let () = Dumpglob.dump_definition name false "rec" in let iter (x, _) = match x with | Vernacexpr.(AssumExpr ({loc;v=Name id}, _, _) | DefExpr ({loc;v=Name id}, _, _, _)) -> Dumpglob.dump_definition (make ?loc id) false "proj" | _ -> () in List.iter iter cfs in Record.Ast.{ name; is_coercion; binders; cfs; idbuild; sort } in let records = List.map map records in match typing_flags with | Some _ -> CErrors.user_err (Pp.str "typing flags are not yet supported for records") | None -> let _ : _ list = Record.definition_structure ~template udecl k ~cumulative ~poly ~primitive_proj finite records in () let extract_inductive_udecl (indl:(inductive_expr * 
decl_notation list) list) = match indl with | [] -> assert false | (((coe,(id,udecl)),b,c,d),e) :: rest -> let rest = List.map (fun (((coe,(id,udecl)),b,c,d),e) -> if Option.has_some udecl then user_err Pp.(strbrk "Universe binders must be on the first inductive of the block.") else (((coe,id),b,c,d),e)) rest in udecl, (((coe,id),b,c,d),e) :: rest let finite_of_kind = let open Declarations in function | Inductive_kw -> Finite | CoInductive -> CoFinite | Variant | Record | Structure | Class _ -> BiFinite let private_ind = let open Attributes in let open Notations in attribute_of_list [ "matching" , single_key_parser ~name:"Private (matching) inductive type" ~key:"matching" () ] |> qualify_attribute "private" >>= function | Some () -> return true | None -> return false (** Flag governing use of primitive projections. Disabled by default. *) let primitive_flag = Goptions.declare_bool_option_and_ref ~depr:false ~key:["Primitive";"Projections"] ~value:false let primitive_proj = let open Attributes in let open Notations in qualify_attribute "projections" (bool_attribute ~name:"primitive") >>= function | Some t -> return t | None -> return (primitive_flag ()) let vernac_inductive ~atts kind indl = let udecl, indl = extract_inductive_udecl indl in let is_defclass = match kind, indl with | Class _, [ ( id , bl , c , Constructors [l]), [] ] -> Some (id, bl, c, l) | _ -> None in let finite = finite_of_kind kind in let is_record = function | ((_ , _ , _ , RecordDecl _), _) -> true | _ -> false in let is_constructor = function | ((_ , _ , _ , Constructors _), _) -> true | _ -> false in (* We only allow the #[projections(primitive)] attribute for records. *) let prim_proj_attr : bool Attributes.Notations.t = if List.for_all is_record indl then primitive_proj else Notations.return false in let (((template, (poly, cumulative)), private_ind), typing_flags), primitive_proj = Attributes.( parse Notations.( template ++ polymorphic_cumulative ~is_defclass:(Option.has_some is_defclass) ++ private_ind ++ typing_flags ++ prim_proj_attr) atts) in if Dumpglob.dump () then List.iter (fun (((coe,lid), _, _, cstrs), _) -> match cstrs with | Constructors cstrs -> Dumpglob.dump_definition lid false "ind"; List.iter (fun (_, (lid, _)) -> Dumpglob.dump_definition lid false "constr") cstrs | _ -> () (* dumping is done by vernac_record (called below) *) ) indl; if Option.has_some is_defclass then (* Definitional class case *) let (id, bl, c, l) = Option.get is_defclass in let bl = match bl with | bl, None -> bl | _ -> CErrors.user_err Pp.(str "Definitional classes do not support the \"|\" syntax.") in let (coe, (lid, ce)) = l in let coe' = if coe then BackInstance else NoInstance in let f = AssumExpr ((make ?loc:lid.loc @@ Name lid.v), [], ce), { rf_subclass = coe' ; rf_priority = None ; rf_notation = [] ; rf_canonical = true } in vernac_record ~template udecl ~cumulative (Class true) ~poly ?typing_flags ~primitive_proj finite [id, bl, c, None, [f]] else if List.for_all is_record indl then (* Mutual record case *) let () = match kind with | Variant -> user_err (str "The Variant keyword does not support syntax { ... 
}.") | Record | Structure | Class _ | Inductive_kw | CoInductive -> () in let check_where ((_, _, _, _), wh) = match wh with | [] -> () | _ :: _ -> user_err (str "where clause not supported for records") in let () = List.iter check_where indl in let unpack ((id, bl, c, decl), _) = match decl with | RecordDecl (oc, fs) -> let bl = match bl with | bl, None -> bl | _ -> CErrors.user_err Pp.(str "Records do not support the \"|\" syntax.") in (id, bl, c, oc, fs) | Constructors _ -> assert false (* ruled out above *) in let kind = match kind with Class _ -> Class false | _ -> kind in let recordl = List.map unpack indl in vernac_record ~template udecl ~cumulative kind ~poly ?typing_flags ~primitive_proj finite recordl else if List.for_all is_constructor indl then (* Mutual inductive case *) let () = match kind with | (Record | Structure) -> user_err (str "The Record keyword is for types defined using the syntax { ... }.") | Class _ -> user_err (str "Inductive classes not supported") | Variant | Inductive_kw | CoInductive -> () in let check_name ((na, _, _, _), _) = match na with | (true, _) -> user_err (str "Variant types do not handle the \"> Name\" \ syntax, which is reserved for records. Use the \":>\" \ syntax on constructors instead.") | _ -> () in let () = List.iter check_name indl in let unpack (((_, id) , bl, c, decl), ntn) = match decl with | Constructors l -> (id, bl, c, l), ntn | RecordDecl _ -> assert false (* ruled out above *) in let indl = List.map unpack indl in let uniform = should_treat_as_uniform () in ComInductive.do_mutual_inductive ~template udecl indl ~cumulative ~poly ?typing_flags ~private_ind ~uniform finite else user_err (str "Mixed record-inductive definitions are not allowed") let vernac_fixpoint_common ~atts discharge l = if Dumpglob.dump () then List.iter (fun { fname } -> Dumpglob.dump_definition fname false "def") l; enforce_locality_exp atts.DefAttributes.locality discharge let vernac_fixpoint_interactive ~atts discharge l = let open DefAttributes in let scope = vernac_fixpoint_common ~atts discharge l in if atts.program then CErrors.user_err Pp.(str"Program Fixpoint requires a body"); let typing_flags = atts.typing_flags in ComFixpoint.do_fixpoint_interactive ~scope ~poly:atts.polymorphic ?typing_flags l |> vernac_set_used_variables_opt ?using:atts.using let vernac_fixpoint ~atts ~pm discharge l = let open DefAttributes in let scope = vernac_fixpoint_common ~atts discharge l in let typing_flags = atts.typing_flags in if atts.program then (* XXX: Switch to the attribute system and match on ~atts *) ComProgramFixpoint.do_fixpoint ~pm ~scope ~poly:atts.polymorphic ?typing_flags ?using:atts.using l else let () = ComFixpoint.do_fixpoint ~scope ~poly:atts.polymorphic ?typing_flags ?using:atts.using l in pm let vernac_cofixpoint_common ~atts discharge l = if Dumpglob.dump () then List.iter (fun { fname } -> Dumpglob.dump_definition fname false "def") l; enforce_locality_exp atts.DefAttributes.locality discharge let vernac_cofixpoint_interactive ~atts discharge l = let open DefAttributes in let scope = vernac_cofixpoint_common ~atts discharge l in if atts.program then CErrors.user_err Pp.(str"Program CoFixpoint requires a body"); vernac_set_used_variables_opt ?using:atts.using (ComFixpoint.do_cofixpoint_interactive ~scope ~poly:atts.polymorphic l) let vernac_cofixpoint ~atts ~pm discharge l = let open DefAttributes in let scope = vernac_cofixpoint_common ~atts discharge l in if atts.program then ComProgramFixpoint.do_cofixpoint ~pm ~scope ~poly:atts.polymorphic 
?using:atts.using l else let () = ComFixpoint.do_cofixpoint ~scope ~poly:atts.polymorphic ?using:atts.using l in pm let vernac_scheme l = if Dumpglob.dump () then List.iter (fun (lid, s) -> Option.iter (fun lid -> Dumpglob.dump_definition lid false "def") lid; match s with | InductionScheme (_, r, _) | CaseScheme (_, r, _) | EqualityScheme r -> dump_global r) l; Indschemes.do_scheme l let vernac_combined_scheme lid l = if Dumpglob.dump () then (Dumpglob.dump_definition lid false "def"; List.iter (fun {loc;v=id} -> dump_global (make ?loc @@ Constrexpr.AN (qualid_of_ident ?loc id))) l); Indschemes.do_combined_scheme lid l let vernac_universe ~poly l = if poly && not (Global.sections_are_opened ()) then user_err (str"Polymorphic universes can only be declared inside sections, " ++ str "use Monomorphic Universe instead."); DeclareUniv.do_universe ~poly l let vernac_constraint ~poly l = if poly && not (Global.sections_are_opened ()) then user_err (str"Polymorphic universe constraints can only be declared" ++ str " inside sections, use Monomorphic Constraint instead."); DeclareUniv.do_constraint ~poly l (**********************) (* Modules *) let warn_not_importable = CWarnings.create ~name:"not-importable" ~category:"modules" Pp.(fun c -> str "Cannot import local constant " ++ Printer.pr_constant (Global.env()) c ++ str ", it will be ignored.") let importable_extended_global_of_path ?loc path = match Nametab.extended_global_of_path path with | Globnames.TrueGlobal (GlobRef.ConstRef c) as ref -> if Declare.is_local_constant c then begin warn_not_importable ?loc c; None end else Some ref | ref -> Some ref let add_subnames_of ?loc len n ns full_n ref = let open GlobRef in let add1 r ns = (len, Globnames.TrueGlobal r) :: ns in match ref with | Globnames.SynDef _ | Globnames.TrueGlobal (ConstRef _ | ConstructRef _ | VarRef _) -> CErrors.user_err ?loc Pp.(str "Only inductive types can be used with Import (...).") | Globnames.TrueGlobal (IndRef (mind,i)) -> let open Declarations in let dp = Libnames.dirpath full_n in let mib = Global.lookup_mind mind in let mip = mib.mind_packets.(i) in let ns = add1 (IndRef (mind,i)) ns in let ns = Array.fold_left_i (fun j ns _ -> add1 (ConstructRef ((mind,i),j+1)) ns) ns mip.mind_consnames in List.fold_left (fun ns f -> let s = Indrec.elimination_suffix f in let n_elim = Id.of_string (Id.to_string mip.mind_typename ^ s) in match importable_extended_global_of_path ?loc (Libnames.make_path dp n_elim) with | exception Not_found -> ns | None -> ns | Some ref -> (len, ref) :: ns) ns Sorts.all_families let interp_names m ns = let dp_m = Nametab.dirpath_of_module m in let ns = List.fold_left (fun ns (n,etc) -> let len, full_n = let dp_n,n = repr_qualid n in List.length (DirPath.repr dp_n), make_path (append_dirpath dp_m dp_n) n in let ref = try importable_extended_global_of_path ?loc:n.loc full_n with Not_found -> CErrors.user_err ?loc:n.loc Pp.(str "Cannot find name " ++ pr_qualid n ++ spc() ++ str "in module " ++ pr_qualid (Nametab.shortest_qualid_of_module m)) in (* TODO dumpglob? 
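Note: when a name designates an inductive type, the add_subnames_of helper also schedules its constructors and, when they resolve next to the inductive, the generated elimination constants built with Indrec.elimination_suffix (typically the _ind, _rec, _rect and _sind schemes); local constants that cannot be imported are skipped after emitting the not-importable warning.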
*) match ref with | Some ref -> let ns = (len,ref) :: ns in if etc then add_subnames_of ?loc:n.loc len n ns full_n ref else ns | None -> ns) [] ns in ns let cache_name (len,n) = let open Globnames in let open GlobRef in match n with | SynDef kn -> Syntax_def.import_syntax_constant (len+1) (Nametab.path_of_syndef kn) kn | TrueGlobal (VarRef _) -> assert false | TrueGlobal (ConstRef c) when Declare.is_local_constant c -> (* Can happen through functor application *) warn_not_importable c | TrueGlobal gr -> Nametab.(push (Exactly (len+1)) (path_of_global gr) gr) let cache_names (_,ns) = List.iter cache_name ns let subst_names (subst,ns) = List.Smart.map (on_snd (Globnames.subst_extended_reference subst)) ns let inExportNames = Libobject.declare_object (Libobject.global_object "EXPORTNAMES" ~cache:cache_names ~subst:(Some subst_names) ~discharge:(fun (_,x) -> Some x)) let import_names ~export m ns = let ns = interp_names m ns in if export then Lib.add_anonymous_leaf (inExportNames ns) else cache_names ((),ns) let vernac_import export cats refl = if Option.has_some cats then List.iter (function | _, ImportAll -> () | q, ImportNames _ -> CErrors.user_err ?loc:q.loc Pp.(str "Cannot combine importing by categories and importing by names.")) refl; let cats = Option.cata (fun cats -> Libobject.make_filter ~finite:(not cats.negative) cats.import_cats) Libobject.unfiltered cats in let import_mod (qid,f) = let loc = qid.loc in let m = try let m = Nametab.locate_module qid in let () = Dumpglob.dump_modref ?loc m "mod" in let () = if Modops.is_functor (Global.lookup_module m).Declarations.mod_type then CErrors.user_err ?loc Pp.(str "Cannot import functor " ++ pr_qualid qid ++ str".") in m with Not_found -> CErrors.user_err ?loc Pp.(str "Cannot find module " ++ pr_qualid qid) in match f with | ImportAll -> Declaremods.import_module cats ~export m | ImportNames ns -> import_names ~export m ns in List.iter import_mod refl let vernac_declare_module export {loc;v=id} binders_ast mty_ast = (* We check the state of the system (in section, in module type) and what module information is supplied *) if Global.sections_are_opened () then user_err Pp.(str "Modules and Module Types are not allowed inside sections."); let binders_ast = List.map (fun (export,idl,ty) -> if not (Option.is_empty export) then user_err Pp.(str "Arguments of a functor declaration cannot be exported. 
Remove the \"Export\" and \"Import\" keywords from every functor argument.") else (idl,ty)) binders_ast in let mp = Declaremods.declare_module id binders_ast (Declaremods.Enforce mty_ast) [] in Dumpglob.dump_moddef ?loc mp "mod"; Flags.if_verbose Feedback.msg_info (str "Module " ++ Id.print id ++ str " is declared"); Option.iter (fun export -> vernac_import export None [qualid_of_ident id, ImportAll]) export let vernac_define_module export {loc;v=id} (binders_ast : module_binder list) mty_ast_o mexpr_ast_l = (* We check the state of the system (in section, in module type) and what module information is supplied *) if Global.sections_are_opened () then user_err Pp.(str "Modules and Module Types are not allowed inside sections."); match mexpr_ast_l with | [] -> let binders_ast,argsexport = List.fold_right (fun (export,idl,ty) (args,argsexport) -> (idl,ty)::args, (List.map (fun {v=i} -> export,i)idl)@argsexport) binders_ast ([],[]) in let mp = Declaremods.start_module export id binders_ast mty_ast_o in Dumpglob.dump_moddef ?loc mp "mod"; Flags.if_verbose Feedback.msg_info (str "Interactive Module " ++ Id.print id ++ str " started"); List.iter (fun (export,id) -> Option.iter (fun export -> vernac_import export None [qualid_of_ident id, ImportAll]) export ) argsexport | _::_ -> let binders_ast = List.map (fun (export,idl,ty) -> if not (Option.is_empty export) then user_err Pp.(str "Arguments of a functor definition can be imported only if the definition is interactive. Remove the \"Export\" and \"Import\" keywords from every functor argument.") else (idl,ty)) binders_ast in let mp = Declaremods.declare_module id binders_ast mty_ast_o mexpr_ast_l in Dumpglob.dump_moddef ?loc mp "mod"; Flags.if_verbose Feedback.msg_info (str "Module " ++ Id.print id ++ str " is defined"); Option.iter (fun export -> vernac_import export None [qualid_of_ident id, ImportAll]) export let vernac_end_module export {loc;v=id} = let mp = Declaremods.end_module () in Dumpglob.dump_modref ?loc mp "mod"; Flags.if_verbose Feedback.msg_info (str "Module " ++ Id.print id ++ str " is defined"); Option.iter (fun export -> vernac_import export None [qualid_of_ident ?loc id, ImportAll]) export let vernac_declare_module_type {loc;v=id} binders_ast mty_sign mty_ast_l = if Global.sections_are_opened () then user_err Pp.(str "Modules and Module Types are not allowed inside sections."); match mty_ast_l with | [] -> let binders_ast,argsexport = List.fold_right (fun (export,idl,ty) (args,argsexport) -> (idl,ty)::args, (List.map (fun {v=i} -> export,i)idl)@argsexport) binders_ast ([],[]) in let mp = Declaremods.start_modtype id binders_ast mty_sign in Dumpglob.dump_moddef ?loc mp "modtype"; Flags.if_verbose Feedback.msg_info (str "Interactive Module Type " ++ Id.print id ++ str " started"); List.iter (fun (export,id) -> Option.iter (fun export -> vernac_import export None [qualid_of_ident ?loc id, ImportAll]) export ) argsexport | _ :: _ -> let binders_ast = List.map (fun (export,idl,ty) -> if not (Option.is_empty export) then user_err Pp.(str "Arguments of a functor definition can be imported only if the definition is interactive. 
Remove the \"Export\" and \"Import\" keywords from every functor argument.") else (idl,ty)) binders_ast in let mp = Declaremods.declare_modtype id binders_ast mty_sign mty_ast_l in Dumpglob.dump_moddef ?loc mp "modtype"; Flags.if_verbose Feedback.msg_info (str "Module Type " ++ Id.print id ++ str " is defined") let vernac_end_modtype {loc;v=id} = let mp = Declaremods.end_modtype () in Dumpglob.dump_modref ?loc mp "modtype"; Flags.if_verbose Feedback.msg_info (str "Module Type " ++ Id.print id ++ str " is defined") let vernac_include l = Declaremods.declare_include l (**********************) (* Gallina extensions *) (* Sections *) let vernac_begin_section ~poly ({v=id} as lid) = Dumpglob.dump_definition lid true "sec"; Lib.open_section id; (* If there was no polymorphism attribute this just sets the option to its current value ie noop. *) set_bool_option_value_gen ~locality:OptLocal ["Universe"; "Polymorphism"] poly let vernac_end_section {CAst.loc; v} = Dumpglob.dump_reference ?loc (DirPath.to_string (Lib.current_dirpath true)) "<>" "sec"; Lib.close_section () let vernac_name_sec_hyp {v=id} set = Proof_using.name_set id set (* Dispatcher of the "End" command *) let msg_of_subsection ss id = let kind = match ss with | Lib.OpenedModule (false,_,_,_) -> "module" | Lib.OpenedModule (true,_,_,_) -> "module type" | Lib.OpenedSection _ -> "section" | _ -> "unknown" in Pp.str kind ++ spc () ++ Id.print id let vernac_end_segment ~pm ~proof ({v=id} as lid) = let ss = Lib.find_opening_node id in let what_for = msg_of_subsection ss lid.v in if Option.has_some proof then CErrors.user_err (Pp.str "Command not supported (Open proofs remain)"); Declare.Obls.check_solved_obligations ~pm ~what_for; match ss with | Lib.OpenedModule (false,export,_,_) -> vernac_end_module export lid | Lib.OpenedModule (true,_,_,_) -> vernac_end_modtype lid | Lib.OpenedSection _ -> vernac_end_section lid | _ -> assert false let vernac_end_segment lid = Vernacextend.TypedVernac { inprog = Use; outprog = Pop; inproof = UseOpt; outproof = No; run = (fun ~pm ~proof -> let () = vernac_end_segment ~pm ~proof lid in (), ()) } [@@ocaml.warning "-40"] let vernac_begin_segment ~interactive f = let inproof = Vernacextend.InProof.(if interactive then Reject else Ignore) in let outprog = Vernacextend.OutProg.(if interactive then Push else No) in Vernacextend.TypedVernac { inprog = Ignore; outprog; inproof; outproof = No; run = (fun ~pm ~proof -> let () = f () in (), ()) } [@@ocaml.warning "-40"] (* Libraries *) let warn_require_in_section = CWarnings.create ~name:"require-in-section" ~category:"fragile" (fun () -> strbrk "Use of “Require” inside a section is fragile." 
++ spc() ++ strbrk "It is not recommended to use this functionality in finished proof scripts.") let vernac_require from import qidl = if Global.sections_are_opened () then warn_require_in_section (); let root = match from with | None -> None | Some from -> let (hd, tl) = Libnames.repr_qualid from in Some (Libnames.add_dirpath_suffix hd tl) in let locate qid = let open Loadpath in match locate_qualified_library ?root qid with | Ok (_,dir,f) -> dir, f | Error LibUnmappedDir -> raise (UnmappedLibrary (root, qid)) | Error LibNotFound -> raise (NotFoundLibrary (root, qid)) in let modrefl = List.map locate qidl in if Dumpglob.dump () then List.iter2 (fun {CAst.loc} (dp,_) -> Dumpglob.dump_libref ?loc dp "lib") qidl modrefl; let lib_resolver = Loadpath.try_locate_absolute_library in Library.require_library_from_dirpath ~lib_resolver modrefl import (* Coercions and canonical structures *) let vernac_canonical ~local r = Canonical.declare_canonical_structure ?local (smart_global r) let vernac_coercion ~atts ref qids qidt = let local, poly = Attributes.(parse Notations.(locality ++ polymorphic) atts) in let local = enforce_locality local in let target = cl_of_qualid qidt in let source = cl_of_qualid qids in let ref' = smart_global ref in ComCoercion.try_add_new_coercion_with_target ref' ~local ~poly ~source ~target; Flags.if_verbose Feedback.msg_info (pr_global ref' ++ str " is now a coercion") let vernac_identity_coercion ~atts id qids qidt = let local, poly = Attributes.(parse Notations.(locality ++ polymorphic) atts) in let local = enforce_locality local in let target = cl_of_qualid qidt in let source = cl_of_qualid qids in ComCoercion.try_add_new_identity_coercion id ~local ~poly ~source ~target (* Type classes *) let vernac_instance_program ~atts ~pm name bl t props info = Dumpglob.dump_constraint (fst name) false "inst"; let locality, poly = Attributes.(parse (Notations.(Classes.instance_locality ++ polymorphic))) atts in let pm, _id = Classes.new_instance_program ~pm ~locality ~poly name bl t props info in pm let vernac_instance_interactive ~atts name bl t info props = Dumpglob.dump_constraint (fst name) false "inst"; let locality, poly = Attributes.(parse (Notations.(Classes.instance_locality ++ polymorphic))) atts in let _id, pstate = Classes.new_instance_interactive ~locality ~poly name bl t info props in pstate let vernac_instance ~atts name bl t props info = Dumpglob.dump_constraint (fst name) false "inst"; let locality, poly = Attributes.(parse (Notations.(Classes.instance_locality ++ polymorphic))) atts in let _id : Id.t = Classes.new_instance ~locality ~poly name bl t props info in () let vernac_declare_instance ~atts id bl inst pri = Dumpglob.dump_definition (fst id) false "inst"; let (program, locality), poly = Attributes.(parse (Notations.(program ++ Classes.instance_locality ++ polymorphic))) atts in Classes.declare_new_instance ~program_mode:program ~locality ~poly id bl inst pri let vernac_existing_instance ~atts insts = let locality = Attributes.parse Classes.instance_locality atts in List.iter (fun (id, info) -> Classes.existing_instance locality id (Some info)) insts let vernac_existing_class id = Record.declare_existing_class (Nametab.global id) (***********) (* Solving *) let command_focus = Proof.new_focus_kind () let focus_command_cond = Proof.no_cond command_focus let vernac_set_end_tac ~pstate tac = let env = Genintern.empty_glob_sign (Global.env ()) in let _, tac = Genintern.generic_intern env tac in (* TO DO verifier s'il faut pas mettre exist s | TacId s ici*) 
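(* (Translation of the French note above: check whether we should not put exist s | TacId s here.) *)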
Declare.Proof.set_endline_tactic tac pstate (*****************************) (* Auxiliary file management *) let expand filename = Envars.expand_path_macros ~warn:(fun x -> Feedback.msg_warning (str x)) filename let vernac_add_loadpath ~implicit pdir coq_path = let open Loadpath in let pdir = expand pdir in add_vo_path { unix_path = pdir; coq_path; has_ml = true; implicit; recursive = true } let vernac_remove_loadpath path = Loadpath.remove_load_path (expand path) (* Coq syntax for ML or system commands *) let vernac_add_ml_path path = Mltop.add_ml_dir (expand path) let vernac_declare_ml_module ~local l = let local = Option.default false local in Mltop.declare_ml_modules local (List.map expand l) let vernac_chdir = function | None -> Feedback.msg_notice (str (Sys.getcwd())) | Some path -> begin try Sys.chdir (expand path) with Sys_error err -> (* Cd is typically used to control the output directory of extraction. A failed Cd could lead to overwriting .ml files so we make it an error. *) user_err Pp.(str ("Cd failed: " ^ err)) end; Flags.if_verbose Feedback.msg_info (str (Sys.getcwd())) (************) (* Commands *) let vernac_create_hintdb ~module_local id b = Hints.create_hint_db module_local id TransparentState.full b let warn_implicit_core_hint_db = CWarnings.create ~name:"implicit-core-hint-db" ~category:"deprecated" (fun () -> strbrk "Adding and removing hints in the core database implicitly is deprecated. " ++ strbrk"Please specify a hint database.") let vernac_remove_hints ~atts dbnames ids = let locality = Attributes.(parse really_hint_locality atts) in let dbnames = if List.is_empty dbnames then (warn_implicit_core_hint_db (); ["core"]) else dbnames in Hints.remove_hints ~locality dbnames (List.map Smartlocate.global_with_alias ids) let vernac_hints ~atts dbnames h = let dbnames = if List.is_empty dbnames then (warn_implicit_core_hint_db (); ["core"]) else dbnames in let locality, poly = Attributes.(parse Notations.(really_hint_locality ++ polymorphic) atts) in Hints.add_hints ~locality dbnames (ComHints.interp_hints ~poly h) let vernac_syntactic_definition ~atts lid x only_parsing = let module_local, deprecation = Attributes.(parse Notations.(module_locality ++ deprecation) atts) in Dumpglob.dump_definition lid false "syndef"; Metasyntax.add_syntactic_definition ~local:module_local deprecation (Global.env()) lid.v x only_parsing let default_env () = { Notation_term.ninterp_var_type = Id.Map.empty; ninterp_rec_vars = Id.Map.empty; } let vernac_reserve bl = let sb_decl = (fun (idl,c) -> let env = Global.env() in let sigma = Evd.from_env env in let t,ctx = Constrintern.interp_type env sigma c in let t = Flags.without_option Detyping.print_universes (fun () -> Detyping.detype Detyping.Now false Id.Set.empty env (Evd.from_ctx ctx) t) () in let t,_ = Notation_ops.notation_constr_of_glob_constr (default_env ()) t in Reserve.declare_reserved_type idl t) in List.iter sb_decl bl let vernac_generalizable ~local = let local = Option.default true local in Implicit_quantifiers.declare_generalizable ~local let allow_sprop_opt_name = ["Allow";"StrictProp"] let cumul_sprop_opt_name = ["Cumulative";"StrictProp"] let () = declare_bool_option { optdepr = false; optkey = allow_sprop_opt_name; optread = (fun () -> Global.sprop_allowed()); optwrite = Global.set_allow_sprop } let () = declare_bool_option { optdepr = false; optkey = cumul_sprop_opt_name; optread = Global.is_cumulative_sprop; optwrite = Global.set_cumulative_sprop } let () = declare_bool_option { optdepr = false; optkey = ["Silent"]; 
optread = (fun () -> !Flags.quiet); optwrite = ((:=) Flags.quiet) } let () = declare_bool_option { optdepr = false; optkey = ["Implicit";"Arguments"]; optread = Impargs.is_implicit_args; optwrite = Impargs.make_implicit_args } let () = declare_bool_option { optdepr = false; optkey = ["Strict";"Implicit"]; optread = Impargs.is_strict_implicit_args; optwrite = Impargs.make_strict_implicit_args } let () = declare_bool_option { optdepr = false; optkey = ["Strongly";"Strict";"Implicit"]; optread = Impargs.is_strongly_strict_implicit_args; optwrite = Impargs.make_strongly_strict_implicit_args } let () = declare_bool_option { optdepr = false; optkey = ["Contextual";"Implicit"]; optread = Impargs.is_contextual_implicit_args; optwrite = Impargs.make_contextual_implicit_args } let () = declare_bool_option { optdepr = false; optkey = ["Reversible";"Pattern";"Implicit"]; optread = Impargs.is_reversible_pattern_implicit_args; optwrite = Impargs.make_reversible_pattern_implicit_args } let () = declare_bool_option { optdepr = false; optkey = ["Maximal";"Implicit";"Insertion"]; optread = Impargs.is_maximal_implicit_args; optwrite = Impargs.make_maximal_implicit_args } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Coercions"]; optread = (fun () -> !Constrextern.print_coercions); optwrite = (fun b -> Constrextern.print_coercions := b) } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Parentheses"]; optread = (fun () -> !Constrextern.print_parentheses); optwrite = (fun b -> Constrextern.print_parentheses := b) } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Existential";"Instances"]; optread = (fun () -> !Detyping.print_evar_arguments); optwrite = (:=) Detyping.print_evar_arguments } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Implicit"]; optread = (fun () -> !Constrextern.print_implicits); optwrite = (fun b -> Constrextern.print_implicits := b) } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Implicit";"Defensive"]; optread = (fun () -> !Constrextern.print_implicits_defensive); optwrite = (fun b -> Constrextern.print_implicits_defensive := b) } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Projections"]; optread = (fun () -> !Constrextern.print_projections); optwrite = (fun b -> Constrextern.print_projections := b) } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Notations"]; optread = (fun () -> not !Constrextern.print_no_symbol); optwrite = (fun b -> Constrextern.print_no_symbol := not b) } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Raw";"Literals"]; optread = (fun () -> !Constrextern.print_raw_literal); optwrite = (fun b -> Constrextern.print_raw_literal := b) } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"All"]; optread = (fun () -> !Flags.raw_print); optwrite = (fun b -> Flags.raw_print := b) } let () = declare_int_option { optdepr = false; optkey = ["Inline";"Level"]; optread = (fun () -> Some (Flags.get_inline_level ())); optwrite = (fun o -> let lev = Option.default Flags.default_inline_level o in Flags.set_inline_level lev) } let () = declare_bool_option { optdepr = false; optkey = ["Kernel"; "Term"; "Sharing"]; optread = (fun () -> (Global.typing_flags ()).Declarations.share_reduction); optwrite = Global.set_share_reduction } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Compact";"Contexts"]; optread = (fun () -> Printer.get_compact_context()); optwrite = (fun 
b -> Printer.set_compact_context b) } let () = declare_int_option { optdepr = false; optkey = ["Printing";"Depth"]; optread = Topfmt.get_depth_boxes; optwrite = Topfmt.set_depth_boxes } let () = declare_int_option { optdepr = false; optkey = ["Printing";"Width"]; optread = Topfmt.get_margin; optwrite = Topfmt.set_margin } let () = declare_bool_option { optdepr = false; optkey = ["Printing";"Universes"]; optread = (fun () -> !Constrextern.print_universes); optwrite = (fun b -> Constrextern.print_universes:=b) } let () = declare_bool_option { optdepr = false; optkey = ["Dump";"Bytecode"]; optread = (fun () -> !Vmbytegen.dump_bytecode); optwrite = (:=) Vmbytegen.dump_bytecode } let () = declare_bool_option { optdepr = false; optkey = ["Dump";"Lambda"]; optread = (fun () -> !Vmlambda.dump_lambda); optwrite = (:=) Vmlambda.dump_lambda } let () = declare_bool_option { optdepr = false; optkey = ["Parsing";"Explicit"]; optread = (fun () -> !Constrintern.parsing_explicit); optwrite = (fun b -> Constrintern.parsing_explicit := b) } let () = declare_string_option ~preprocess:CWarnings.normalize_flags_string { optdepr = false; optkey = ["Warnings"]; optread = CWarnings.get_flags; optwrite = CWarnings.set_flags } let () = declare_string_option { optdepr = false; optkey = ["Debug"]; optread = CDebug.get_flags; optwrite = CDebug.set_flags } let () = declare_bool_option { optdepr = false; optkey = ["Guard"; "Checking"]; optread = (fun () -> (Global.typing_flags ()).Declarations.check_guarded); optwrite = (fun b -> Global.set_check_guarded b) } let () = declare_bool_option { optdepr = false; optkey = ["Positivity"; "Checking"]; optread = (fun () -> (Global.typing_flags ()).Declarations.check_positive); optwrite = (fun b -> Global.set_check_positive b) } let () = declare_bool_option { optdepr = false; optkey = ["Universe"; "Checking"]; optread = (fun () -> (Global.typing_flags ()).Declarations.check_universes); optwrite = (fun b -> Global.set_check_universes b) } let () = declare_bool_option { optdepr = false; optkey = ["Definitional"; "UIP"]; optread = (fun () -> (Global.typing_flags ()).Declarations.allow_uip); optwrite = (fun b -> Global.set_typing_flags {(Global.typing_flags ()) with Declarations.allow_uip = b}) } let vernac_set_strategy ~local l = let open Tacred in let local = Option.default false local in let glob_ref r = match smart_global r with | GlobRef.ConstRef sp -> EvalConstRef sp | GlobRef.VarRef id -> EvalVarRef id | _ -> user_err Pp.(str "cannot set an inductive type or a constructor as transparent") in let l = List.map (fun (lev,ql) -> (lev,List.map glob_ref ql)) l in Redexpr.set_strategy local l let vernac_set_opacity ~local (v,l) = let open Tacred in let local = Option.default true local in let glob_ref r = match smart_global r with | GlobRef.ConstRef sp -> EvalConstRef sp | GlobRef.VarRef id -> EvalVarRef id | _ -> user_err Pp.(str "cannot set an inductive type or a constructor as transparent") in let l = List.map glob_ref l in Redexpr.set_strategy local [v,l] let vernac_set_option0 ~locality key opt = match opt with | OptionUnset -> unset_option_value_gen ~locality key | OptionSetString s -> set_string_option_value_gen ~locality key s | OptionSetInt n -> set_int_option_value_gen ~locality key (Some n) | OptionSetTrue -> set_bool_option_value_gen ~locality key true let vernac_set_append_option ~locality key s = set_string_option_append_value_gen ~locality key s let vernac_set_option ~locality table v = match v with | OptionSetString s -> (* We make a special case for warnings and debug 
flags because appending is their natural semantics *) if CString.List.equal table ["Warnings"] || CString.List.equal table ["Debug"] then vernac_set_append_option ~locality table s else let (last, prefix) = List.sep_last table in if String.equal last "Append" && not (List.is_empty prefix) then vernac_set_append_option ~locality prefix s else vernac_set_option0 ~locality table v | _ -> vernac_set_option0 ~locality table v let vernac_add_option = iter_table { aux = fun table -> table.add } let vernac_remove_option = iter_table { aux = fun table -> table.remove } let vernac_mem_option = iter_table { aux = fun table -> table.mem } let vernac_print_option key = try (get_ref_table key).print () with Not_found -> try (get_string_table key).print () with Not_found -> try print_option_value key with Not_found -> error_undeclared_key key let get_current_context_of_args ~pstate = match pstate with | None -> fun _ -> let env = Global.env () in Evd.(from_env env, env) | Some lemma -> function | Some n -> Declare.Proof.get_goal_context lemma n | None -> Declare.Proof.get_current_context lemma let query_command_selector ?loc = function | None -> None | Some (Goal_select.SelectNth n) -> Some n | _ -> user_err ?loc (str "Query commands only support the single numbered goal selector.") let vernac_check_may_eval ~pstate redexp glopt rc = let glopt = query_command_selector glopt in let sigma, env = get_current_context_of_args ~pstate glopt in let sigma, c = Constrintern.interp_open_constr ~expected_type:Pretyping.UnknownIfTermOrType env sigma rc in let sigma = Evarconv.solve_unif_constraints_with_heuristics env sigma in Evarconv.check_problems_are_solved env sigma; let sigma = Evd.minimize_universes sigma in let uctx = Evd.universe_context_set sigma in let env = Environ.push_context_set uctx (Evarutil.nf_env_evar sigma env) in let j = if Evarutil.has_undefined_evars sigma c then Evarutil.j_nf_evar sigma (Retyping.get_judgment_of env sigma c) else let c = EConstr.to_constr sigma c in (* OK to call kernel which does not support evars *) Environ.on_judgment EConstr.of_constr (Arguments_renaming.rename_typing env c) in let j = { j with Environ.uj_type = Reductionops.nf_betaiota env sigma j.Environ.uj_type } in let pp = match redexp with | None -> let evars_of_term c = Evarutil.undefined_evars_of_term sigma c in let l = Evar.Set.union (evars_of_term j.Environ.uj_val) (evars_of_term j.Environ.uj_type) in Prettyp.print_judgment env sigma j ++ pr_ne_evar_set (fnl () ++ str "where" ++ fnl ()) (mt ()) sigma l | Some r -> let (sigma,r_interp) = Hook.get f_interp_redexp env sigma r in let redfun env evm c = let (redfun, _) = Redexpr.reduction_of_red_expr env r_interp in let (_, c) = redfun env evm c in c in Prettyp.print_eval redfun env sigma rc j in pp ++ Printer.pr_universe_ctx_set sigma uctx let vernac_declare_reduction ~local s r = let local = Option.default false local in let env = Global.env () in let sigma = Evd.from_env env in Redexpr.declare_red_expr local s (snd (Hook.get f_interp_redexp env sigma r)) (* The same but avoiding the current goal context if any *) let vernac_global_check c = let env = Global.env() in let sigma = Evd.from_env env in let c,uctx = Constrintern.interp_constr env sigma c in let senv = Global.safe_env() in let uctx = UState.context_set uctx in let senv = Safe_typing.push_context_set ~strict:false uctx senv in let c = EConstr.to_constr sigma c in let j = Safe_typing.typing senv c in let env = Safe_typing.env_of_safe_env senv in Prettyp.print_safe_judgment env sigma j ++ 
pr_universe_ctx_set sigma uctx let get_nth_goal ~pstate n = let pf = Declare.Proof.get pstate in let Proof.{goals;sigma} = Proof.data pf in let gl = {Evd.it=List.nth goals (n-1) ; sigma = sigma; } in gl (* Printing "About" information of a hypothesis of the current goal. We only print the type and a small statement to this comes from the goal. Precondition: there must be at least one current goal. *) let print_about_hyp_globs ~pstate ?loc ref_or_by_not udecl glopt = let exception NoHyp in let open Context.Named.Declaration in try (* Fallback early to globals *) let pstate = match pstate with | None -> raise Not_found | Some pstate -> pstate in (* FIXME error on non None udecl if we find the hyp. *) let glnumopt = query_command_selector ?loc glopt in let gl,id = let open Constrexpr in match glnumopt, ref_or_by_not.v with | None,AN qid when qualid_is_ident qid -> (* goal number not given, catch any failure *) (try get_nth_goal ~pstate 1, qualid_basename qid with _ -> raise NoHyp) | Some n,AN qid when qualid_is_ident qid -> (* goal number given, catch if wong *) (try get_nth_goal ~pstate n, qualid_basename qid with Failure _ -> user_err ?loc (str "No such goal: " ++ int n ++ str ".")) | _ , _ -> raise NoHyp in let hyps = Tacmach.Old.pf_hyps gl in let decl = Context.Named.lookup id hyps in let natureofid = match decl with | LocalAssum _ -> "Hypothesis" | LocalDef (_,bdy,_) ->"Constant (let in)" in let sigma, env = Declare.Proof.get_current_context pstate in v 0 (Id.print id ++ str":" ++ pr_econstr_env env sigma (NamedDecl.get_type decl) ++ fnl() ++ fnl() ++ str natureofid ++ str " of the goal context.") with (* fallback to globals *) | NoHyp | Not_found -> let sigma, env = get_current_or_global_context ~pstate in Prettyp.print_about env sigma ref_or_by_not udecl let vernac_print ~pstate = let sigma, env = get_current_or_global_context ~pstate in function | PrintTypingFlags -> pr_typing_flags (Environ.typing_flags (Global.env ())) | PrintTables -> print_tables () | PrintFullContext-> Prettyp.print_full_context_typ env sigma | PrintSectionContext qid -> Prettyp.print_sec_context_typ env sigma qid | PrintInspect n -> Prettyp.inspect env sigma n | PrintGrammar ent -> Metasyntax.pr_grammar ent | PrintCustomGrammar ent -> Metasyntax.pr_custom_grammar ent | PrintLoadPath dir -> (* For compatibility ? 
*) print_loadpath dir | PrintLibraries -> print_libraries () | PrintModule qid -> print_module qid | PrintModuleType qid -> print_modtype qid | PrintNamespace ns -> print_namespace ~pstate ns | PrintMLLoadPath -> Mltop.print_ml_path () | PrintMLModules -> Mltop.print_ml_modules () | PrintDebugGC -> Mltop.print_gc () | PrintName (qid,udecl) -> dump_global qid; Prettyp.print_name env sigma qid udecl | PrintGraph -> Prettyp.print_graph () | PrintClasses -> Prettyp.print_classes() | PrintTypeClasses -> Prettyp.print_typeclasses() | PrintInstances c -> Prettyp.print_instances (smart_global c) | PrintCoercions -> Prettyp.print_coercions () | PrintCoercionPaths (cls,clt) -> Prettyp.print_path_between (cl_of_qualid cls) (cl_of_qualid clt) | PrintCanonicalConversions qids -> let grefs = List.map Smartlocate.smart_global qids in Prettyp.print_canonical_projections env sigma grefs | PrintUniverses (sort, subgraph, dst) -> print_universes ~sort ~subgraph dst | PrintHint r -> Hints.pr_hint_ref env sigma (smart_global r) | PrintHintGoal -> begin match pstate with | Some pstate -> let pf = Declare.Proof.get pstate in Hints.pr_applicable_hint pf | None -> str "No proof in progress" end | PrintHintDbName s -> Hints.pr_hint_db_by_name env sigma s | PrintHintDb -> Hints.pr_searchtable env sigma | PrintScopes -> Notation.pr_scopes (Constrextern.without_symbols (pr_glob_constr_env env sigma)) | PrintScope s -> Notation.pr_scope (Constrextern.without_symbols (pr_glob_constr_env env sigma)) s | PrintVisibility s -> Notation.pr_visibility (Constrextern.without_symbols (pr_glob_constr_env env sigma)) s | PrintAbout (ref_or_by_not,udecl,glnumopt) -> print_about_hyp_globs ~pstate ref_or_by_not udecl glnumopt | PrintImplicit qid -> dump_global qid; Prettyp.print_impargs qid | PrintAssumptions (o,t,r) -> (* Prints all the axioms and section variables used by a term *) let gr = smart_global r in let cstr = Globnames.printable_constr_of_global gr in let st = Conv_oracle.get_transp_state (Environ.oracle (Global.env())) in let nassums = Assumptions.assumptions st ~add_opaque:o ~add_transparent:t gr cstr in Printer.pr_assumptionset env sigma nassums | PrintStrategy r -> print_strategy r | PrintRegistered -> print_registered () let vernac_search ~pstate ~atts s gopt r = let open ComSearch in let gopt = query_command_selector gopt in let sigma, env = match gopt with (* 1st goal by default if it exists, otherwise no goal at all *) | None -> get_goal_or_global_context ~pstate 1 (* if goal selector is given and wrong, then let exceptions be raised. 
*) | Some g -> get_goal_or_global_context ~pstate g in interp_search env sigma s r let vernac_locate ~pstate = let open Constrexpr in function | LocateAny {v=AN qid} -> Prettyp.print_located_qualid qid | LocateTerm {v=AN qid} -> Prettyp.print_located_term qid | LocateAny {v=ByNotation (ntn, sc)} (* TODO : handle Ltac notations *) | LocateTerm {v=ByNotation (ntn, sc)} -> let sigma, env = get_current_or_global_context ~pstate in Notation.locate_notation (Constrextern.without_symbols (pr_glob_constr_env env sigma)) ntn sc | LocateLibrary qid -> print_located_library qid | LocateModule qid -> Prettyp.print_located_module qid | LocateOther (s, qid) -> Prettyp.print_located_other s qid | LocateFile f -> locate_file f let vernac_register qid r = let gr = Smartlocate.global_with_alias qid in match r with | RegisterInline -> begin match gr with | GlobRef.ConstRef c -> Global.register_inline c | _ -> CErrors.user_err (Pp.str "Register Inline: expecting a constant") end | RegisterCoqlib n -> let ns, id = Libnames.repr_qualid n in if DirPath.equal (dirpath_of_string "kernel") ns then begin if Global.sections_are_opened () then user_err Pp.(str "Registering a kernel type is not allowed in sections"); let CPrimitives.PIE pind = match Id.to_string id with | "ind_bool" -> CPrimitives.(PIE PIT_bool) | "ind_carry" -> CPrimitives.(PIE PIT_carry) | "ind_pair" -> CPrimitives.(PIE PIT_pair) | "ind_cmp" -> CPrimitives.(PIE PIT_cmp) | "ind_f_cmp" -> CPrimitives.(PIE PIT_f_cmp) | "ind_f_class" -> CPrimitives.(PIE PIT_f_class) | k -> CErrors.user_err Pp.(str "Register: unknown identifier “" ++ str k ++ str "” in the “kernel” namespace") in match gr with | GlobRef.IndRef ind -> Global.register_inductive ind pind | _ -> CErrors.user_err (Pp.str "Register in kernel: expecting an inductive type") end else Coqlib.register_ref (Libnames.string_of_qualid n) gr (********************) (* Proof management *) let vernac_focus ~pstate gln = Declare.Proof.map ~f:(fun p -> match gln with | None -> Proof.focus focus_command_cond () 1 p | Some 0 -> user_err Pp.(str "Invalid goal number: 0. Goal numbering starts with 1.") | Some n -> Proof.focus focus_command_cond () n p) pstate (* Unfocuses one step in the focus stack. *) let vernac_unfocus ~pstate = Declare.Proof.map ~f:(fun p -> Proof.unfocus command_focus p ()) pstate (* Checks that a proof is fully unfocused. Raises an error if not. *) let vernac_unfocused ~pstate = let p = Declare.Proof.get pstate in if Proof.unfocused p then str"The proof is indeed fully unfocused." else user_err Pp.(str "The proof is not fully unfocused.") (* "{" focuses on the first goal, "n: {" focuses on the n-th goal "}" unfocuses, provided that the proof of the goal has been completed. 
*) let subproof_kind = Proof.new_focus_kind () let subproof_cond = Proof.done_cond subproof_kind let vernac_subproof gln ~pstate = Declare.Proof.map ~f:(fun p -> match gln with | None -> Proof.focus subproof_cond () 1 p | Some (Goal_select.SelectNth n) -> Proof.focus subproof_cond () n p | Some (Goal_select.SelectId id) -> Proof.focus_id subproof_cond () id p | _ -> user_err (str "Brackets do not support multi-goal selectors.")) pstate let vernac_end_subproof ~pstate = Declare.Proof.map ~f:(fun p -> Proof.unfocus subproof_kind p ()) pstate let vernac_bullet (bullet : Proof_bullet.t) ~pstate = Declare.Proof.map ~f:(fun p -> Proof_bullet.put p bullet) pstate (* Stack is needed due to show proof names, should deprecate / remove and take pstate *) let vernac_show ~pstate = match pstate with (* Show functions that don't require a proof state *) | None -> begin function | ShowProof -> show_proof ~pstate:None | ShowMatch id -> show_match id | _ -> user_err (str "This command requires an open proof.") end (* Show functions that require a proof state *) | Some pstate -> let proof = Declare.Proof.get pstate in begin function | ShowGoal goalref -> begin match goalref with | OpenSubgoals -> pr_open_subgoals ~proof | NthGoal n -> pr_nth_open_subgoal ~proof n | GoalId id -> pr_goal_by_id ~proof id end | ShowExistentials -> show_top_evars ~proof | ShowUniverses -> show_universes ~proof (* Deprecate *) | ShowProofNames -> Id.print (Declare.Proof.get_name pstate) | ShowIntros all -> show_intro ~proof all | ShowProof -> show_proof ~pstate:(Some pstate) | ShowMatch id -> show_match id end let vernac_check_guard ~pstate = let pts = Declare.Proof.get pstate in let pfterm = List.hd (Proof.partial_proof pts) in let message = try let { Evd.it=gl ; sigma=sigma } = Proof.V82.top_goal pts in let env = Evd.evar_filtered_env (Global.env ()) (Evd.find sigma gl) in Inductiveops.control_only_guard env sigma pfterm; (str "The condition holds up to here") with UserError s -> (str ("Condition violated: ") ++ s ++ str ".") in message (* We interpret vernacular commands to a DSL that specifies their allowed actions on proof states *) let translate_vernac ?loc ~atts v = let open Vernacextend in match v with | VernacAbortAll | VernacRestart | VernacUndo _ | VernacUndoTo _ | VernacResetName _ | VernacResetInitial | VernacBack _ | VernacAbort _ -> anomaly (str "type_vernac") | VernacLoad _ -> anomaly (str "Load is not supported recursively") (* Syntax *) | VernacReservedNotation (infix, sl) -> vtdefault(fun () -> with_module_locality ~atts vernac_reserved_notation ~infix sl) | VernacDeclareScope sc -> vtdefault(fun () -> with_module_locality ~atts vernac_declare_scope sc) | VernacDelimiters (sc,lr) -> vtdefault(fun () -> with_module_locality ~atts vernac_delimiters sc lr) | VernacBindScope (sc,rl) -> vtdefault(fun () -> with_module_locality ~atts vernac_bind_scope sc rl) | VernacOpenCloseScope (b, s) -> vtdefault(fun () -> with_section_locality ~atts vernac_open_close_scope (b,s)) | VernacNotation (infix,c,infpl,sc) -> vtdefault(fun () -> vernac_notation ~atts ~infix c infpl sc) | VernacNotationAddFormat(n,k,v) -> vtdefault(fun () -> unsupported_attributes atts; Metasyntax.add_notation_extra_printing_rule n k v) | VernacDeclareCustomEntry s -> vtdefault(fun () -> with_module_locality ~atts vernac_custom_entry s) (* Gallina *) | VernacDefinition (discharge,lid,DefineBody (bl,red_option,c,typ)) -> vtmodifyprogram (fun ~pm -> with_def_attributes ~atts vernac_definition ~pm discharge lid bl red_option c typ) | VernacDefinition 
(discharge,lid,ProveBody(bl,typ)) -> vtopenproof(fun () -> with_def_attributes ~atts vernac_definition_interactive discharge lid bl typ) | VernacStartTheoremProof (k,l) -> vtopenproof(fun () -> with_def_attributes ~atts vernac_start_proof k l) | VernacExactProof c -> vtcloseproof (fun ~lemma -> unsupported_attributes atts; vernac_exact_proof ~lemma c) | VernacDefineModule (export,lid,bl,mtys,mexprl) -> let i () = unsupported_attributes atts; vernac_define_module export lid bl mtys mexprl in (* XXX: We should investigate if eventually this should be made VtNoProof in all cases. *) vernac_begin_segment ~interactive:(List.is_empty mexprl) i | VernacDeclareModuleType (lid,bl,mtys,mtyo) -> vernac_begin_segment ~interactive:(List.is_empty mtyo) (fun () -> unsupported_attributes atts; vernac_declare_module_type lid bl mtys mtyo) | VernacAssumption ((discharge,kind),nl,l) -> vtdefault(fun () -> with_def_attributes ~atts vernac_assumption discharge kind l nl) | VernacInductive (finite, l) -> vtdefault(fun () -> vernac_inductive ~atts finite l) | VernacFixpoint (discharge, l) -> let opens = List.exists (fun { body_def } -> Option.is_empty body_def) l in if opens then vtopenproof (fun () -> with_def_attributes ~atts vernac_fixpoint_interactive discharge l) else vtmodifyprogram (fun ~pm -> with_def_attributes ~atts (vernac_fixpoint ~pm) discharge l) | VernacCoFixpoint (discharge, l) -> let opens = List.exists (fun { body_def } -> Option.is_empty body_def) l in if opens then vtopenproof(fun () -> with_def_attributes ~atts vernac_cofixpoint_interactive discharge l) else vtmodifyprogram(fun ~pm -> with_def_attributes ~atts (vernac_cofixpoint ~pm) discharge l) | VernacScheme l -> vtdefault(fun () -> unsupported_attributes atts; vernac_scheme l) | VernacCombinedScheme (id, l) -> vtdefault(fun () -> unsupported_attributes atts; vernac_combined_scheme id l) | VernacUniverse l -> vtdefault(fun () -> vernac_universe ~poly:(only_polymorphism atts) l) | VernacConstraint l -> vtdefault(fun () -> vernac_constraint ~poly:(only_polymorphism atts) l) (* Modules *) | VernacDeclareModule (export,lid,bl,mtyo) -> vtdefault(fun () -> unsupported_attributes atts; vernac_declare_module export lid bl mtyo) | VernacInclude in_asts -> vtdefault(fun () -> unsupported_attributes atts; vernac_include in_asts) (* Gallina extensions *) | VernacBeginSection lid -> vernac_begin_segment ~interactive:true (fun () -> vernac_begin_section ~poly:(only_polymorphism atts) lid) | VernacEndSegment lid -> unsupported_attributes atts; vernac_end_segment lid | VernacNameSectionHypSet (lid, set) -> vtdefault(fun () -> unsupported_attributes atts; vernac_name_sec_hyp lid set) | VernacRequire (from, export, qidl) -> vtdefault(fun () -> unsupported_attributes atts; vernac_require from export qidl) | VernacImport (export,cats,qidl) -> vtdefault(fun () -> unsupported_attributes atts; vernac_import export cats qidl) | VernacCanonical qid -> vtdefault(fun () -> vernac_canonical ~local:(only_locality atts) qid) | VernacCoercion (r,s,t) -> vtdefault(fun () -> vernac_coercion ~atts r s t) | VernacIdentityCoercion ({v=id},s,t) -> vtdefault(fun () -> vernac_identity_coercion ~atts id s t) (* Type classes *) | VernacInstance (name, bl, t, props, info) -> let atts, program = Attributes.(parse_with_extra program) atts in if program then vtmodifyprogram (vernac_instance_program ~atts name bl t props info) else begin match props with | None -> vtopenproof (fun () -> vernac_instance_interactive ~atts name bl t info None) | Some props -> let atts, refine = 
Attributes.parse_with_extra Classes.refine_att atts in if refine then vtopenproof (fun () -> vernac_instance_interactive ~atts name bl t info (Some props)) else vtdefault (fun () -> vernac_instance ~atts name bl t props info) end | VernacDeclareInstance (id, bl, inst, info) -> vtdefault(fun () -> vernac_declare_instance ~atts id bl inst info) | VernacContext sup -> vtdefault(fun () -> ComAssumption.context ~poly:(only_polymorphism atts) sup) | VernacExistingInstance insts -> vtdefault(fun () -> vernac_existing_instance ~atts insts) | VernacExistingClass id -> vtdefault(fun () -> unsupported_attributes atts; vernac_existing_class id) (* Auxiliary file and library management *) | VernacAddLoadPath { implicit; physical_path; logical_path } -> vtdefault(fun () -> unsupported_attributes atts; vernac_add_loadpath ~implicit physical_path logical_path) | VernacRemoveLoadPath s -> vtdefault(fun () -> unsupported_attributes atts; vernac_remove_loadpath s) | VernacAddMLPath (s) -> vtdefault(fun () -> unsupported_attributes atts; vernac_add_ml_path s) | VernacDeclareMLModule l -> vtdefault(fun () -> with_locality ~atts vernac_declare_ml_module l) | VernacChdir s -> vtdefault(fun () -> unsupported_attributes atts; vernac_chdir s) (* Commands *) | VernacCreateHintDb (dbname,b) -> vtdefault(fun () -> with_module_locality ~atts vernac_create_hintdb dbname b) | VernacRemoveHints (dbnames,ids) -> vtdefault(fun () -> vernac_remove_hints ~atts dbnames ids) | VernacHints (dbnames,hints) -> vtdefault(fun () -> vernac_hints ~atts dbnames hints) | VernacSyntacticDefinition (id,c,b) -> vtdefault(fun () -> vernac_syntactic_definition ~atts id c b) | VernacArguments (qid, args, more_implicits, flags) -> vtdefault(fun () -> with_section_locality ~atts (ComArguments.vernac_arguments qid args more_implicits flags)) | VernacReserve bl -> vtdefault(fun () -> unsupported_attributes atts; vernac_reserve bl) | VernacGeneralizable gen -> vtdefault(fun () -> with_locality ~atts vernac_generalizable gen) | VernacSetOpacity qidl -> vtdefault(fun () -> with_locality ~atts vernac_set_opacity qidl) | VernacSetStrategy l -> vtdefault(fun () -> with_locality ~atts vernac_set_strategy l) | VernacSetOption (export,key,v) -> let atts = if export then CAst.make ?loc ("export", VernacFlagEmpty) :: atts else atts in vtdefault(fun () -> vernac_set_option ~locality:(parse option_locality atts) key v) | VernacRemoveOption (key,v) -> vtdefault(fun () -> unsupported_attributes atts; vernac_remove_option key v) | VernacAddOption (key,v) -> vtdefault(fun () -> unsupported_attributes atts; vernac_add_option key v) | VernacMemOption (key,v) -> vtdefault(fun () -> unsupported_attributes atts; vernac_mem_option key v) | VernacPrintOption key -> vtdefault(fun () -> unsupported_attributes atts; vernac_print_option key) | VernacCheckMayEval (r,g,c) -> vtreadproofopt(fun ~pstate -> unsupported_attributes atts; Feedback.msg_notice @@ vernac_check_may_eval ~pstate r g c) | VernacDeclareReduction (s,r) -> vtdefault(fun () -> with_locality ~atts vernac_declare_reduction s r) | VernacGlobalCheck c -> vtdefault(fun () -> unsupported_attributes atts; Feedback.msg_notice @@ vernac_global_check c) | VernacPrint p -> vtreadproofopt(fun ~pstate -> unsupported_attributes atts; Feedback.msg_notice @@ vernac_print ~pstate p) | VernacSearch (s,g,r) -> vtreadproofopt( unsupported_attributes atts; vernac_search ~atts s g r) | VernacLocate l -> vtreadproofopt(fun ~pstate -> unsupported_attributes atts; Feedback.msg_notice @@ vernac_locate ~pstate l) | VernacRegister 
(qid, r) -> vtnoproof(fun () -> unsupported_attributes atts; vernac_register qid r) | VernacPrimitive ((id, udecl), prim, typopt) -> vtdefault(fun () -> unsupported_attributes atts; ComPrimitive.do_primitive id udecl prim typopt) | VernacComments l -> vtdefault(fun () -> unsupported_attributes atts; Flags.if_verbose Feedback.msg_info (str "Comments ok\n")) (* Proof management *) | VernacFocus n -> vtmodifyproof(unsupported_attributes atts;vernac_focus n) | VernacUnfocus -> vtmodifyproof(unsupported_attributes atts;vernac_unfocus) | VernacUnfocused -> vtreadproof(fun ~pstate -> unsupported_attributes atts; Feedback.msg_notice @@ vernac_unfocused ~pstate) | VernacBullet b -> vtmodifyproof( unsupported_attributes atts; vernac_bullet b) | VernacSubproof n -> vtmodifyproof( unsupported_attributes atts; vernac_subproof n) | VernacEndSubproof -> vtmodifyproof( unsupported_attributes atts; vernac_end_subproof) | VernacShow s -> vtreadproofopt(fun ~pstate -> unsupported_attributes atts; Feedback.msg_notice @@ vernac_show ~pstate s) | VernacCheckGuard -> vtreadproof(fun ~pstate -> unsupported_attributes atts; Feedback.msg_notice @@ vernac_check_guard ~pstate) | VernacProof (tac, using) -> vtmodifyproof(fun ~pstate -> unsupported_attributes atts; let using = Option.append using (Proof_using.get_default_proof_using ()) in let tacs = if Option.is_empty tac then "tac:no" else "tac:yes" in let usings = if Option.is_empty using then "using:no" else "using:yes" in Aux_file.record_in_aux_at "VernacProof" (tacs^" "^usings); let pstate = Option.cata (vernac_set_end_tac ~pstate) pstate tac in Option.cata (vernac_set_used_variables ~pstate) pstate using) | VernacProofMode mn -> vtdefault(fun () -> unsupported_attributes atts) | VernacEndProof pe -> unsupported_attributes atts; vtcloseproof (vernac_end_proof pe) (* Extensions *) | VernacExtend (opn,args) -> Vernacextend.type_vernac ?loc ~atts opn args () coq-8.15.0/vernac/vernacentries.mli000066400000000000000000000023561417001151100172150ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* atts:Attributes.vernac_flags -> Vernacexpr.vernac_expr -> Vernacextend.typed_vernac (** Vernacular require command *) val vernac_require : Libnames.qualid option -> bool option -> Libnames.qualid list -> unit (** Hook to dissappear when #8240 is fixed *) val interp_redexp_hook : (Environ.env -> Evd.evar_map -> Genredexpr.raw_red_expr -> Evd.evar_map * Redexpr.red_expr) Hook.t (** Miscellaneous stuff *) val command_focus : unit Proof.focus_kind val allow_sprop_opt_name : string list coq-8.15.0/vernac/vernacexpr.ml000066400000000000000000000410231417001151100163430ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* {i string}} command. {b ("ExtractionInlinedConstant", 0)} indicates {b Extract Inlined Constant {i qualid} => {i string}} command. {b ("ExtractionInductive", 0)} indicates {b Extract Inductive {i qualid} => {i string} [ {i string} ... {string} ] {i optstring}} command. {b ("ExtractionBlacklist", 0)} indicates {b Extraction Blacklist {i ident{_1}} ... {i ident{_n}}} command. *) (* This type allows registering the inlining of constants in native compiler. 
It will be extended with primitive inductive types and operators *) type register_kind = | RegisterInline | RegisterCoqlib of qualid (** {6 Types concerning the module layer} *) type module_ast_inl = module_ast * Declaremods.inline type module_binder = bool option * lident list * module_ast_inl (** {6 The type of vernacular expressions} *) type vernac_one_argument_status = { name : Name.t; recarg_like : bool; notation_scope : string CAst.t option; implicit_status : Glob_term.binding_kind; } type vernac_argument_status = | VolatileArg | BidiArg | RealArg of vernac_one_argument_status type arguments_modifier = [ `Assert | `ClearBidiHint | `ClearImplicits | `ClearScopes | `DefaultImplicits | `ExtraScopes | `ReductionDontExposeCase | `ReductionNeverUnfold | `Rename ] type extend_name = (* Name of the vernac entry where the tactic is defined, typically found after the VERNAC EXTEND statement in the source. *) string * (* Index of the extension in the VERNAC EXTEND statement. Each parsing branch is given an offset, starting from zero. *) int type discharge = DoDischarge | NoDischarge type hint_info_expr = Constrexpr.constr_pattern_expr Typeclasses.hint_info_gen type reference_or_constr = | HintsReference of Libnames.qualid | HintsConstr of Constrexpr.constr_expr type hints_expr = | HintsResolve of (hint_info_expr * bool * reference_or_constr) list | HintsResolveIFF of bool * Libnames.qualid list * int option | HintsImmediate of reference_or_constr list | HintsUnfold of Libnames.qualid list | HintsTransparency of Libnames.qualid Hints.hints_transparency_target * bool | HintsMode of Libnames.qualid * Hints.hint_mode list | HintsConstructors of Libnames.qualid list | HintsExtern of int * Constrexpr.constr_expr option * Genarg.raw_generic_argument type nonrec vernac_expr = | VernacLoad of verbose_flag * string (* Syntax *) | VernacReservedNotation of infix_flag * (lstring * syntax_modifier CAst.t list) | VernacOpenCloseScope of bool * scope_name | VernacDeclareScope of scope_name | VernacDelimiters of scope_name * string option | VernacBindScope of scope_name * class_rawexpr list | VernacNotation of infix_flag * constr_expr * (lstring * syntax_modifier CAst.t list) * scope_name option | VernacNotationAddFormat of string * string * string | VernacDeclareCustomEntry of string (* Gallina *) | VernacDefinition of (discharge * Decls.definition_object_kind) * name_decl * definition_expr | VernacStartTheoremProof of Decls.theorem_kind * proof_expr list | VernacEndProof of proof_end | VernacExactProof of constr_expr | VernacAssumption of (discharge * Decls.assumption_object_kind) * Declaremods.inline * (ident_decl list * constr_expr) with_coercion list | VernacInductive of inductive_kind * (inductive_expr * decl_notation list) list | VernacFixpoint of discharge * fixpoint_expr list | VernacCoFixpoint of discharge * cofixpoint_expr list | VernacScheme of (lident option * scheme) list | VernacCombinedScheme of lident * lident list | VernacUniverse of lident list | VernacConstraint of univ_constraint_expr list (* Gallina extensions *) | VernacBeginSection of lident | VernacEndSegment of lident | VernacRequire of qualid option * export_flag option * qualid list | VernacImport of export_flag * import_categories option * (qualid * import_filter_expr) list | VernacCanonical of qualid or_by_notation | VernacCoercion of qualid or_by_notation * class_rawexpr * class_rawexpr | VernacIdentityCoercion of lident * class_rawexpr * class_rawexpr | VernacNameSectionHypSet of lident * section_subset_expr (* Type classes *) | 
VernacInstance of name_decl * (* name *) local_binder_expr list * (* binders *) constr_expr * (* type *) (bool * constr_expr) option * (* body (bool=true when using {}) *) hint_info_expr | VernacDeclareInstance of ident_decl * (* name *) local_binder_expr list * (* binders *) constr_expr * (* type *) hint_info_expr | VernacContext of local_binder_expr list | VernacExistingInstance of (qualid * hint_info_expr) list (* instances names, priorities and patterns *) | VernacExistingClass of qualid (* inductive or definition name *) (* Modules and Module Types *) | VernacDeclareModule of bool option * lident * module_binder list * module_ast_inl | VernacDefineModule of bool option * lident * module_binder list * module_ast_inl Declaremods.module_signature * module_ast_inl list | VernacDeclareModuleType of lident * module_binder list * module_ast_inl list * module_ast_inl list | VernacInclude of module_ast_inl list (* Auxiliary file and library management *) | VernacAddLoadPath of { implicit : bool ; physical_path : CUnix.physical_path ; logical_path : DirPath.t } | VernacRemoveLoadPath of string | VernacAddMLPath of string | VernacDeclareMLModule of string list | VernacChdir of string option (* Resetting *) | VernacResetName of lident | VernacResetInitial | VernacBack of int (* Commands *) | VernacCreateHintDb of string * bool | VernacRemoveHints of string list * qualid list | VernacHints of string list * hints_expr | VernacSyntacticDefinition of lident * (Id.t list * constr_expr) * syntax_modifier CAst.t list | VernacArguments of qualid or_by_notation * vernac_argument_status list (* Main arguments status list *) * (Name.t * Glob_term.binding_kind) list list (* Extra implicit status lists *) * arguments_modifier list | VernacReserve of simple_binder list | VernacGeneralizable of (lident list) option | VernacSetOpacity of (Conv_oracle.level * qualid or_by_notation list) | VernacSetStrategy of (Conv_oracle.level * qualid or_by_notation list) list | VernacSetOption of bool (* Export modifier? *) * Goptions.option_name * option_setting | VernacAddOption of Goptions.option_name * Goptions.table_value list | VernacRemoveOption of Goptions.option_name * Goptions.table_value list | VernacMemOption of Goptions.option_name * Goptions.table_value list | VernacPrintOption of Goptions.option_name | VernacCheckMayEval of Genredexpr.raw_red_expr option * Goal_select.t option * constr_expr | VernacGlobalCheck of constr_expr | VernacDeclareReduction of string * Genredexpr.raw_red_expr | VernacPrint of printable | VernacSearch of searchable * Goal_select.t option * search_restriction | VernacLocate of locatable | VernacRegister of qualid * register_kind | VernacPrimitive of ident_decl * CPrimitives.op_or_type * constr_expr option | VernacComments of comment list (* Proof management *) | VernacAbort of lident option | VernacAbortAll | VernacRestart | VernacUndo of int | VernacUndoTo of int | VernacFocus of int option | VernacUnfocus | VernacUnfocused | VernacBullet of Proof_bullet.t | VernacSubproof of Goal_select.t option | VernacEndSubproof | VernacShow of showable | VernacCheckGuard | VernacProof of Genarg.raw_generic_argument option * section_subset_expr option | VernacProofMode of string (* For extension *) | VernacExtend of extend_name * Genarg.raw_generic_argument list type control_flag = | ControlTime of bool (* boolean is true when the `-time` batch-mode command line flag was set. 
the flag is used to print differently in `-time` vs `Time foo` *) | ControlRedirect of string | ControlTimeout of int | ControlFail | ControlSucceed type vernac_control_r = { control : control_flag list ; attrs : Attributes.vernac_flags ; expr : vernac_expr } and vernac_control = vernac_control_r CAst.t coq-8.15.0/vernac/vernacextend.ml000066400000000000000000000323741417001151100166650ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* () | Use -> x end module OutProg = struct type _ t = | No : unit t | Yes : Declare.OblState.t t | Push | Pop let cast (type a) (x:a) (ty:a t) (orig:Declare.OblState.t NeList.t) : Declare.OblState.t NeList.t = match ty with | No -> orig | Yes -> NeList.map_head (fun _ -> x) orig | Push -> NeList.push Declare.OblState.empty (Some orig) | Pop -> (match NeList.tail orig with Some tl -> tl | None -> assert false) end module InProof = struct type _ t = | Ignore : unit t | Reject : unit t | Use : Declare.Proof.t t | UseOpt : Declare.Proof.t option t let cast (type a) (x:Declare.Proof.t option) (ty:a t) : a = match x, ty with | _, Ignore -> () | None, Reject -> () | Some _, Reject -> CErrors.user_err (Pp.str "Command not supported (Open proofs remain)") | Some x, Use -> x | None, Use -> CErrors.user_err (Pp.str "Command not supported (No proof-editing in progress)") | _, UseOpt -> x end module OutProof = struct type _ t = | No : unit t | Close : unit t | Yes : Declare.Proof.t t type result = | Ignored | Closed | Open of Declare.Proof.t let cast (type a) (x:a) (ty:a t) : result = match ty with | No -> Ignored | Close -> Closed | Yes -> Open x end type ('inprog,'outprog,'inproof,'outproof) vernac_type = { inprog : 'inprog InProg.t; outprog : 'outprog InProg.t; inproof : 'inproof InProof.t; outproof : 'outproof OutProof.t; } type typed_vernac = TypedVernac : { inprog : 'inprog InProg.t; outprog : 'outprog OutProg.t; inproof : 'inproof InProof.t; outproof : 'outproof OutProof.t; run : pm:'inprog -> proof:'inproof -> 'outprog * 'outproof; } -> typed_vernac [@@@ocaml.warning "-40"] let vtdefault f = TypedVernac { inprog = Ignore; outprog = No; inproof = Ignore; outproof = No; run = (fun ~pm:() ~proof:() -> let () = f () in (), ()) } let vtnoproof f = TypedVernac { inprog = Ignore; outprog = No; inproof = Ignore; outproof = No; run = (fun ~pm:() ~proof:() -> let () = f () in (), ()) } let vtcloseproof f = TypedVernac { inprog = Use; outprog = Yes; inproof = Use; outproof = Close; run = (fun ~pm ~proof -> let pm = f ~lemma:proof ~pm in pm, ()) } let vtopenproof f = TypedVernac { inprog = Ignore; outprog = No; inproof = Ignore; outproof = Yes; run = (fun ~pm:() ~proof:() -> let proof = f () in (), proof) } let vtmodifyproof f = TypedVernac { inprog = Ignore; outprog = No; inproof = Use; outproof = Yes; run = (fun ~pm:() ~proof -> let proof = f ~pstate:proof in (), proof) } let vtreadproofopt f = TypedVernac { inprog = Ignore; outprog = No; inproof = UseOpt; outproof = No; run = (fun ~pm:() ~proof -> let () = f ~pstate:proof in (), ()) } let vtreadproof f = TypedVernac { inprog = Ignore; outprog = No; inproof = Use; outproof = No; run = (fun ~pm:() ~proof -> let () = f ~pstate:proof in (), ()) } let vtreadprogram f = TypedVernac { inprog = Use; outprog = No; inproof = Ignore; outproof = No; run = (fun ~pm ~proof:() -> let () = f ~pm in (), ()) } let vtmodifyprogram f = TypedVernac { inprog = Use; outprog = Yes; 
inproof = Ignore; outproof = No; run = (fun ~pm ~proof:() -> let pm = f ~pm in pm, ()) } let vtdeclareprogram f = TypedVernac { inprog = Use; outprog = No; inproof = Ignore; outproof = Yes; run = (fun ~pm ~proof:() -> let proof = f ~pm in (), proof) } let vtopenproofprogram f = TypedVernac { inprog = Use; outprog = Yes; inproof = Ignore; outproof = Yes; run = (fun ~pm ~proof:() -> let pm, proof = f ~pm in pm, proof) } [@@@ocaml.warning "+40"] type vernac_command = ?loc:Loc.t -> atts:Attributes.vernac_flags -> unit -> typed_vernac type plugin_args = Genarg.raw_generic_argument list (* Table of vernac entries *) let vernac_tab = (Hashtbl.create 211 : (Vernacexpr.extend_name, bool * (plugin_args -> vernac_command)) Hashtbl.t) let vinterp_add depr s f = try Hashtbl.add vernac_tab s (depr, f) with Failure _ -> user_err (str"Cannot add the vernac command " ++ str (fst s) ++ str" twice.") let vinterp_map s = try Hashtbl.find vernac_tab s with Failure _ | Not_found -> user_err (str"Cannot find vernac command " ++ str (fst s) ++ str".") let warn_deprecated_command = let open CWarnings in create ~name:"deprecated-command" ~category:"deprecated" (fun pr -> str "Deprecated vernacular command: " ++ pr) (* Interpretation of a vernac command *) let type_vernac opn converted_args ?loc ~atts () = let depr, callback = vinterp_map opn in let () = if depr then let rules = Egramml.get_extend_vernac_rule opn in let pr_gram = function | Egramml.GramTerminal s -> str s | Egramml.GramNonTerminal _ -> str "_" in let pr = pr_sequence pr_gram rules in warn_deprecated_command pr; in let hunk = callback converted_args in hunk ?loc ~atts () (** VERNAC EXTEND registering *) type classifier = Genarg.raw_generic_argument list -> vernac_classification (** Classifiers *) let classifiers : classifier array String.Map.t ref = ref String.Map.empty let get_vernac_classifier (name, i) args = (String.Map.find name !classifiers).(i) args let declare_vernac_classifier name f = classifiers := String.Map.add name f !classifiers let classify_as_query = VtQuery let classify_as_sideeff = VtSideff ([], VtLater) let classify_as_proofstep = VtProofStep { proof_block_detection = None} type (_, _) ty_sig = | TyNil : (vernac_command, vernac_classification) ty_sig | TyTerminal : string * ('r, 's) ty_sig -> ('r, 's) ty_sig | TyNonTerminal : ('a, 'b, 'c) Extend.ty_user_symbol * ('r, 's) ty_sig -> ('a -> 'r, 'a -> 's) ty_sig type ty_ml = TyML : bool * ('r, 's) ty_sig * 'r * 's option -> ty_ml let type_error () = CErrors.anomaly (Pp.str "Ill-typed VERNAC EXTEND") let rec untype_classifier : type r s. (r, s) ty_sig -> s -> classifier = function | TyNil -> fun f args -> begin match args with | [] -> f | _ :: _ -> type_error () end | TyTerminal (_, ty) -> fun f args -> untype_classifier ty f args | TyNonTerminal (tu, ty) -> fun f args -> let open Genarg in begin match args with | [] -> type_error () | GenArg (Rawwit tag, v) :: args -> match Genarg.genarg_type_eq tag (Egramml.proj_symbol tu) with | None -> type_error () | Some Refl -> untype_classifier ty (f v) args end (** Stupid GADTs forces us to duplicate the definition just for typing *) let rec untype_command : type r s. 
(r, s) ty_sig -> r -> plugin_args -> vernac_command = function | TyNil -> fun f args -> begin match args with | [] -> f | _ :: _ -> type_error () end | TyTerminal (_, ty) -> fun f args -> untype_command ty f args | TyNonTerminal (tu, ty) -> fun f args -> let open Genarg in begin match args with | [] -> type_error () | GenArg (Rawwit tag, v) :: args -> match genarg_type_eq tag (Egramml.proj_symbol tu) with | None -> type_error () | Some Refl -> untype_command ty (f v) args end let rec untype_user_symbol : type s a b c. (a, b, c) Extend.ty_user_symbol -> (s, Gramlib.Grammar.norec, a) Pcoq.Symbol.t = let open Extend in function | TUlist1 l -> Pcoq.Symbol.list1 (untype_user_symbol l) | TUlist1sep (l, s) -> Pcoq.Symbol.list1sep (untype_user_symbol l) (Pcoq.Symbol.tokens [Pcoq.TPattern (CLexer.terminal s)]) false | TUlist0 l -> Pcoq.Symbol.list0 (untype_user_symbol l) | TUlist0sep (l, s) -> Pcoq.Symbol.list0sep (untype_user_symbol l) (Pcoq.Symbol.tokens [Pcoq.TPattern (CLexer.terminal s)]) false | TUopt o -> Pcoq.Symbol.opt (untype_user_symbol o) | TUentry a -> Pcoq.Symbol.nterm (Pcoq.genarg_grammar (Genarg.ExtraArg a)) | TUentryl (a, i) -> Pcoq.Symbol.nterml (Pcoq.genarg_grammar (Genarg.ExtraArg a)) (string_of_int i) let rec untype_grammar : type r s. (r, s) ty_sig -> 'a Egramml.grammar_prod_item list = function | TyNil -> [] | TyTerminal (tok, ty) -> Egramml.GramTerminal tok :: untype_grammar ty | TyNonTerminal (tu, ty) -> let t = Genarg.rawwit (Egramml.proj_symbol tu) in let symb = untype_user_symbol tu in Egramml.GramNonTerminal (Loc.tag (t, symb)) :: untype_grammar ty let vernac_extend ~command ?classifier ?entry ext = let get_classifier (TyML (_, ty, _, cl)) = match cl with | Some cl -> untype_classifier ty cl | None -> match classifier with | Some cl -> fun _ -> cl command | None -> let e = match entry with | None -> "COMMAND" | Some e -> Pcoq.Entry.name e in let msg = Printf.sprintf "\ Vernac entry \"%s\" misses a classifier. \ A classifier is a function that returns an expression \ of type vernac_classification (see Vernacexpr). You can: \n\ - Use '... EXTEND %s CLASSIFIED AS QUERY ...' if the \ new vernacular command does not alter the system state;\n\ - Use '... EXTEND %s CLASSIFIED AS SIDEFF ...' if the \ new vernacular command alters the system state but not the \ parser nor it starts a proof or ends one;\n\ - Use '... EXTEND %s CLASSIFIED BY f ...' to specify \ a global function f. The function f will be called passing\ \"%s\" as the only argument;\n\ - Add a specific classifier in each clause using the syntax:\n\ '[...] => [ f ] -> [...]'.\n\ Specific classifiers have precedence over global \ classifiers. Only one classifier is called." 
command e e e command in CErrors.user_err (Pp.strbrk msg) in let cl = Array.map_of_list get_classifier ext in let iter i (TyML (depr, ty, f, _)) = let f = untype_command ty f in let r = untype_grammar ty in let () = vinterp_add depr (command, i) f in Egramml.extend_vernac_command_grammar (command, i) entry r in let () = declare_vernac_classifier command cl in List.iteri iter ext (** VERNAC ARGUMENT EXTEND registering *) type 'a argument_rule = | Arg_alias of 'a Pcoq.Entry.t | Arg_rules of 'a Pcoq.Production.t list type 'a vernac_argument = { arg_printer : Environ.env -> Evd.evar_map -> 'a -> Pp.t; arg_parsing : 'a argument_rule; } let vernac_argument_extend ~name arg = let wit = Genarg.create_arg name in let entry = match arg.arg_parsing with | Arg_alias e -> let () = Pcoq.register_grammar wit e in e | Arg_rules rules -> let e = Pcoq.create_generic_entry2 name (Genarg.rawwit wit) in let () = Pcoq.grammar_extend e (Pcoq.Fresh (Gramlib.Gramext.First, [None, None, rules])) in e in let pr = arg.arg_printer in let pr x = Genprint.PrinterBasic (fun env sigma -> pr env sigma x) in let () = Genprint.register_vernac_print0 wit pr in (wit, entry) coq-8.15.0/vernac/vernacextend.mli000066400000000000000000000146431417001151100170350ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* 'a t -> 'a end module OutProg : sig type _ t = | No : unit t | Yes : Declare.OblState.t t | Push | Pop val cast : 'a -> 'a t -> Declare.OblState.t NeList.t -> Declare.OblState.t NeList.t end module InProof : sig type _ t = | Ignore : unit t | Reject : unit t | Use : Declare.Proof.t t | UseOpt : Declare.Proof.t option t val cast : Declare.Proof.t option -> 'a t -> 'a end module OutProof : sig type _ t = | No : unit t | Close : unit t | Yes : Declare.Proof.t t type result = | Ignored | Closed | Open of Declare.Proof.t val cast : 'a -> 'a t -> result end type ('inprog,'outprog,'inproof,'outproof) vernac_type = { inprog : 'inprog InProg.t; outprog : 'outprog InProg.t; inproof : 'inproof InProof.t; outproof : 'outproof OutProof.t; } type typed_vernac = TypedVernac : { inprog : 'inprog InProg.t; outprog : 'outprog OutProg.t; inproof : 'inproof InProof.t; outproof : 'outproof OutProof.t; run : pm:'inprog -> proof:'inproof -> 'outprog * 'outproof; } -> typed_vernac (** Some convenient typed_vernac constructors *) val vtdefault : (unit -> unit) -> typed_vernac val vtnoproof : (unit -> unit) -> typed_vernac val vtcloseproof : (lemma:Declare.Proof.t -> pm:Declare.OblState.t -> Declare.OblState.t) -> typed_vernac val vtopenproof : (unit -> Declare.Proof.t) -> typed_vernac val vtmodifyproof : (pstate:Declare.Proof.t -> Declare.Proof.t) -> typed_vernac val vtreadproofopt : (pstate:Declare.Proof.t option -> unit) -> typed_vernac val vtreadproof : (pstate:Declare.Proof.t -> unit) -> typed_vernac val vtreadprogram : (pm:Declare.OblState.t -> unit) -> typed_vernac val vtmodifyprogram : (pm:Declare.OblState.t -> Declare.OblState.t) -> typed_vernac val vtdeclareprogram : (pm:Declare.OblState.t -> Declare.Proof.t) -> typed_vernac val vtopenproofprogram : (pm:Declare.OblState.t -> Declare.OblState.t * Declare.Proof.t) -> typed_vernac type vernac_command = ?loc:Loc.t -> atts:Attributes.vernac_flags -> unit -> typed_vernac type plugin_args = Genarg.raw_generic_argument list val type_vernac : Vernacexpr.extend_name -> plugin_args -> vernac_command (** {5 VERNAC EXTEND} *) type classifier 
= Genarg.raw_generic_argument list -> vernac_classification type (_, _) ty_sig = | TyNil : (vernac_command, vernac_classification) ty_sig | TyTerminal : string * ('r, 's) ty_sig -> ('r, 's) ty_sig | TyNonTerminal : ('a, 'b, 'c) Extend.ty_user_symbol * ('r, 's) ty_sig -> ('a -> 'r, 'a -> 's) ty_sig type ty_ml = TyML : bool (* deprecated *) * ('r, 's) ty_sig * 'r * 's option -> ty_ml (** Wrapper to dynamically extend vernacular commands. *) val vernac_extend : command:string -> ?classifier:(string -> vernac_classification) -> ?entry:Vernacexpr.vernac_expr Pcoq.Entry.t -> ty_ml list -> unit (** {5 VERNAC ARGUMENT EXTEND} *) type 'a argument_rule = | Arg_alias of 'a Pcoq.Entry.t (** This is used because CAMLP5 parser can be dumb about rule factorization, which sometimes requires two entries to be the same. *) | Arg_rules of 'a Pcoq.Production.t list (** There is a discrepancy here as we use directly extension rules and thus entries instead of ty_user_symbol and thus arguments as roots. *) type 'a vernac_argument = { arg_printer : Environ.env -> Evd.evar_map -> 'a -> Pp.t; arg_parsing : 'a argument_rule; } val vernac_argument_extend : name:string -> 'a vernac_argument -> ('a, unit, unit) Genarg.genarg_type * 'a Pcoq.Entry.t (** {5 STM classifiers} *) val get_vernac_classifier : Vernacexpr.extend_name -> classifier (** Standard constant classifiers *) val classify_as_query : vernac_classification val classify_as_sideeff : vernac_classification val classify_as_proofstep : vernac_classification coq-8.15.0/vernac/vernacinterp.ml000066400000000000000000000261311417001151100166710ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* stack | Some stack, Closed -> snd (LStack.pop stack) | None, Closed -> assert false | Some stack, Open proof -> Some (LStack.map_top ~f:(fun _ -> proof) stack) | None, Open proof -> Some (LStack.push None proof) in stack, pm (* Default proof mode, to be set at the beginning of proofs for programs that cannot be statically classified. *) let proof_mode_opt_name = ["Default";"Proof";"Mode"] let get_default_proof_mode = Goptions.declare_interpreted_string_option_and_ref ~depr:false ~key:proof_mode_opt_name ~value:(Pvernac.register_proof_mode "Noedit" Pvernac.Vernac_.noedit_mode) (fun name -> match Pvernac.lookup_proof_mode name with | Some pm -> pm | None -> CErrors.user_err Pp.(str (Format.sprintf "No proof mode named \"%s\"." name))) Pvernac.proof_mode_to_string (** A global default timeout, controlled by option "Set Default Timeout n". Use "Unset Default Timeout" to deactivate it (or set it to 0). *) let default_timeout = ref None (* Timeout *) let vernac_timeout ?timeout (f : 'a -> 'b) (x : 'a) : 'b = match !default_timeout, timeout with | _, Some n | Some n, None -> (match Control.timeout (float_of_int n) f x with | None -> Exninfo.iraise (Exninfo.capture CErrors.Timeout) | Some x -> x) | None, None -> f x (* Fail *) let test_mode = ref false (* Restoring the state is the caller's responsibility *) let with_fail f : (Loc.t option * Pp.t, unit) result = try let _ = f () in Error () with (* Fail Timeout is a common pattern so we need to support it. 
*) | e when CErrors.noncritical e || e = CErrors.Timeout -> (* The error has to be printed in the failing state *) let _, info as e = Exninfo.capture e in Ok (Loc.get_loc info, CErrors.iprint e) let real_error_loc ~cmdloc ~eloc = if Loc.finer eloc cmdloc then eloc else cmdloc (* We restore the state always *) let with_fail ~loc ~st f = let res = with_fail f in Vernacstate.invalidate_cache (); Vernacstate.unfreeze_interp_state st; match res with | Error () -> CErrors.user_err (Pp.str "The command has not failed!") | Ok (eloc, msg) -> let loc = if !test_mode then real_error_loc ~cmdloc:loc ~eloc else None in if not !Flags.quiet || !test_mode then Feedback.msg_notice ?loc Pp.(str "The command has indeed failed with message:" ++ fnl () ++ msg) let with_succeed ~st f = let () = ignore (f ()) in Vernacstate.invalidate_cache (); Vernacstate.unfreeze_interp_state st; if not !Flags.quiet then Feedback.msg_notice Pp.(str "The command has succeeded and its effects have been reverted.") let locate_if_not_already ?loc (e, info) = (e, Option.cata (Loc.add_loc info) info (real_error_loc ~cmdloc:loc ~eloc:(Loc.get_loc info))) let mk_time_header = (* Drop the time header to print the command, we should indeed use a different mechanism to `-time` commands than the current hack of adding a time control to the AST. *) let pr_time_header vernac = let vernac = match vernac with | { CAst.v = { control = ControlTime _ :: control; attrs; expr }; loc } -> CAst.make ?loc { control; attrs; expr } | _ -> vernac in Topfmt.pr_cmd_header vernac in fun vernac -> Lazy.from_fun (fun () -> pr_time_header vernac) let interp_control_flag ~loc ~time_header (f : control_flag) ~st (fn : st:Vernacstate.t -> Vernacstate.LemmaStack.t option * Declare.OblState.t NeList.t) = match f with | ControlFail -> with_fail ~loc ~st (fun () -> fn ~st); st.Vernacstate.lemmas, st.Vernacstate.program | ControlSucceed -> with_succeed ~st (fun () -> fn ~st); st.Vernacstate.lemmas, st.Vernacstate.program | ControlTimeout timeout -> vernac_timeout ~timeout (fun () -> fn ~st) () | ControlTime batch -> let header = if batch then Lazy.force time_header else Pp.mt () in System.with_time ~batch ~header (fun () -> fn ~st) () | ControlRedirect s -> Topfmt.with_output_to_file s (fun () -> fn ~st) () (* "locality" is the prefix "Local" attribute, while the "local" component * is the outdated/deprecated "Local" attribute of some vernacular commands * still parsed as the obsolete_locality grammar entry for retrocompatibility. * loc is the Loc.t of the vernacular command being interpreted. *) let rec interp_expr ?loc ~atts ~st c = vernac_pperr_endline Pp.(fun () -> str "interpreting: " ++ Ppvernac.pr_vernac_expr c); match c with (* The STM should handle that, but LOAD bypasses the STM... 
*) | VernacAbortAll -> CErrors.user_err (Pp.str "AbortAll cannot be used through the Load command") | VernacRestart -> CErrors.user_err (Pp.str "Restart cannot be used through the Load command") | VernacUndo _ -> CErrors.user_err (Pp.str "Undo cannot be used through the Load command") | VernacUndoTo _ -> CErrors.user_err (Pp.str "UndoTo cannot be used through the Load command") (* Resetting *) | VernacResetName _ -> CErrors.anomaly (Pp.str "VernacResetName not handled by Stm.") | VernacResetInitial -> CErrors.anomaly (Pp.str "VernacResetInitial not handled by Stm.") | VernacBack _ -> CErrors.anomaly (Pp.str "VernacBack not handled by Stm.") (* This one is possible to handle here *) | VernacAbort id -> CErrors.user_err (Pp.str "Abort cannot be used through the Load command") | VernacLoad (verbosely, fname) -> Attributes.unsupported_attributes atts; vernac_load ~verbosely fname | v -> let fv = Vernacentries.translate_vernac ?loc ~atts v in let stack = st.Vernacstate.lemmas in let program = st.Vernacstate.program in interp_typed_vernac ~pm:program ~stack fv and vernac_load ~verbosely fname = (* Note that no proof should be open here, so the state here is just token for now *) let st = Vernacstate.freeze_interp_state ~marshallable:false in let fname = Envars.expand_path_macros ~warn:(fun x -> Feedback.msg_warning (Pp.str x)) fname in let fname = CUnix.make_suffix fname ".v" in let input = let longfname = Loadpath.locate_file fname in let in_chan = Util.open_utf8_file_in longfname in Pcoq.Parsable.make ~loc:Loc.(initial (InFile { dirpath=None; file=longfname})) (Stream.of_channel in_chan) in (* Parsing loop *) let v_mod = if verbosely then Flags.verbosely else Flags.silently in let parse_sentence proof_mode = Flags.with_option Flags.we_are_parsing (Pcoq.Entry.parse (Pvernac.main_entry proof_mode)) in let rec load_loop ~pm ~stack = let proof_mode = Option.map (fun _ -> get_default_proof_mode ()) stack in match parse_sentence proof_mode input with | None -> stack, pm | Some stm -> let stack, pm = v_mod (interp_control ~st:{ st with Vernacstate.lemmas = stack; program = pm }) stm in (load_loop [@ocaml.tailcall]) ~stack ~pm in let stack, pm = load_loop ~pm:st.Vernacstate.program ~stack:st.Vernacstate.lemmas in (* If Load left a proof open, we fail too. *) if Option.has_some stack then CErrors.user_err Pp.(str "Files processed by Load cannot leave open proofs."); stack, pm and interp_control ~st ({ CAst.v = cmd; loc } as vernac) = let time_header = mk_time_header vernac in List.fold_right (fun flag fn -> interp_control_flag ~loc ~time_header flag fn) cmd.control (fun ~st -> let before_univs = Global.universes () in let pstack, pm = interp_expr ?loc ~atts:cmd.attrs ~st cmd.expr in let after_univs = Global.universes () in if before_univs == after_univs then pstack, pm else let f = Declare.Proof.update_sigma_univs after_univs in Option.map (Vernacstate.LemmaStack.map ~f) pstack, pm) ~st (* XXX: This won't properly set the proof mode, as of today, it is controlled by the STM. Thus, we would need access information from the classifier. The proper fix is to move it to the STM, however, the way the proof mode is set there makes the task non trivial without a considerable amount of refactoring. 
*) (* Interpreting a possibly delayed proof *) let interp_qed_delayed ~proof ~st pe = let stack = st.Vernacstate.lemmas in let pm = st.Vernacstate.program in let stack = Option.cata (fun stack -> snd @@ Vernacstate.LemmaStack.pop stack) None stack in let pm = NeList.map_head (fun pm -> match pe with | Admitted -> Declare.Proof.save_lemma_admitted_delayed ~pm ~proof | Proved (_,idopt) -> let pm, _ = Declare.Proof.save_lemma_proved_delayed ~pm ~proof ~idopt in pm) pm in stack, pm let interp_qed_delayed_control ~proof ~st ~control { CAst.loc; v=pe } = let time_header = mk_time_header (CAst.make ?loc { control; attrs = []; expr = VernacEndProof pe }) in List.fold_right (fun flag fn -> interp_control_flag ~loc ~time_header flag fn) control (fun ~st -> interp_qed_delayed ~proof ~st pe) ~st (* General interp with management of state *) let () = let open Goptions in declare_int_option { optdepr = false; optkey = ["Default";"Timeout"]; optread = (fun () -> !default_timeout); optwrite = ((:=) default_timeout) } (* Be careful with the cache here in case of an exception. *) let interp_gen ~verbosely ~st ~interp_fn cmd = Vernacstate.unfreeze_interp_state st; try vernac_timeout (fun st -> let v_mod = if verbosely then Flags.verbosely else Flags.silently in let ontop = v_mod (interp_fn ~st) cmd in Vernacstate.Declare.set ontop [@ocaml.warning "-3"]; Vernacstate.freeze_interp_state ~marshallable:false ) st with exn -> let exn = Exninfo.capture exn in let exn = locate_if_not_already ?loc:cmd.CAst.loc exn in Vernacstate.invalidate_cache (); Exninfo.iraise exn (* Regular interp *) let interp ?(verbosely=true) ~st cmd = interp_gen ~verbosely ~st ~interp_fn:interp_control cmd let interp_qed_delayed_proof ~proof ~st ~control pe : Vernacstate.t = interp_gen ~verbosely:false ~st ~interp_fn:(interp_qed_delayed_control ~proof ~control) pe (* 8.15 only compat definition *) let with_fail ~st f = with_fail ~loc:None ~st f coq-8.15.0/vernac/vernacinterp.mli000066400000000000000000000030031417001151100170330ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* st:Vernacstate.t -> Vernacexpr.vernac_control -> Vernacstate.t (** Execute a Qed but with a proof_object which may contain a delayed proof and won't be forced *) val interp_qed_delayed_proof : proof:Declare.Proof.proof_object -> st:Vernacstate.t -> control:Vernacexpr.control_flag list -> Vernacexpr.proof_end CAst.t -> Vernacstate.t (** [with_fail ~st f] runs [f ()] and expects it to fail, otherwise it fails. *) val with_fail : st:Vernacstate.t -> (unit -> 'a) -> unit (** Flag set when the test-suite is called. Its only effect to display verbose information for [Fail] *) val test_mode : bool ref (** Default proof mode set by `start_proof` *) val get_default_proof_mode : unit -> Pvernac.proof_mode val proof_mode_opt_name : string list coq-8.15.0/vernac/vernacprop.ml000066400000000000000000000026461417001151100163550ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* true | _ -> false) v.control (* Navigation commands are allowed in a coqtop session but not in a .v file *) let is_navigation_vernac = function | VernacResetInitial | VernacResetName _ | VernacBack _ -> true | _ -> false (* NB: Reset is now allowed again as asked by A. 
Chlipala *) let is_reset = function | VernacResetInitial | VernacResetName _ -> true | _ -> false let is_debug = function | VernacSetOption (_, ["Ltac";"Debug"], _) -> true | _ -> false let is_undo = function | VernacUndo _ | VernacUndoTo _ -> true | _ -> false coq-8.15.0/vernac/vernacprop.mli000066400000000000000000000017441417001151100165240ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* bool val is_navigation_vernac : vernac_expr -> bool val is_reset : vernac_expr -> bool val is_debug : vernac_expr -> bool val is_undo : vernac_expr -> bool coq-8.15.0/vernac/vernacstate.ml000066400000000000000000000202011417001151100165000ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* Pcoq.Entry.parse entry pa) () end module System : sig type t val protect : ('a -> 'b) -> 'a -> 'b val freeze : marshallable:bool -> t val unfreeze : t -> unit module Stm : sig val make_shallow : t -> t val lib : t -> Lib.frozen val summary : t -> Summary.frozen val replace_summary : t -> Summary.frozen -> t end end = struct type t = Lib.frozen * Summary.frozen let freeze ~marshallable = (Lib.freeze (), Summary.freeze_summaries ~marshallable) let unfreeze (fl,fs) = Lib.unfreeze fl; Summary.unfreeze_summaries fs let protect f x = let st = freeze ~marshallable:false in try let a = f x in unfreeze st; a with reraise -> let reraise = Exninfo.capture reraise in (unfreeze st; Exninfo.iraise reraise) (* STM-specific state manipulations *) module Stm = struct let make_shallow (lib, summary) = Lib.drop_objects lib, summary let lib = fst let summary = snd let replace_summary (lib,_) summary = (lib,summary) end end module LemmaStack = struct type t = Declare.Proof.t NeList.t let map ~f x = NeList.map f x let map_top ~f x = NeList.map_head f x let pop x = NeList.head x, NeList.tail x let get_top = NeList.head let with_top x ~f = f (get_top x) let push ontop a = NeList.push a ontop let get_all_proof_names (pf : t) = let prj x = Declare.Proof.get x in List.map Proof.(function pf -> (data (prj pf)).name) (NeList.to_list pf) let copy_info src tgt = Declare.Proof.map ~f:(fun _ -> Declare.Proof.get tgt) src let copy_info ~(src : t) ~(tgt : t) = NeList.map2 copy_info src tgt end type t = { parsing : Parser.t; system : System.t; (* summary + libstack *) lemmas : LemmaStack.t option; (* proofs of lemmas currently opened *) program : Declare.OblState.t NeList.t; (* obligations table *) opaques : Opaques.Summary.t; (* opaque proof terms *) shallow : bool (* is the state trimmed down (libstack) *) } let s_cache = ref None let s_lemmas = ref None let s_program = ref (NeList.singleton Declare.OblState.empty) let invalidate_cache () = s_cache := None; s_lemmas := None; s_program := NeList.singleton Declare.OblState.empty let update_cache rf v = rf := Some v; v let do_if_not_cached rf f v = match !rf with | None -> rf := Some v; f v | Some vc when vc != v -> rf := Some v; f v | Some _ -> () let freeze_interp_state ~marshallable = { system = update_cache s_cache (System.freeze ~marshallable); lemmas = !s_lemmas; program = !s_program; opaques = Opaques.Summary.freeze ~marshallable; shallow = false; parsing = Parser.cur_state (); } let unfreeze_interp_state { system; lemmas; program; parsing; opaques } = do_if_not_cached 
s_cache System.unfreeze system; s_lemmas := lemmas; s_program := program; Opaques.Summary.unfreeze opaques; Pcoq.unfreeze parsing (* Compatibility module *) module Declare_ = struct let get () = !s_lemmas let get_program () = !s_program let set (pstate,pm) = s_lemmas := pstate; s_program := pm let get_pstate () = Option.map (LemmaStack.with_top ~f:(fun x -> x)) !s_lemmas let freeze ~marshallable:_ = get () let unfreeze x = s_lemmas := Some x exception NoCurrentProof let () = CErrors.register_handler begin function | NoCurrentProof -> Some (Pp.(str "No focused proof (No proof-editing in progress).")) | _ -> None end let cc f = match !s_lemmas with | None -> raise NoCurrentProof | Some x -> LemmaStack.with_top ~f x let cc_stack f = match !s_lemmas with | None -> raise NoCurrentProof | Some x -> f x let dd f = match !s_lemmas with | None -> raise NoCurrentProof | Some x -> s_lemmas := Some (LemmaStack.map_top ~f x) let there_are_pending_proofs () = !s_lemmas <> None let get_open_goals () = cc Declare.Proof.get_open_goals let give_me_the_proof_opt () = Option.map (LemmaStack.with_top ~f:Declare.Proof.get) !s_lemmas let give_me_the_proof () = cc Declare.Proof.get let get_current_proof_name () = cc Declare.Proof.get_name let map_proof f = dd (Declare.Proof.map ~f) let with_current_proof f = match !s_lemmas with | None -> raise NoCurrentProof | Some stack -> let pf, res = LemmaStack.with_top stack ~f:(Declare.Proof.map_fold_endline ~f) in let stack = LemmaStack.map_top stack ~f:(fun _ -> pf) in s_lemmas := Some stack; res let return_proof () = cc Declare.Proof.return_proof let return_partial_proof () = cc Declare.Proof.return_partial_proof let close_future_proof ~feedback_id pf = cc (fun pt -> Declare.Proof.close_future_proof ~feedback_id pt pf) let close_proof ~opaque ~keep_body_ucst_separate = cc (fun pt -> Declare.Proof.close_proof ~opaque ~keep_body_ucst_separate pt) let discard_all () = s_lemmas := None let update_sigma_univs ugraph = dd (Declare.Proof.update_sigma_univs ugraph) let get_current_context () = cc Declare.Proof.get_current_context let get_all_proof_names () = try cc_stack LemmaStack.get_all_proof_names with NoCurrentProof -> [] let copy_terminators ~src ~tgt = match src, tgt with | None, None -> None | Some _ , None -> None | None, Some x -> Some x | Some src, Some tgt -> Some (LemmaStack.copy_info ~src ~tgt) end (* STM-specific state-handling *) module Stm = struct (* Proof-related state, for workers; ideally the two counters would be contained in the lemmas state themselves, as there is no need for evar / metas to be global among proofs *) type nonrec pstate = LemmaStack.t option * int * (* Evarutil.meta_counter_summary_tag *) int (* Evd.evar_counter_summary_tag *) (* Parts of the system state that are morally part of the proof state *) let pstate { lemmas; system } = let st = System.Stm.summary system in lemmas, Summary.project_from_summary st Evarutil.meta_counter_summary_tag, Summary.project_from_summary st Evd.evar_counter_summary_tag let set_pstate ({ lemmas; system } as s) (pstate,c1,c2) = { s with lemmas = Declare_.copy_terminators ~src:s.lemmas ~tgt:pstate ; system = System.Stm.replace_summary s.system begin let st = System.Stm.summary s.system in let st = Summary.modify_summary st Evarutil.meta_counter_summary_tag c1 in let st = Summary.modify_summary st Evd.evar_counter_summary_tag c2 in st end } type non_pstate = Summary.frozen * Lib.frozen let non_pstate { system } = let st = System.Stm.summary system in let st = Summary.remove_from_summary st 
Evarutil.meta_counter_summary_tag in let st = Summary.remove_from_summary st Evd.evar_counter_summary_tag in st, System.Stm.lib system let same_env { system = s1 } { system = s2 } = let s1 = System.Stm.summary s1 in let e1 = Summary.project_from_summary s1 Global.global_env_summary_tag in let s2 = System.Stm.summary s2 in let e2 = Summary.project_from_summary s2 Global.global_env_summary_tag in e1 == e2 let make_shallow st = { st with system = System.Stm.make_shallow st.system ; shallow = true } end module Declare = Declare_ coq-8.15.0/vernac/vernacstate.mli000066400000000000000000000102211417001151100166520ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* t val cur_state : unit -> t val parse : t -> 'a Pcoq.Entry.t -> Pcoq.Parsable.t -> 'a end (** System State *) module System : sig (** The system state includes the summary and the libobject *) type t (** [protect f x] runs [f x] and discards changes in the system state *) val protect : ('a -> 'b) -> 'a -> 'b end module LemmaStack : sig type t val pop : t -> Declare.Proof.t * t option val push : t option -> Declare.Proof.t -> t val map : f:(Declare.Proof.t -> Declare.Proof.t) -> t -> t val map_top : f:(Declare.Proof.t -> Declare.Proof.t) -> t -> t val with_top : t -> f:(Declare.Proof.t -> 'a ) -> 'a val get_top : t -> Declare.Proof.t end type t = { parsing : Parser.t (** parsing state [parsing state may not behave 100% functionally yet, beware] *) ; system : System.t (** summary + libstack *) ; lemmas : LemmaStack.t option (** proofs of lemmas currently opened *) ; program : Declare.OblState.t NeList.t (** program mode table. One per open module/section including the toplevel module. 
*) ; opaques : Opaques.Summary.t (** qed-terminated proofs *) ; shallow : bool (** is the state trimmed down (libstack) *) } val freeze_interp_state : marshallable:bool -> t val unfreeze_interp_state : t -> unit (* WARNING: Do not use, it will go away in future releases *) val invalidate_cache : unit -> unit (** STM-specific state handling *) module Stm : sig (** Proof state + meta/evar counters *) type pstate val pstate : t -> pstate val set_pstate : t -> pstate -> t (** Rest of the state, unfortunately this is used in low-level so we need to expose it *) type non_pstate = Summary.frozen * Lib.frozen val non_pstate : t -> non_pstate (** Checks if two states have the same Environ.env (physical eq) *) val same_env : t -> t -> bool (** Call [Lib.drop_objects] on the state *) val make_shallow : t -> t end (* Compatibility module: Do Not Use *) module Declare : sig exception NoCurrentProof val there_are_pending_proofs : unit -> bool val get_open_goals : unit -> int val give_me_the_proof : unit -> Proof.t val give_me_the_proof_opt : unit -> Proof.t option val get_current_proof_name : unit -> Names.Id.t val map_proof : (Proof.t -> Proof.t) -> unit val with_current_proof : (unit Proofview.tactic -> Proof.t -> Proof.t * 'a) -> 'a val return_proof : unit -> Declare.Proof.closed_proof_output val return_partial_proof : unit -> Declare.Proof.closed_proof_output val close_future_proof : feedback_id:Stateid.t -> Declare.Proof.closed_proof_output Future.computation -> Declare.Proof.proof_object val close_proof : opaque:Vernacexpr.opacity_flag -> keep_body_ucst_separate:bool -> Declare.Proof.proof_object val discard_all : unit -> unit val update_sigma_univs : UGraph.t -> unit val get_current_context : unit -> Evd.evar_map * Environ.env val get_all_proof_names : unit -> Names.Id.t list val copy_terminators : src:LemmaStack.t option -> tgt:LemmaStack.t option -> LemmaStack.t option (* Low-level stuff *) val get : unit -> LemmaStack.t option val get_program : unit -> Declare.OblState.t NeList.t val set : LemmaStack.t option * Declare.OblState.t NeList.t -> unit val get_pstate : unit -> Declare.Proof.t option val freeze : marshallable:bool -> LemmaStack.t option val unfreeze : LemmaStack.t -> unit end [@@ocaml.deprecated "This module is internal and should not be used, instead, thread the proof state"]
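(* The interfaces above are enough to drive the interpreter from outside:
   freeze a state, interpret one sentence against it, and restore the
   snapshot afterwards, which is the same discipline [with_fail] and
   [with_succeed] follow in vernacinterp.ml.  The function below is an
   illustrative sketch, not part of the Coq sources: the name
   [interp_sentence_and_rollback] and the choice of a toplevel input
   location are assumptions made for the example; every call it makes
   appears in the code or signatures above. *)
let interp_sentence_and_rollback (text : string) : unit =
  (* Parse a single sentence from a string, outside any proof, hence [None]
     as the proof mode; this mirrors the parsing loop of [vernac_load]. *)
  let pa =
    Pcoq.Parsable.make ~loc:(Loc.initial Loc.ToplevelInput)
      (Stream.of_string text)
  in
  let sentence =
    Flags.with_option Flags.we_are_parsing
      (Pcoq.Entry.parse (Pvernac.main_entry None)) pa
  in
  match sentence with
  | None -> () (* empty input: nothing to interpret *)
  | Some sentence ->
    (* Snapshot the currently installed state; it serves both as the state
       to interpret against and as the rollback target. *)
    let st = Vernacstate.freeze_interp_state ~marshallable:false in
    let restore () =
      (* Invalidate the unfreeze cache before restoring, as the [Fail]
         handler above does. *)
      Vernacstate.invalidate_cache ();
      Vernacstate.unfreeze_interp_state st
    in
    (try
       ignore (Vernacinterp.interp ~st sentence);
       restore ()
     with exn ->
       let exn = Exninfo.capture exn in
       restore ();
       Exninfo.iraise exn)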
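(* Two further sketches of read-only uses of [Vernacstate.t], again
   illustrative rather than part of the sources: the helper names
   [current_lemma_name] and [open_program_states] are made up for the
   example, while the accessors they rely on all appear in the interfaces
   above.  Reading through the exposed record fields, rather than through
   the deprecated [Vernacstate.Declare] compatibility module, keeps such
   clients independent of the global references it wraps. *)

(* Name of the proof at the top of the lemma stack, if any proof is open
   in the given (frozen) state. *)
let current_lemma_name (st : Vernacstate.t) : Names.Id.t option =
  Option.map
    (fun stack ->
       Vernacstate.LemmaStack.with_top stack ~f:Declare.Proof.get_name)
    st.Vernacstate.lemmas

(* Number of obligation-state entries: one per open module or section plus
   the toplevel one, as documented for the [program] field. *)
let open_program_states (st : Vernacstate.t) : int =
  List.length (NeList.to_list st.Vernacstate.program)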